Skip to content

Commit 2f22759

Browse files
committed
msk-go-iam
1 parent f8e6bc5 commit 2f22759

9 files changed

Lines changed: 567 additions & 0 deletions

File tree

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
1+
require github.com/aws/aws-lambda-go v1.36.1
2+
3+
replace gopkg.in/yaml.v2 => gopkg.in/yaml.v2 v2.2.8
4+
5+
module HandlerKafka
6+
7+
go 1.16
Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,13 @@
1+
github.com/aws/aws-lambda-go v1.36.1 h1:CJxGkL9uKszIASRDxzcOcLX6juzTLoTKtCIgUGcTjTU=
2+
github.com/aws/aws-lambda-go v1.36.1/go.mod h1:jwFe2KmMsHmffA1X2R09hH6lFzJQxzI8qK17ewzbQMM=
3+
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
4+
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
5+
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
6+
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
7+
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
8+
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
9+
github.com/stretchr/testify v1.7.2 h1:4jaiDzPyXQvSd7D0EjG45355tLlV3VOECpq10pLC+8s=
10+
github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals=
11+
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
12+
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
13+
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
Lines changed: 68 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,68 @@
1+
package main
2+
3+
import (
4+
"context"
5+
b64 "encoding/base64"
6+
"fmt"
7+
"github.com/aws/aws-lambda-go/events"
8+
"github.com/aws/aws-lambda-go/lambda"
9+
)
10+
11+
//Core lambda Kafka event handling logic
12+
13+
func handler(ctx context.Context, kafkaEvent events.KafkaEvent) error {
14+
15+
//Lambda Runtime delivers a batch of messages to the lambda function
16+
//Each batch of messages has two fields EventSource and EventSourceARN
17+
//Each batch of messages also has a field called Records
18+
//The Records is a map with multiple keys and values
19+
//Each key is a combination of the Topic Name and the Partition Number
20+
//One batch of messages can contain messages from multiple partitions
21+
eventSource := kafkaEvent.EventSource
22+
eventSourceARN := kafkaEvent.EventSourceARN
23+
records := kafkaEvent.Records
24+
fmt.Println("EventSource = ", eventSource)
25+
fmt.Println("EventSourceARN = ", eventSourceARN)
26+
//Defining a variable to keep track of the message number in the batch
27+
var i = 1
28+
//Now looping through the keys in the map
29+
for key, arrayOfKafkaMessage := range records {
30+
fmt.Println("This Key = ", key)
31+
//Each key (topic-partition) can in turn have a number of messages
32+
//Now looping through the messages in a particular key and getting fields in the message
33+
for _, thisKafkaMessage := range arrayOfKafkaMessage {
34+
fmt.Println("**********")
35+
fmt.Println("Start of message ", i)
36+
fmt.Println("Topic = ", thisKafkaMessage.Topic)
37+
fmt.Println("Partition = ", thisKafkaMessage.Partition)
38+
fmt.Println("Offset = ", thisKafkaMessage.Offset)
39+
fmt.Println("Timestamp = ", thisKafkaMessage.Timestamp)
40+
fmt.Println("TimestampType = ", thisKafkaMessage.TimestampType)
41+
//Each message in turn has a key and a value which are base64 encoded and need to be decoded
42+
var thisKafkaMessageKey = "null"
43+
if thisKafkaMessage.Key != "" {
44+
var thisKafkaMessageKeyBase64 = thisKafkaMessage.Key
45+
var thisKafkaMessageKeyDecodedInByteArray, _ = b64.StdEncoding.DecodeString(thisKafkaMessageKeyBase64)
46+
thisKafkaMessageKey = string(thisKafkaMessageKeyDecodedInByteArray)
47+
}
48+
fmt.Println("Key = ", thisKafkaMessageKey)
49+
var thisKafkaMessageValue = "null"
50+
if thisKafkaMessage.Value != "" {
51+
var thisKafkaMessageValueBase64 = thisKafkaMessage.Value
52+
var thisKafkaMessageValueDecodedInByteArray, _ = b64.StdEncoding.DecodeString(thisKafkaMessageValueBase64)
53+
thisKafkaMessageValue = string(thisKafkaMessageValueDecodedInByteArray)
54+
55+
}
56+
fmt.Println("Value = ", thisKafkaMessageValue)
57+
fmt.Println("End of message ", i)
58+
fmt.Println("**********")
59+
i = i + 1
60+
}
61+
}
62+
63+
return nil
64+
}
65+
66+
// main hands control to the AWS Lambda runtime, registering handler as the
// function invoked for each incoming MSK event batch. lambda.Start blocks
// for the lifetime of the execution environment.
func main() {
	lambda.Start(handler)
}
Lines changed: 64 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,64 @@
1+
package main

import (
	"context"
	"testing"

	"github.com/aws/aws-lambda-go/events"
)

// TestHandler exercises the Kafka batch handler with representative events.
//
// NOTE(review): the previous version of this file was copied from the SAM
// "hello world" template — it referenced DefaultHTTPGetAddress and
// ErrNon200Response (defined nowhere in this commit) and invoked handler with
// an events.APIGatewayProxyRequest, while handler accepts
// (context.Context, events.KafkaEvent). The file could not compile against
// main.go; it is rewritten here to test the actual Kafka handler.
func TestHandler(t *testing.T) {
	t.Run("Empty event", func(t *testing.T) {
		// An event with no records must be processed without error.
		if err := handler(context.Background(), events.KafkaEvent{}); err != nil {
			t.Fatalf("expected no error for an empty event, got: %v", err)
		}
	})

	t.Run("Batch with records", func(t *testing.T) {
		event := events.KafkaEvent{
			EventSource:    "aws:kafka",
			EventSourceARN: "arn:aws:kafka:us-west-2:123456789012:cluster/testCluster/abc-123",
			Records: map[string][]events.KafkaRecord{
				"myTopic-0": {
					{
						Topic:     "myTopic",
						Partition: 0,
						Offset:    383,
						// "azE=" / "bTE=" are base64 for "k1" / "m1".
						Key:   "azE=",
						Value: "bTE=",
					},
					{
						Topic:     "myTopic",
						Partition: 0,
						Offset:    384,
						// Empty Key and Value model a keyless record and a
						// tombstone-style value; handler logs "null" for both.
					},
				},
			},
		}
		if err := handler(context.Background(), event); err != nil {
			t.Fatalf("expected no error for a valid batch, got: %v", err)
		}
	})
}

msk-lambda-iam-go-sam/Makefile

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,4 @@
1+
.PHONY: build
2+
3+
build:
4+
sam build

msk-lambda-iam-go-sam/README.md

Lines changed: 208 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,208 @@
1+
# Golang AWS Lambda Kafka consumer with IAM auth, using AWS SAM
2+
3+
This pattern is an example of a Lambda function that consumes messages from an Amazon Managed Streaming for Kafka (Amazon MSK) topic, where the MSK Cluster has been configured to use IAM authentication. This pattern assumes you already have an MSK cluster with a topic configured, if you need a sample pattern to deploy an MSK cluster either in Provisioned or Serverless modes please see the [msk-cfn-sasl-lambda pattern](https://serverlessland.com/patterns/msk-cfn-sasl-lambda).
4+
5+
This project contains source code and supporting files for a serverless application that you can deploy with the AWS Serverless Application Model (AWS SAM) CLI. It includes the following files and folders.
6+
7+
8+
```bash
9+
.
10+
├── Makefile <-- Make to automate build
11+
├── README.md <-- This instructions file
12+
├── HandlerKafka <-- Source code for a lambda function
13+
│ ├── main.go <-- Lambda function code
14+
│ └── main_test.go <-- Unit tests
15+
└── template.yaml
16+
```
17+
18+
The application creates a Lambda function that listens to Kafka messages on a topic of an MSK Cluster. These resources are defined in the `template.yaml` file in this project. You can update the template to add AWS resources through the same deployment process that updates your application code.
19+
20+
Important: this application uses various AWS services and there are costs associated with these services after the Free Tier usage - please see the [AWS Pricing page](https://aws.amazon.com/pricing/) for details. You are responsible for any AWS costs incurred. No warranty is implied in this example.
21+
22+
## Requirements
23+
24+
* [Create an AWS account](https://portal.aws.amazon.com/gp/aws/developer/registration/index.html) if you do not already have one and log in. The IAM user that you use must have sufficient permissions to make necessary AWS service calls and manage AWS resources.
25+
* [AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/install-cliv2.html) installed and configured
26+
* [Git installed](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git)
27+
* [AWS Serverless Application Model](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-install.html) (AWS SAM) installed
28+
* [Docker installed](https://www.docker.com/community-edition)
29+
* [Golang](https://golang.org)
30+
* Create MSK cluster and topic that will be used for testing. It is important to create the topic before deploying the Lambda function, otherwise the event source mapping will stay disabled.
31+
32+
## Deploy the sample application
33+
34+
The AWS SAM CLI is a serverless tool for building and testing Lambda applications. It uses Docker to locally test your functions in an Amazon Linux environment that resembles the Lambda execution environment. It can also emulate your application's build environment and API.
35+
36+
To use the AWS SAM CLI, you need the following tools.
37+
38+
* AWS SAM CLI - [Install the AWS SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-install.html)
39+
* Docker - [Install Docker community edition](https://hub.docker.com/search/?type=edition&offering=community)
40+
41+
In this example we use the built-in `sam build` to automatically download all the dependencies and package our build target.
42+
Read more about [SAM Build here](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/sam-cli-command-reference-sam-build.html)
43+
44+
45+
1. Create a new directory, navigate to that directory in a terminal and clone the GitHub repository:
46+
```
47+
git clone https://github.com/aws-samples/serverless-patterns.git
48+
```
49+
1. Change directory to the pattern directory:
50+
```
51+
cd msk-lambda-iam-go-sam
52+
```
53+
54+
1. The `sam build` command is wrapped inside of the `Makefile`.
55+
56+
```shell
57+
make
58+
```
59+
60+
AWS Lambda Golang runtime requires a flat folder with the executable generated on build step. SAM will use `CodeUri` property to know where to look up for the application:
61+
62+
```yaml
63+
...
64+
FirstFunction:
65+
Type: AWS::Serverless::Function
66+
Properties:
67+
CodeUri: hello_world/
68+
...
69+
```
70+
71+
1. To deploy your application for the first time, run the following in your shell:
72+
73+
```bash
74+
sam deploy --guided
75+
```
76+
77+
1. During the prompts:
78+
* **Stack Name**: The name of the stack to deploy to CloudFormation. This should be unique to your account and region, and a good starting point would be something matching your project name.
79+
* **AWS Region**: The AWS region you want to deploy your app to.
80+
* **Parameter MSKClusterName**: The name of the MSKCluster, eg. msk-test-cluster
81+
82+
* **Parameter MSKClusterId**: The unique ID of the MSKCluster, eg. a4e132c8-6ad0-4334-a313-123456789012-s2
83+
* **Parameter MSKTopic**: The Kafka topic on which the lambda function will listen on
84+
* **Confirm changes before deploy**: If set to yes, any change sets will be shown to you before execution for manual review. If set to no, the AWS SAM CLI will automatically deploy application changes.
85+
* **Allow SAM CLI IAM role creation**: Many AWS SAM templates, including this example, create AWS IAM roles required for the AWS Lambda function(s) included to access AWS services. By default, these are scoped down to minimum required permissions. To deploy an AWS CloudFormation stack which creates or modifies IAM roles, the `CAPABILITY_IAM` value for `capabilities` must be provided. If permission isn't provided through this prompt, to deploy this example you must explicitly pass `--capabilities CAPABILITY_IAM` to the `sam deploy` command.
86+
* **Disable rollback**: Defaults to No and it preserves the state of previously provisioned resources when an operation fails
87+
* **Save arguments to configuration file**: If set to yes, your choices will be saved to a configuration file inside the project, so that in the future you can just re-run `sam deploy` without parameters to deploy changes to your application.
88+
* **SAM configuration file [samconfig.toml]**: Name of the configuration file to store configuration information locally
89+
* **SAM configuration environment [default]**: Environment for storing deployment information locally
90+
91+
You should get a message "Successfully created/updated stack - <StackName> in <Region>" if all goes well.
92+
93+
Once you have run `sam deploy --guided` mode once and saved arguments to a configuration file (samconfig.toml), you can use `sam deploy` in future to use these defaults.
94+
95+
## How it works
96+
97+
This pattern creates a Lambda function along with a Lambda Event Source Mapping(ESM) resource. This maps a Kafka topic on an MSK Cluster as a trigger to a Lambda function. The ESM takes care of polling the Kafka topic and then invokes the Lambda function with a batch of messages.
98+
99+
## Test the sample application
100+
101+
Once the Lambda function is deployed, send some Kafka messages to the topic that you configured in the Lambda function trigger.
102+
103+
Either send at least 10 messages or wait for 300 seconds (check the values of BatchSize: 10 and MaximumBatchingWindowInSeconds: 300 in the template.yaml file)
104+
105+
Then check Amazon CloudWatch logs and you should see messages in the CloudWatch Log Group with the name of the deployed Lambda function.
106+
107+
The Lambda code parses the Kafka messages and outputs the fields in the Kafka messages to CloudWatch logs.
108+
109+
A single Lambda function receives a batch of messages. The messages are received as a map with each key being a combination of the topic and the partition, as a single batch can receive messages from multiple partitions.
110+
111+
Each key has a list of messages. Each Kafka message has the following properties - `Topic`, `Partition`, `Offset`, `TimeStamp`, `TimeStampType`, `Key`, and `Value`.
112+
113+
The `Key` and `Value` are base64 encoded and have to be decoded. A message can also have a list of headers, each header having a key and a value.
114+
115+
The code in this example prints out the fields in the Kafka message and also decodes the base64-encoded key and value and logs them to CloudWatch logs.
116+
117+
118+
### Local development
119+
120+
**You can invoke the function locally using `sam local`**
121+
122+
```bash
123+
sam local invoke --event=events/event.json
124+
```
125+
126+
You should see a response similar to the below
127+
128+
`START RequestId: 5c10310a-abf9-416e-b017-697d2c3ba097 Version: $LATEST
129+
Received an event: {'eventSource': 'aws:kafka', 'eventSourceArn': 'arn:aws:kafka:us-west-2:123456789012:cluster/MSKWorkshopCluster/a93759a9-c9d0-4952-984c-492c6bfa2be8-13', 'bootstrapServers': 'b-1.mskworkshopcluster.z9kc4f.c13.kafka.us-west-2.amazonaws.com:9098,b-3.mskworkshopcluster.z9kc4f.c13.kafka.us-west-2.amazonaws.com:9098,b-2.mskworkshopcluster.z9kc4f.c13.kafka.us-west-2.amazonaws.com:9098', 'records': {'myTopic-0': [{'topic': 'myTopic', 'partition': 0, 'offset': 383, 'timestamp': 1678484822068, 'timestampType': 'CREATE_TIME', 'value': 'bTE=', 'headers': []}, {'topic': 'myTopic', 'partition': 0, 'offset': 384, 'timestamp': 1678484823448, 'timestampType': 'CREATE_TIME', 'value': 'bTI=', 'headers': []}, {'topic': 'myTopic', 'partition': 0, 'offset': 385, 'timestamp': 1678484824763, 'timestampType': 'CREATE_TIME', 'value': 'bTM=', 'headers': []}, {'topic': 'myTopic', 'partition': 0, 'offset': 386, 'timestamp': 1678484825902, 'timestampType': 'CREATE_TIME', 'value': 'bTQ=', 'headers': []}, {'topic': 'myTopic', 'partition': 0, 'offset': 387, 'timestamp': 1678484827810, 'timestampType': 'CREATE_TIME', 'value': 'bTU=', 'headers': []}]}}
130+
Event Source: aws:kafka
131+
Event Source ARN: arn:aws:kafka:us-west-2:123456789012:cluster/MSKWorkshopCluster/a93759a9-c9d0-4952-984c-492c6bfa2be8-13
132+
Bootstrap Servers: b-1.mskworkshopcluster.z9kc4f.c13.kafka.us-west-2.amazonaws.com:9098,b-3.mskworkshopcluster.z9kc4f.c13.kafka.us-west-2.amazonaws.com:9098,b-2.mskworkshopcluster.z9kc4f.c13.kafka.us-west-2.amazonaws.com:9098
133+
Records: {'myTopic-0': [{'topic': 'myTopic', 'partition': 0, 'offset': 383, 'timestamp': 1678484822068, 'timestampType': 'CREATE_TIME', 'value': 'bTE=', 'headers': []}, {'topic': 'myTopic', 'partition': 0, 'offset': 384, 'timestamp': 1678484823448, 'timestampType': 'CREATE_TIME', 'value': 'bTI=', 'headers': []}, {'topic': 'myTopic', 'partition': 0, 'offset': 385, 'timestamp': 1678484824763, 'timestampType': 'CREATE_TIME', 'value': 'bTM=', 'headers': []}, {'topic': 'myTopic', 'partition': 0, 'offset': 386, 'timestamp': 1678484825902, 'timestampType': 'CREATE_TIME', 'value': 'bTQ=', 'headers': []}, {'topic': 'myTopic', 'partition': 0, 'offset': 387, 'timestamp': 1678484827810, 'timestampType': 'CREATE_TIME', 'value': 'bTU=', 'headers': []}]}
134+
Current Record: [{'topic': 'myTopic', 'partition': 0, 'offset': 383, 'timestamp': 1678484822068, 'timestampType': 'CREATE_TIME', 'value': 'bTE=', 'headers': []}, {'topic': 'myTopic', 'partition': 0, 'offset': 384, 'timestamp': 1678484823448, 'timestampType': 'CREATE_TIME', 'value': 'bTI=', 'headers': []}, {'topic': 'myTopic', 'partition': 0, 'offset': 385, 'timestamp': 1678484824763, 'timestampType': 'CREATE_TIME', 'value': 'bTM=', 'headers': []}, {'topic': 'myTopic', 'partition': 0, 'offset': 386, 'timestamp': 1678484825902, 'timestampType': 'CREATE_TIME', 'value': 'bTQ=', 'headers': []}, {'topic': 'myTopic', 'partition': 0, 'offset': 387, 'timestamp': 1678484827810, 'timestampType': 'CREATE_TIME', 'value': 'bTU=', 'headers': []}]`
135+
********************
136+
Now printing details of record number: 1
137+
Topic: myTopic
138+
Partition: 0
139+
Offset: 383
140+
Topic: myTopic
141+
Timestamp: 1678484822068
142+
TimestampType: CREATE_TIME
143+
Key = null
144+
Value = m1
145+
Now finished printing details of record number: 1
146+
********************
147+
********************
148+
Now printing details of record number: 2
149+
Topic: myTopic
150+
Partition: 0
151+
Offset: 384
152+
Topic: myTopic
153+
Timestamp: 1678484823448
154+
TimestampType: CREATE_TIME
155+
Key = null
156+
Value = m2
157+
Now finished printing details of record number: 2
158+
********************
159+
********************
160+
Now printing details of record number: 3
161+
Topic: myTopic
162+
Partition: 0
163+
Offset: 385
164+
Topic: myTopic
165+
Timestamp: 1678484824763
166+
TimestampType: CREATE_TIME
167+
Key = null
168+
Value = m3
169+
Now finished printing details of record number: 3
170+
********************
171+
********************
172+
Now printing details of record number: 4
173+
Topic: myTopic
174+
Partition: 0
175+
Offset: 386
176+
Topic: myTopic
177+
Timestamp: 1678484825902
178+
TimestampType: CREATE_TIME
179+
Key = null
180+
Value = m4
181+
Now finished printing details of record number: 4
182+
********************
183+
********************
184+
Now printing details of record number: 5
185+
Topic: myTopic
186+
Partition: 0
187+
Offset: 387
188+
Topic: myTopic
189+
Timestamp: 1678484827810
190+
TimestampType: CREATE_TIME
191+
Key = null
192+
Value = m5
193+
Now finished printing details of record number: 5
194+
********************
195+
END RequestId: 5c10310a-abf9-416e-b017-697d2c3ba097
196+
REPORT RequestId: 5c10310a-abf9-416e-b017-697d2c3ba097 Init Duration: 6.68 ms Duration: 1502.83 ms Billed Duration: 1503 ms Memory Size: 128 MB Max Memory Used: 128 MB
197+
198+
## Cleanup
199+
200+
1. Delete the stack
201+
```bash
202+
sam delete
203+
```
204+
205+
----
206+
Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
207+
208+
SPDX-License-Identifier: MIT-0

0 commit comments

Comments
 (0)