In this demo, we will:
- Create a Kinesis data stream named demo-stream
- Create a Lambda function (KinesisDataGenerator) that writes simulated sensor readings to the stream
- Create a DynamoDB table (KinesisProcessedData) to hold the processed records
- Create a second Lambda function (ProcessKinesisData) that is triggered by the stream and stores each record in the table
Start by creating a Kinesis data stream named demo-stream.
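If you prefer to create the stream programmatically instead of through the console, a minimal boto3 sketch looks like the following (the single provisioned shard is an assumption; adjust the capacity to your needs):

import boto3

kinesis = boto3.client('kinesis')

# Create the stream with one provisioned shard (assumption; the console default also works)
kinesis.create_stream(StreamName='demo-stream', ShardCount=1)

# Block until the stream is ACTIVE so the generator can write to it immediately
kinesis.get_waiter('stream_exists').wait(StreamName='demo-stream')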
Next, create a Lambda function named KinesisDataGenerator (Python runtime) and paste in the following code, which writes simulated sensor readings to the stream:
import json
import boto3
import random
import time

def lambda_handler(event, context):
    kinesis = boto3.client('kinesis')
    for _ in range(100):  # Generate 100 records
        data = {
            'sensor_id': random.randint(1, 10),
            'temperature': round(random.uniform(20, 30), 2),
            'humidity': round(random.uniform(30, 70), 2),
            'timestamp': int(time.time())
        }
        response = kinesis.put_record(
            StreamName='demo-stream',
            Data=json.dumps(data),
            PartitionKey=str(data['sensor_id'])
        )
        print(f"Put record in stream: {data}")
        time.sleep(0.1)  # Wait for 0.1 second before sending next record
    return {
        'statusCode': 200,
        'body': json.dumps('Data generation complete')
    }
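The function writes one record per put_record call, which is fine for a demo. For higher throughput, Kinesis also supports batching with put_records; a rough sketch of the same workload in batched form (same data layout assumed) is:

import json
import random
import time
import boto3

kinesis = boto3.client('kinesis')

# Build one batch of simulated readings, then send them in a single call (up to 500 per request)
records = []
for _ in range(100):
    data = {
        'sensor_id': random.randint(1, 10),
        'temperature': round(random.uniform(20, 30), 2),
        'humidity': round(random.uniform(30, 70), 2),
        'timestamp': int(time.time())
    }
    records.append({'Data': json.dumps(data), 'PartitionKey': str(data['sensor_id'])})

response = kinesis.put_records(StreamName='demo-stream', Records=records)
print(f"Failed records: {response['FailedRecordCount']}")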
After saving the code, increase the function timeout to 1 minute; the loop sleeps for 0.1 seconds between records, so a full run takes well over the 3-second default.
Attach the AmazonKinesisFullAccess policy to the function's execution role so it is allowed to write to the stream.
Now create a DynamoDB table named KinesisProcessedData to hold the processed readings. Use sensor_id as the partition key and timestamp as the sort key (both of type Number, matching the integer values the generator produces).
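Created from code, the table definition would look roughly like this boto3 sketch (on-demand billing is an assumption; provisioned capacity works just as well):

import boto3

dynamodb = boto3.client('dynamodb')

dynamodb.create_table(
    TableName='KinesisProcessedData',
    KeySchema=[
        {'AttributeName': 'sensor_id', 'KeyType': 'HASH'},    # partition key
        {'AttributeName': 'timestamp', 'KeyType': 'RANGE'}    # sort key
    ],
    AttributeDefinitions=[
        {'AttributeName': 'sensor_id', 'AttributeType': 'N'},
        {'AttributeName': 'timestamp', 'AttributeType': 'N'}
    ],
    BillingMode='PAY_PER_REQUEST'  # assumption: on-demand capacity for the demo
)

# Wait for the table to become ACTIVE before wiring up the processor
dynamodb.get_waiter('table_exists').wait(TableName='KinesisProcessedData')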
Create a second Lambda function named ProcessKinesisData with the following code; it decodes each record arriving from the stream and stores it in the table:
import json
import boto3
from decimal import Decimal
import base64

dynamodb = boto3.resource('dynamodb')
table = dynamodb.Table('KinesisProcessedData')

def lambda_handler(event, context):
    for record in event['Records']:
        # Decode and load the Kinesis data
        payload = json.loads(base64.b64decode(record['kinesis']['data']).decode('utf-8'))
        # Convert float to Decimal for DynamoDB
        payload['temperature'] = Decimal(str(payload['temperature']))
        payload['humidity'] = Decimal(str(payload['humidity']))
        table.put_item(Item=payload)
        print(f"Processed and stored record: {payload}")
    return {
        'statusCode': 200,
        'body': json.dumps('Processing complete')
    }
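The explicit Decimal conversion is needed because boto3's DynamoDB resource rejects Python floats. If you prefer not to convert fields one by one, an equivalent approach is to let json.loads produce Decimal values during parsing; a small self-contained sketch (the sample record values are illustrative only):

from decimal import Decimal
import base64
import json

# Sample encoded record, standing in for record['kinesis']['data']
encoded = base64.b64encode(json.dumps(
    {'sensor_id': 3, 'temperature': 24.71, 'humidity': 55.2, 'timestamp': 1700000000}
).encode('utf-8'))

# parse_float makes json.loads return Decimal for every floating-point value,
# so the item is DynamoDB-safe without converting fields one by one
payload = json.loads(base64.b64decode(encoded).decode('utf-8'), parse_float=Decimal)
print(payload)  # temperature and humidity are now Decimal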
Attach both the AmazonKinesisFullAccess and AmazonDynamoDBFullAccess policies to this function's execution role so it can read from the stream and write to the table.
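If you manage the role from code rather than the console, the attachments look like this (the role name ProcessKinesisDataRole is a placeholder; substitute whatever execution role your function actually uses):

import boto3

iam = boto3.client('iam')

# Attach both managed policies to the processor's execution role
for policy in ('AmazonKinesisFullAccess', 'AmazonDynamoDBFullAccess'):
    iam.attach_role_policy(
        RoleName='ProcessKinesisDataRole',  # placeholder role name
        PolicyArn=f'arn:aws:iam::aws:policy/{policy}'
    )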
Add a Kinesis trigger to ProcessKinesisData, choosing demo-stream as the source (the trigger is listed as kinesis/demo-stream).
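Under the hood the trigger is an event source mapping; a minimal boto3 sketch for creating it (the starting position and batch size are assumptions, chosen to process only new records) is:

import boto3

kinesis = boto3.client('kinesis')
lambda_client = boto3.client('lambda')

# Look up the stream ARN, then map it to the processor function
stream_arn = kinesis.describe_stream_summary(
    StreamName='demo-stream'
)['StreamDescriptionSummary']['StreamARN']

lambda_client.create_event_source_mapping(
    EventSourceArn=stream_arn,
    FunctionName='ProcessKinesisData',
    StartingPosition='LATEST',  # assumption: only process records written after the trigger exists
    BatchSize=100               # assumption: up to 100 records per invocation
)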
Finally, create a test event named Test-Event for the KinesisDataGenerator function (the generator ignores its event payload, so an empty JSON object works) and invoke it. The records flow through demo-stream, trigger ProcessKinesisData, and land in the KinesisProcessedData table.
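You can run the same end-to-end check from code: invoke the generator, give the trigger a moment to catch up, then spot-check the table. A sketch (the 30-second wait is an arbitrary assumption):

import json
import time
import boto3

lambda_client = boto3.client('lambda')
table = boto3.resource('dynamodb').Table('KinesisProcessedData')

# Kick off the data generator with an empty payload
lambda_client.invoke(
    FunctionName='KinesisDataGenerator',
    InvocationType='RequestResponse',
    Payload=json.dumps({})
)

# Give the stream trigger time to process the batch, then sample the stored items
time.sleep(30)
items = table.scan(Limit=5)['Items']
print(f"Sample of stored items: {items}")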