from aws_lambda_powertools import Logger
from aws_lambda_powertools.utilities.kafka import ConsumerRecords, SchemaConfig, kafka_consumer
from aws_lambda_powertools.utilities.typing import LambdaContext

logger = Logger()

# Define Avro schema
avro_schema = """
{
    "type": "record",
    "name": "User",
    "namespace": "com.example",
    "fields": [
        {"name": "name", "type": "string"},
        {"name": "age", "type": "int"}
    ]
}
"""

schema_config = SchemaConfig(
    value_schema_type="AVRO",
    value_schema=avro_schema,
)


@kafka_consumer(schema_config=schema_config)
def lambda_handler(event: ConsumerRecords, context: LambdaContext):
    for record in event.records:
        # Log record coordinates for tracing
        logger.info(f"Processing message from topic '{record.topic}'")
        logger.info(f"Partition: {record.partition}, Offset: {record.offset}")
        logger.info(f"Produced at: {record.timestamp}")

        # Process message headers
        logger.info(f"Headers: {record.headers}")

        # Access the Avro deserialized message content
        value = record.value
        logger.info(f"Deserialized value: {value['name']}")

        # For debugging, you can access the original raw data
        logger.info(f"Raw message: {record.original_value}")

    return {"statusCode": 200}
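
# --- Local test sketch (not part of the handler) ---
# A minimal way to exercise the handler without deploying, assuming the
# standard MSK event shape and that fastavro is available to produce the
# Avro-encoded payload. The topic name, sample record, and passing None as
# the Lambda context are illustrative assumptions, not part of the utility.
if __name__ == "__main__":
    import base64
    import json
    from io import BytesIO

    from fastavro import parse_schema, schemaless_writer

    # Avro-encode a sample record using the same schema the handler expects
    buf = BytesIO()
    schemaless_writer(buf, parse_schema(json.loads(avro_schema)), {"name": "Alice", "age": 42})

    # Hand-built event following the documented MSK record layout
    event = {
        "eventSource": "aws:kafka",
        "records": {
            "my-topic-0": [
                {
                    "topic": "my-topic",
                    "partition": 0,
                    "offset": 15,
                    "timestamp": 1700000000000,
                    "timestampType": "CREATE_TIME",
                    "key": None,  # sample record carries no key
                    "value": base64.b64encode(buf.getvalue()).decode(),
                    "headers": [],
                }
            ]
        },
    }

    # The handler never reads the context here, so None suffices for a local run
    print(lambda_handler(event, None))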