Skip to content

Commit b6463a9

Browse files
authored
docs(batch): new visuals and error handling section (#2857)
* fix(parameters): make cache aware of single vs multiple calls Signed-off-by: heitorlessa <[email protected]> * chore: cleanup, add test for single and nested Signed-off-by: heitorlessa <[email protected]> * docs(batch): simplify background with diagram and wording Signed-off-by: heitorlessa <[email protected]> * docs: recommend sqs new record.json_body Signed-off-by: heitorlessa <[email protected]> * docs: add code annotation for more details Signed-off-by: heitorlessa <[email protected]> * docs: add initial error handling section Signed-off-by: heitorlessa <[email protected]> * docs: add visual for partial failure mechanics Signed-off-by: heitorlessa <[email protected]> * docs: add fifo and stream sequence numbers Signed-off-by: heitorlessa <[email protected]> * docs: revamp ctx manager processing Signed-off-by: heitorlessa <[email protected]> * docs: highlight use of model in BatchProcessor Signed-off-by: heitorlessa <[email protected]> * docs: diagram for BYOP --------- Signed-off-by: heitorlessa <[email protected]>
1 parent 453a8f2 commit b6463a9

10 files changed

+333
-46
lines changed

.gitleaksignore

+3
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
examples/batch_processing/src/context_manager_access_output_pydantic.txt:aws-access-token:10
2+
examples/batch_processing/src/context_manager_access_output_pydantic.txt:aws-access-token:15
3+
examples/batch_processing/src/context_manager_access_output.txt:aws-access-token:10

docs/utilities/batch.md

+248-26
Large diffs are not rendered by default.

examples/batch_processing/src/context_manager_access.py

+3-2
Original file line numberDiff line numberDiff line change
@@ -26,14 +26,15 @@ def record_handler(record: SQSRecord):
2626
@logger.inject_lambda_context
2727
@tracer.capture_lambda_handler
2828
def lambda_handler(event, context: LambdaContext):
29-
batch = event["Records"]
29+
batch = event["Records"] # (1)!
3030
with processor(records=batch, handler=record_handler):
3131
processed_messages: List[Tuple] = processor.process()
3232

3333
for message in processed_messages:
3434
status: Literal["success"] | Literal["fail"] = message[0]
35+
cause: str = message[1] # (2)!
3536
record: SQSRecord = message[2]
3637

37-
logger.info(status, record=record)
38+
logger.info(status, record=record, cause=cause)
3839

3940
return processor.response()
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
1+
[
2+
(
3+
"fail",
4+
"<class 'Exception'>:Failed to process record.", # (1)!
5+
<aws_lambda_powertools.utilities.data_classes.sqs_event.SQSRecord object at 0x103c590a0>
6+
),
7+
(
8+
"success",
9+
"success",
10+
{'messageId': '88891c36-32eb-4a25-9905-654a32916893', 'receiptHandle': 'AQEBwJnKyrHigUMZj6rYigCgxlaS3SLy0a', 'body': 'success', 'attributes': {'ApproximateReceiveCount': '1', 'SentTimestamp': '1545082649183', 'SenderId': 'AIDAIENQZJOLO23YVJ4VO', 'ApproximateFirstReceiveTimestamp': '1545082649185'}, 'messageAttributes': {}, 'md5OfBody': 'e4e68fb7bd0e697a0ae8f1bb342846b3', 'eventSource': 'aws:sqs', 'eventSourceARN': 'arn:aws:sqs:us-east-2:123456789012:my-queue', 'awsRegion': 'us-east-1'}
11+
)
12+
]
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,17 @@
1+
[
2+
(
3+
"fail", # (1)!
4+
"<class 'pydantic.error_wrappers.ValidationError'>:1 validation error for OrderSqs\nbody\n JSON object must be str, bytes or bytearray (type=type_error.json)",
5+
<aws_lambda_powertools.utilities.data_classes.sqs_event.SQSRecord object at 0x103c590a0>
6+
),
7+
(
8+
"success",
9+
"success",
10+
{'messageId': '88891c36-32eb-4a25-9905-654a32916893', 'receiptHandle': 'AQEBwJnKyrHigUMZj6rYigCgxlaS3SLy0a', 'body': 'success', 'attributes': {'ApproximateReceiveCount': '1', 'SentTimestamp': '1545082649183', 'SenderId': 'AIDAIENQZJOLO23YVJ4VO', 'ApproximateFirstReceiveTimestamp': '1545082649185'}, 'messageAttributes': {}, 'md5OfBody': 'e4e68fb7bd0e697a0ae8f1bb342846b3', 'eventSource': 'aws:sqs', 'eventSourceARN': 'arn:aws:sqs:us-east-2:123456789012:my-queue', 'awsRegion': 'us-east-1'}
11+
),
12+
(
13+
"fail", # (2)!
14+
"<class 'Exception'>:Failed to process record.",
15+
OrderSqs(messageId='9d0bfba5-d213-4b64-89bd-f4fbd7e58358', receiptHandle='AQEBwJnKyrHigUMZj6rYigCgxlaS3SLy0a', body=Order(item={'type': 'fail'}), attributes=SqsAttributesModel(ApproximateReceiveCount='1', ApproximateFirstReceiveTimestamp=datetime.datetime(2018, 12, 17, 21, 37, 29, 185000, tzinfo=datetime.timezone.utc), MessageDeduplicationId=None, MessageGroupId=None, SenderId='AIDAIENQZJOLO23YVJ4VO', SentTimestamp=datetime.datetime(2018, 12, 17, 21, 37, 29, 183000, tzinfo=datetime.timezone.utc), SequenceNumber=None, AWSTraceHeader=None), messageAttributes={}, md5OfBody='e4e68fb7bd0e697a0ae8f1bb342846b3', md5OfMessageAttributes=None, eventSource='aws:sqs', eventSourceARN='arn:aws:sqs:us-east-2:123456789012:my-queue', awsRegion='us-east-1')
16+
)
17+
]

examples/batch_processing/src/getting_started_dynamodb.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@
1111
)
1212
from aws_lambda_powertools.utilities.typing import LambdaContext
1313

14-
processor = BatchProcessor(event_type=EventType.DynamoDBStreams)
14+
processor = BatchProcessor(event_type=EventType.DynamoDBStreams) # (1)!
1515
tracer = Tracer()
1616
logger = Logger()
1717

Original file line numberDiff line numberDiff line change
@@ -0,0 +1,35 @@
1+
from aws_lambda_powertools import Logger, Tracer
2+
from aws_lambda_powertools.utilities.batch import (
3+
BatchProcessor,
4+
EventType,
5+
process_partial_response,
6+
)
7+
from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord
8+
from aws_lambda_powertools.utilities.typing import LambdaContext
9+
10+
processor = BatchProcessor(event_type=EventType.SQS)
11+
tracer = Tracer()
12+
logger = Logger()
13+
14+
15+
class InvalidPayload(Exception):
16+
...
17+
18+
19+
@tracer.capture_method
20+
def record_handler(record: SQSRecord):
21+
payload: str = record.body
22+
logger.info(payload)
23+
if not payload:
24+
raise InvalidPayload("Payload does not contain minimum information to be processed.") # (1)!
25+
26+
27+
@logger.inject_lambda_context
28+
@tracer.capture_lambda_handler
29+
def lambda_handler(event, context: LambdaContext):
30+
return process_partial_response( # (2)!
31+
event=event,
32+
record_handler=record_handler,
33+
processor=processor,
34+
context=context,
35+
)

examples/batch_processing/src/getting_started_kinesis.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@
99
)
1010
from aws_lambda_powertools.utilities.typing import LambdaContext
1111

12-
processor = BatchProcessor(event_type=EventType.KinesisDataStreams)
12+
processor = BatchProcessor(event_type=EventType.KinesisDataStreams) # (1)!
1313
tracer = Tracer()
1414
logger = Logger()
1515

Original file line numberDiff line numberDiff line change
@@ -1,5 +1,3 @@
1-
import json
2-
31
from aws_lambda_powertools import Logger, Tracer
42
from aws_lambda_powertools.utilities.batch import (
53
BatchProcessor,
@@ -9,20 +7,23 @@
97
from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord
108
from aws_lambda_powertools.utilities.typing import LambdaContext
119

12-
processor = BatchProcessor(event_type=EventType.SQS)
10+
processor = BatchProcessor(event_type=EventType.SQS) # (1)!
1311
tracer = Tracer()
1412
logger = Logger()
1513

1614

1715
@tracer.capture_method
18-
def record_handler(record: SQSRecord):
19-
payload: str = record.body
20-
if payload:
21-
item: dict = json.loads(payload)
22-
logger.info(item)
16+
def record_handler(record: SQSRecord): # (2)!
17+
payload: str = record.json_body  # deserialized JSON string; use record.body for the raw string
18+
logger.info(payload)
2319

2420

2521
@logger.inject_lambda_context
2622
@tracer.capture_lambda_handler
2723
def lambda_handler(event, context: LambdaContext):
28-
return process_partial_response(event=event, record_handler=record_handler, processor=processor, context=context)
24+
return process_partial_response( # (3)!
25+
event=event,
26+
record_handler=record_handler,
27+
processor=processor,
28+
context=context,
29+
)

examples/batch_processing/src/getting_started_sqs_fifo.py

+3-7
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,3 @@
1-
import json
2-
31
from aws_lambda_powertools import Logger, Tracer
42
from aws_lambda_powertools.utilities.batch import (
53
SqsFifoPartialProcessor,
@@ -8,17 +6,15 @@
86
from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord
97
from aws_lambda_powertools.utilities.typing import LambdaContext
108

11-
processor = SqsFifoPartialProcessor()
9+
processor = SqsFifoPartialProcessor() # (1)!
1210
tracer = Tracer()
1311
logger = Logger()
1412

1513

1614
@tracer.capture_method
1715
def record_handler(record: SQSRecord):
18-
payload: str = record.body
19-
if payload:
20-
item: dict = json.loads(payload)
21-
logger.info(item)
16+
payload: str = record.json_body  # deserialized JSON string; use record.body for the raw string
17+
logger.info(payload)
2218

2319

2420
@logger.inject_lambda_context

0 commit comments

Comments
 (0)