
Commit b92614a

docs: small changes to help others + sentry dev dependency

1 parent bd0cde8 · commit b92614a

18 files changed: +263 −112 lines

docs/utilities/batch.md

Lines changed: 33 additions & 15 deletions

@@ -92,13 +92,13 @@ Processing batches from SQS works in three stages:

     The second record failed to be processed, therefore the processor added its message ID in the response.

     ```json
-    --8<-- "examples/batch_processing/src/getting_started_response.json"
+    --8<-- "examples/batch_processing/src/getting_started_sqs_response.json"
     ```

 === "Sample event"

     ```json
-    --8<-- "examples/batch_processing/src/getting_started_event.json"
+    --8<-- "examples/batch_processing/src/getting_started_sqs_event.json"
     ```

 #### FIFO queues

@@ -201,13 +201,13 @@ Processing batches from DynamoDB Streams works in three stages:

     The second record failed to be processed, therefore the processor added its sequence number in the response.

     ```json
-    --8<-- "examples/batch_processing/src/getting_started_dynamodb_event.json"
+    --8<-- "examples/batch_processing/src/getting_started_dynamodb_response.json"
     ```

 === "Sample event"

     ```json
-    --8<-- "examples/batch_processing/src/getting_started_dynamodb_response.json"
+    --8<-- "examples/batch_processing/src/getting_started_dynamodb_event.json"
     ```

 ### Partial failure mechanics

@@ -250,30 +250,48 @@ Inheritance is important because we need to access message IDs and sequence num

 === "SQS"

-    ```python hl_lines="8 17 27 34"
+    ```python hl_lines="8 17 27 35"
     --8<-- "examples/batch_processing/src/pydantic_sqs.py"
     ```

+=== "SQS - Sample Event"
+
+    ```json hl_lines="6 22"
+    --8<-- "examples/batch_processing/src/pydantic_sqs_event.json"
+    ```
+
 === "Kinesis Data Streams"

-    ```python hl_lines="9 10 20 28 34 41"
+    ```python hl_lines="9 10 20 28 34 42"
     --8<-- "examples/batch_processing/src/pydantic_kinesis.py"
     ```

+=== "Kinesis - Sample Event"
+
+    ```json hl_lines="8 24"
+    --8<-- "examples/batch_processing/src/pydantic_kinesis_event.json"
+    ```
+
 === "DynamoDB Streams"

-    ```python hl_lines="12 13 22 32 37 41 47 55"
+    ```python hl_lines="12 13 22 32 37 41 47 56"
     --8<-- "examples/batch_processing/src/pydantic_dynamodb.py"
     ```

+=== "DynamoDB - Sample Event"
+
+    ```json hl_lines="13-15 36-38"
+    --8<-- "examples/batch_processing/src/pydantic_dynamodb_event.json"
+    ```
+
 ### Accessing processed messages

 Use the context manager to access a list of all returned values from your `record_handler` function.

 * **When successful**. We will include a tuple with `success`, the result of `record_handler`, and the batch record
 * **When failed**. We will include a tuple with `fail`, exception as a string, and the batch record

-```python hl_lines="26-34" title="Accessing processed messages via context manager"
+```python hl_lines="28-33" title="Accessing processed messages via context manager"
 --8<-- "examples/batch_processing/src/context_manager_access.py"
 ```

@@ -285,13 +303,13 @@ We can automatically inject the [Lambda context](https://docs.aws.amazon.com/lam

 === "Recommended"

-    ```python hl_lines="18"
+    ```python hl_lines="18 27"
     --8<-- "examples/batch_processing/src/advanced_accessing_lambda_context.py"
     ```

 === "As a decorator (legacy)"

-    ```python hl_lines="18"
+    ```python hl_lines="18 26"
     --8<-- "examples/batch_processing/src/advanced_accessing_lambda_context_decorator.py"
     ```

@@ -352,22 +370,22 @@ As there are no external calls, you can unit test your code with `BatchProcessor`

 Given an SQS batch where the first batch record succeeds and the second fails processing, we should have a single item reported in the function response.

-=== "test_app.py"
+=== "getting_started_with_test.py"

     ```python
-    --8<-- "examples/batch_processing/testing/test_app.py"
+    --8<-- "examples/batch_processing/src/getting_started_with_test.py"
     ```

-=== "src/app.py"
+=== "getting_started_with_test_app.py"

     ```python
-    --8<-- "examples/batch_processing/testing/src/app.py"
+    --8<-- "examples/batch_processing/src/getting_started_with_test_app.py"
     ```

 === "Sample SQS event"

     ```json title="events/sqs_event.json"
-    --8<-- "examples/batch_processing/testing/events/sqs_event.json"
+    --8<-- "examples/batch_processing/src/getting_started_sqs_event.json"
     ```

 ## FAQ
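Reviewer note: for readers skimming the diff, the pattern these docs snippets wire together is the partial-failure response. A minimal sketch of the getting-started flow, assuming the Powertools v2 batch API (the handler body here is illustrative, not from the snippet files):

```python
from aws_lambda_powertools import Logger
from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor
from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord
from aws_lambda_powertools.utilities.typing import LambdaContext

logger = Logger()
processor = BatchProcessor(event_type=EventType.SQS)


def record_handler(record: SQSRecord):
    # Raising here marks this record as failed; its messageId then appears
    # in the batchItemFailures response the docs above describe.
    logger.info(record.body)


@batch_processor(record_handler=record_handler, processor=processor)
def lambda_handler(event: dict, context: LambdaContext):
    # response() returns {"batchItemFailures": [...]} for Lambda's
    # ReportBatchItemFailures contract.
    return processor.response()
```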

examples/batch_processing/src/context_manager_access.py

Lines changed: 5 additions & 4 deletions

@@ -1,5 +1,7 @@
+from __future__ import annotations
+
 import json
-from typing import Any, List, Literal, Tuple, Union
+from typing import List, Literal, Tuple

 from aws_lambda_powertools import Logger, Tracer
 from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType

@@ -27,10 +29,9 @@ def lambda_handler(event, context: LambdaContext):
     processed_messages: List[Tuple] = processor.process()

     for message in processed_messages:
-        status: Union[Literal["success"], Literal["fail"]] = message[0]
-        result: Any = message[1]
+        status: Literal["success"] | Literal["fail"] = message[0]
         record: SQSRecord = message[2]

-        logger.info(status, result, record)
+        logger.info(status, record=record)

     return processor.response()
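Reviewer note: the hunk only shows the loop, so here is a hedged sketch of the surrounding context-manager usage the snippet relies on (structure inferred from the docs above, not the exact file):

```python
from typing import List, Literal, Tuple

from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType
from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord

processor = BatchProcessor(event_type=EventType.SQS)


def record_handler(record: SQSRecord):
    return record.body


def lambda_handler(event: dict, context):
    batch = event["Records"]
    # Entering the processor as a context manager binds records and handler;
    # process() then returns one (status, result, record) tuple per record.
    with processor(records=batch, handler=record_handler):
        processed_messages: List[Tuple] = processor.process()

    for status, result, record in processed_messages:
        print(status, record)

    return processor.response()
```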
examples/batch_processing/src/getting_started_dynamodb_event.json

Lines changed: 46 additions & 2 deletions

@@ -1,7 +1,51 @@
 {
-  "batchItemFailures": [
+  "Records": [
     {
-      "itemIdentifier": "8640712661"
+      "eventID": "1",
+      "eventVersion": "1.0",
+      "dynamodb": {
+        "Keys": {
+          "Id": {
+            "N": "101"
+          }
+        },
+        "NewImage": {
+          "Message": {
+            "S": "failure"
+          }
+        },
+        "StreamViewType": "NEW_AND_OLD_IMAGES",
+        "SequenceNumber": "3275880929",
+        "SizeBytes": 26
+      },
+      "awsRegion": "us-west-2",
+      "eventName": "INSERT",
+      "eventSourceARN": "eventsource_arn",
+      "eventSource": "aws:dynamodb"
+    },
+    {
+      "eventID": "1",
+      "eventVersion": "1.0",
+      "dynamodb": {
+        "Keys": {
+          "Id": {
+            "N": "101"
+          }
+        },
+        "NewImage": {
+          "SomethingElse": {
+            "S": "success"
+          }
+        },
+        "StreamViewType": "NEW_AND_OLD_IMAGES",
+        "SequenceNumber": "8640712661",
+        "SizeBytes": 26
+      },
+      "awsRegion": "us-west-2",
+      "eventName": "INSERT",
+      "eventSourceARN": "eventsource_arn",
+      "eventSource": "aws:dynamodb"
     }
   ]
 }
examples/batch_processing/src/getting_started_dynamodb_response.json

Lines changed: 2 additions & 45 deletions

@@ -1,50 +1,7 @@
 {
-  "Records": [
+  "batchItemFailures": [
     {
-      "eventID": "1",
-      "eventVersion": "1.0",
-      "dynamodb": {
-        "Keys": {
-          "Id": {
-            "N": "101"
-          }
-        },
-        "NewImage": {
-          "Message": {
-            "S": "failure"
-          }
-        },
-        "StreamViewType": "NEW_AND_OLD_IMAGES",
-        "SequenceNumber": "3275880929",
-        "SizeBytes": 26
-      },
-      "awsRegion": "us-west-2",
-      "eventName": "INSERT",
-      "eventSourceARN": "eventsource_arn",
-      "eventSource": "aws:dynamodb"
-    },
-    {
-      "eventID": "1",
-      "eventVersion": "1.0",
-      "dynamodb": {
-        "Keys": {
-          "Id": {
-            "N": "101"
-          }
-        },
-        "NewImage": {
-          "SomethingElse": {
-            "S": "success"
-          }
-        },
-        "StreamViewType": "NEW_AND_OLD_IMAGES",
-        "SequenceNumber": "8640712661",
-        "SizeBytes": 26
-      },
-      "awsRegion": "us-west-2",
-      "eventName": "INSERT",
-      "eventSourceARN": "eventsource_arn",
-      "eventSource": "aws:dynamodb"
+      "itemIdentifier": "8640712661"
     }
   ]
 }
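Reviewer note: these two fixtures are mirror images of Lambda's partial batch response contract — the function receives `Records` and returns only the identifiers to retry. A small sketch of that mapping (the helper below is hypothetical, not part of the examples):

```python
from typing import Dict, List


def build_partial_response(failed_sequence_numbers: List[str]) -> Dict:
    # ReportBatchItemFailures contract: list only the records that should
    # be retried; an empty list means the whole batch succeeded.
    return {"batchItemFailures": [{"itemIdentifier": seq} for seq in failed_sequence_numbers]}


# With the sample event above, only the second record is reported back:
assert build_partial_response(["8640712661"]) == {
    "batchItemFailures": [{"itemIdentifier": "8640712661"}]
}
```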

examples/batch_processing/testing/test_app.py renamed to examples/batch_processing/src/getting_started_with_test.py

Lines changed: 5 additions & 6 deletions

@@ -3,8 +3,7 @@
 from pathlib import Path

 import pytest
-
-from examples.batch_processing.testing.src import app
+from getting_started_with_test_app import lambda_handler, processor


 def load_event(path: Path):

@@ -32,15 +31,15 @@ def sqs_event():

 def test_app_batch_partial_response(sqs_event, lambda_context):
     # GIVEN
-    processor = app.processor  # access processor for additional assertions
+    processor_result = processor  # access processor for additional assertions
     successful_record = sqs_event["Records"][0]
     failed_record = sqs_event["Records"][1]
     expected_response = {"batchItemFailures": [{"itemIdentifier": failed_record["messageId"]}]}

     # WHEN
-    ret = app.lambda_handler(sqs_event, lambda_context)
+    ret = lambda_handler(sqs_event, lambda_context)

     # THEN
     assert ret == expected_response
-    assert len(processor.fail_messages) == 1
-    assert processor.success_messages[0] == successful_record
+    assert len(processor_result.fail_messages) == 1
+    assert processor_result.success_messages[0] == successful_record
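Reviewer note: the test also depends on a `lambda_context` fixture defined outside this hunk. A minimal sketch of the fixture shape the Powertools testing docs typically use (field values are placeholders):

```python
from dataclasses import dataclass

import pytest


@pytest.fixture
def lambda_context():
    # Fake Lambda context object carrying just the attributes the handler reads.
    @dataclass
    class LambdaContext:
        function_name: str = "test"
        memory_limit_in_mb: int = 128
        invoked_function_arn: str = "arn:aws:lambda:eu-west-1:123456789012:function:test"
        aws_request_id: str = "da658bd3-2d6f-4e7b-8ec2-937234644fdc"

    return LambdaContext()
```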

examples/batch_processing/src/pydantic_dynamodb.py

Lines changed: 2 additions & 1 deletion

@@ -25,7 +25,7 @@ class OrderDynamoDB(BaseModel):
     # auto transform json string
     # so Pydantic can auto-initialize nested Order model
     @validator("Message", pre=True)
-    def transform_message_to_dict(self, value: Dict[Literal["S"], str]):
+    def transform_message_to_dict(cls, value: Dict[Literal["S"], str]):
         return json.loads(value["S"])

@@ -46,6 +46,7 @@ class OrderDynamoDBRecord(DynamoDBStreamRecordModel):

 @tracer.capture_method
 def record_handler(record: OrderDynamoDBRecord):
     if record.dynamodb.NewImage and record.dynamodb.NewImage.Message:
+        logger.info(record.dynamodb.NewImage.Message.item)
         return record.dynamodb.NewImage.Message.item
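Reviewer note: the `self` → `cls` change is a real bug fix. Pydantic v1 implicitly wraps `@validator` functions in `classmethod`, so the first parameter receives the model class, not an instance. A self-contained sketch of the corrected pattern (model fields simplified relative to the example):

```python
import json
from typing import Dict, Literal

from pydantic import BaseModel, validator


class OrderDynamoDB(BaseModel):
    Message: dict

    # pydantic v1 turns validators into classmethods, so the first argument
    # is the class itself -- naming it `self` was misleading.
    @validator("Message", pre=True)
    def transform_message_to_dict(cls, value: Dict[Literal["S"], str]):
        return json.loads(value["S"])


# The DynamoDB attribute value {"S": "<json>"} is decoded before validation:
order = OrderDynamoDB(Message={"S": '{"item": {"laptop": "amd"}}'})
print(order.Message["item"])  # {'laptop': 'amd'}
```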

examples/batch_processing/src/pydantic_dynamodb_event.json

Lines changed: 50 additions & 0 deletions

@@ -0,0 +1,50 @@
+{
+  "Records": [
+    {
+      "eventID": "1",
+      "eventVersion": "1.0",
+      "dynamodb": {
+        "Keys": {
+          "Id": {
+            "N": "101"
+          }
+        },
+        "NewImage": {
+          "Message": {
+            "S": "{\"item\": {\"laptop\": \"amd\"}}"
+          }
+        },
+        "StreamViewType": "NEW_AND_OLD_IMAGES",
+        "SequenceNumber": "3275880929",
+        "SizeBytes": 26
+      },
+      "awsRegion": "us-west-2",
+      "eventName": "INSERT",
+      "eventSourceARN": "eventsource_arn",
+      "eventSource": "aws:dynamodb"
+    },
+    {
+      "eventID": "1",
+      "eventVersion": "1.0",
+      "dynamodb": {
+        "Keys": {
+          "Id": {
+            "N": "101"
+          }
+        },
+        "NewImage": {
+          "SomethingElse": {
+            "S": "success"
+          }
+        },
+        "StreamViewType": "NEW_AND_OLD_IMAGES",
+        "SequenceNumber": "8640712661",
+        "SizeBytes": 26
+      },
+      "awsRegion": "us-west-2",
+      "eventName": "INSERT",
+      "eventSourceARN": "eventsource_arn",
+      "eventSource": "aws:dynamodb"
+    }
+  ]
+}

examples/batch_processing/src/pydantic_kinesis.py

Lines changed: 1 addition & 0 deletions

@@ -32,6 +32,7 @@ class OrderKinesisRecord(KinesisDataStreamRecord):

 @tracer.capture_method
 def record_handler(record: OrderKinesisRecord):
+    logger.info(record.kinesis.data.item)
     return record.kinesis.data.item
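Reviewer note: for context, `OrderKinesisRecord` extends the parser's Kinesis model so the base64 payload is decoded into an `Order` before the handler runs. A sketch under that assumption (names mirror the example's shape; the model bodies here are illustrative):

```python
import base64
import json

from aws_lambda_powertools.utilities.parser import BaseModel, validator
from aws_lambda_powertools.utilities.parser.models import (
    KinesisDataStreamRecord,
    KinesisDataStreamRecordPayload,
)


class Order(BaseModel):
    item: dict


class OrderKinesisPayloadRecord(KinesisDataStreamRecordPayload):
    data: Order

    # Kinesis delivers the payload base64-encoded; decode it so pydantic
    # can initialize the nested Order model.
    @validator("data", pre=True)
    def transform_message_to_dict(cls, value: str):
        return json.loads(base64.b64decode(value).decode("utf-8"))


class OrderKinesisRecord(KinesisDataStreamRecord):
    kinesis: OrderKinesisPayloadRecord
```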
