
Commit ea3b79b

docs: created utility docs
1 parent b4fd557 commit ea3b79b

22 files changed: +1003 -18 lines
@@ -0,0 +1,37 @@
import {
  BatchProcessor,
  EventType,
  processPartialResponse,
} from '@aws-lambda-powertools/batch';
import { Logger } from '@aws-lambda-powertools/logger';
import type {
  SQSEvent,
  SQSRecord,
  Context,
  SQSBatchResponse,
} from 'aws-lambda';

const processor = new BatchProcessor(EventType.SQS);
const logger = new Logger();

const recordHandler = (record: SQSRecord, lambdaContext?: Context): void => {
  const payload = record.body;
  if (payload) {
    const item = JSON.parse(payload);
    logger.info('Processed item', { item });
  }
  if (lambdaContext) {
    logger.info('Remaining time', {
      time: lambdaContext.getRemainingTimeInMillis(),
    });
  }
};

export const handler = async (
  event: SQSEvent,
  context: Context
): Promise<SQSBatchResponse> => {
  return processPartialResponse(event, recordHandler, processor, {
    context,
  });
};
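For reference, the SQSBatchResponse built by processPartialResponse lists only the records that could not be processed. A minimal sketch of the shape Lambda expects (the identifier below is illustrative, not taken from the snippet above):

// Only failed records are reported; an empty array means the whole batch succeeded.
const exampleResponse: SQSBatchResponse = {
  batchItemFailures: [{ itemIdentifier: 'message-id-of-failed-record' }],
};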
@@ -0,0 +1,38 @@
import { BatchProcessor, EventType } from '@aws-lambda-powertools/batch';
import { Logger } from '@aws-lambda-powertools/logger';
import type {
  SQSEvent,
  SQSRecord,
  Context,
  SQSBatchResponse,
} from 'aws-lambda';

const processor = new BatchProcessor(EventType.SQS);
const logger = new Logger();

const recordHandler = (record: SQSRecord): void => {
  const payload = record.body;
  if (payload) {
    const item = JSON.parse(payload);
    logger.info('Processed item', { item });
  }
};

export const handler = async (
  event: SQSEvent,
  context: Context
): Promise<SQSBatchResponse> => {
  const batch = event.Records;

  processor.register(batch, recordHandler, { context });
  const processedMessages = await processor.process();

  // Each processed message is a tuple whose first element is the status
  // and whose third element is the original record.
  for (const message of processedMessages) {
    const status: 'success' | 'fail' = message[0];
    const record = message[2];

    logger.info('Processed record', { status, record });
  }

  return processor.response();
};
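Each entry of processedMessages is a response tuple; positions 0 and 2 are used above. A minimal sketch, assuming the middle element holds the handler's return value (as in the SuccessResponse/FailureResponse types), destructures it directly:

const [status, result, record] = message; // e.g. ['success', handlerResult, originalRecord]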
@@ -0,0 +1,95 @@
import {
  DynamoDBClient,
  BatchWriteItemCommand,
} from '@aws-sdk/client-dynamodb';
import { marshall } from '@aws-sdk/util-dynamodb';
import {
  BasePartialProcessor,
  processPartialResponse,
} from '@aws-lambda-powertools/batch';
import type {
  SuccessResponse,
  FailureResponse,
  BaseRecord,
} from '@aws-lambda-powertools/batch';
import type { SQSEvent, Context, SQSBatchResponse } from 'aws-lambda';

const tableName = process.env.TABLE_NAME || 'table-not-found';

class MyPartialProcessor extends BasePartialProcessor {
  #tableName: string;
  #client?: DynamoDBClient;

  public constructor(tableName: string) {
    super();
    this.#tableName = tableName;
  }

  // This example only implements the synchronous path.
  public async asyncProcessRecord(
    _record: BaseRecord
  ): Promise<SuccessResponse | FailureResponse> {
    throw new Error('Not implemented');
  }

  /**
   * It's called once, **after** processing the batch.
   *
   * Here we write all successfully processed messages to DynamoDB.
   */
  public clean(): void {
    // We know that the client is defined because clean() is called after prepare()
    // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
    this.#client!.send(
      new BatchWriteItemCommand({
        RequestItems: {
          [this.#tableName]: this.successMessages.map((message) => ({
            PutRequest: {
              Item: marshall(message),
            },
          })),
        },
      })
    );
  }

  /**
   * It's called once, **before** processing the batch.
   *
   * It initializes a new client and cleans up any existing data.
   */
  public prepare(): void {
    this.#client = new DynamoDBClient({});
    this.successMessages = [];
  }

  /**
   * It handles how your record is processed.
   *
   * Here we keep track of the status of each run; `this.handler` is
   * the function passed when calling `processor.register()`.
   */
  public processRecord(record: BaseRecord): SuccessResponse | FailureResponse {
    try {
      const result = this.handler(record);

      return this.successHandler(record, result);
    } catch (error) {
      return this.failureHandler(record, error as Error);
    }
  }
}

const processor = new MyPartialProcessor(tableName);

const recordHandler = (): number => {
  return Math.floor(Math.random() * 10);
};

export const handler = async (
  event: SQSEvent,
  context: Context
): Promise<SQSBatchResponse> => {
  return processPartialResponse(event, recordHandler, processor, {
    context,
  });
};
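When this custom processor is handed to processPartialResponse, it follows the lifecycle described in the comments above: prepare() runs once before the batch (creating the DynamoDB client and resetting successMessages), processRecord() runs for each record and records a success or failure, and clean() runs once afterwards to batch-write the successful messages to the table named by TABLE_NAME.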
@@ -0,0 +1,53 @@
import { Metrics, MetricUnits } from '@aws-lambda-powertools/metrics';
import {
  BatchProcessor,
  EventType,
  FailureResponse,
  EventSourceDataClassTypes,
  processPartialResponse,
} from '@aws-lambda-powertools/batch';
import { Logger } from '@aws-lambda-powertools/logger';
import type {
  SQSEvent,
  SQSRecord,
  Context,
  SQSBatchResponse,
} from 'aws-lambda';

class MyProcessor extends BatchProcessor {
  #metrics: Metrics;

  public constructor(eventType: keyof typeof EventType) {
    super(eventType);
    this.#metrics = new Metrics({ namespace: 'test' });
  }

  public failureHandler(
    record: EventSourceDataClassTypes,
    error: Error
  ): FailureResponse {
    this.#metrics.addMetric('BatchRecordFailures', MetricUnits.Count, 1);

    return super.failureHandler(record, error);
  }
}

const processor = new MyProcessor(EventType.SQS);
const logger = new Logger();

const recordHandler = (record: SQSRecord): void => {
  const payload = record.body;
  if (payload) {
    const item = JSON.parse(payload);
    logger.info('Processed item', { item });
  }
};

export const handler = async (
  event: SQSEvent,
  context: Context
): Promise<SQSBatchResponse> => {
  return processPartialResponse(event, recordHandler, processor, {
    context,
  });
};
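Because the overridden failureHandler delegates to super.failureHandler after incrementing the counter, each failed record is still reported in the batch response while also emitting one BatchRecordFailures count; how and when those metrics are flushed depends on the Metrics utility's configuration, which this snippet does not show.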
@@ -0,0 +1,31 @@
import {
  AsyncBatchProcessor,
  EventType,
  asyncProcessPartialResponse,
} from '@aws-lambda-powertools/batch';
import axios from 'axios'; // axios is an external dependency
import type {
  SQSEvent,
  SQSRecord,
  Context,
  SQSBatchResponse,
} from 'aws-lambda';

const processor = new AsyncBatchProcessor(EventType.SQS);

const recordHandler = async (record: SQSRecord): Promise<number> => {
  const res = await axios.post('https://httpbin.org/anything', {
    message: record.body,
  });

  return res.status;
};

export const handler = async (
  event: SQSEvent,
  context: Context
): Promise<SQSBatchResponse> => {
  return await asyncProcessPartialResponse(event, recordHandler, processor, {
    context,
  });
};
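Since the record handler here is asynchronous, asyncProcessPartialResponse awaits the handler invocations and, in line with the partial-response model used throughout these examples, reports handlers that throw or reject as batch item failures in the returned SQSBatchResponse.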
@@ -0,0 +1,35 @@
import {
  BatchProcessor,
  EventType,
  processPartialResponse,
} from '@aws-lambda-powertools/batch';
import { Logger } from '@aws-lambda-powertools/logger';
import type {
  DynamoDBStreamEvent,
  DynamoDBRecord,
  Context,
  DynamoDBBatchResponse,
} from 'aws-lambda';

const processor = new BatchProcessor(EventType.DynamoDBStreams);
const logger = new Logger();

const recordHandler = (record: DynamoDBRecord): void => {
  if (record.dynamodb && record.dynamodb.NewImage) {
    logger.info('Processing record', { record: record.dynamodb.NewImage });
    const message = record.dynamodb.NewImage.Message.S;
    if (message) {
      const payload = JSON.parse(message);
      logger.info('Processed item', { item: payload });
    }
  }
};

export const handler = async (
  event: DynamoDBStreamEvent,
  context: Context
): Promise<DynamoDBBatchResponse> => {
  return processPartialResponse(event, recordHandler, processor, {
    context,
  });
};
@@ -0,0 +1,30 @@
import {
  BatchProcessor,
  EventType,
  processPartialResponse,
} from '@aws-lambda-powertools/batch';
import { Logger } from '@aws-lambda-powertools/logger';
import type {
  KinesisStreamEvent,
  KinesisStreamRecord,
  Context,
  KinesisStreamBatchResponse,
} from 'aws-lambda';

const processor = new BatchProcessor(EventType.KinesisDataStreams);
const logger = new Logger();

const recordHandler = (record: KinesisStreamRecord): void => {
  logger.info('Processing record', { record: record.kinesis.data });
  // Kinesis record data is base64-encoded, so decode it before parsing the JSON payload.
  const payload = JSON.parse(
    Buffer.from(record.kinesis.data, 'base64').toString()
  );
  logger.info('Processed item', { item: payload });
};

export const handler = async (
  event: KinesisStreamEvent,
  context: Context
): Promise<KinesisStreamBatchResponse> => {
  return processPartialResponse(event, recordHandler, processor, {
    context,
  });
};
@@ -0,0 +1,33 @@
import {
  BatchProcessor,
  EventType,
  processPartialResponse,
} from '@aws-lambda-powertools/batch';
import { Logger } from '@aws-lambda-powertools/logger';
import type {
  SQSEvent,
  SQSRecord,
  Context,
  SQSBatchResponse,
} from 'aws-lambda';

const processor = new BatchProcessor(EventType.SQS);
const logger = new Logger();

const recordHandler = (record: SQSRecord): void => {
  const payload = record.body;
  if (payload) {
    const item = JSON.parse(payload);
    logger.info('Processed item', { item });
  }
};

export const handler = async (
  event: SQSEvent,
  context: Context
): Promise<SQSBatchResponse> => {
  return processPartialResponse(event, recordHandler, processor, {
    context,
  });
};

export { processor };
@@ -0,0 +1,31 @@
import {
  SqsFifoPartialProcessor,
  processPartialResponse,
} from '@aws-lambda-powertools/batch';
import { Logger } from '@aws-lambda-powertools/logger';
import type {
  SQSEvent,
  SQSRecord,
  Context,
  SQSBatchResponse,
} from 'aws-lambda';

const processor = new SqsFifoPartialProcessor();
const logger = new Logger();

const recordHandler = (record: SQSRecord): void => {
  const payload = record.body;
  if (payload) {
    const item = JSON.parse(payload);
    logger.info('Processed item', { item });
  }
};

export const handler = async (
  event: SQSEvent,
  context: Context
): Promise<SQSBatchResponse> => {
  return processPartialResponse(event, recordHandler, processor, {
    context,
  });
};
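SqsFifoPartialProcessor targets SQS FIFO queues; unlike the standard BatchProcessor, it is documented to stop processing the remaining records in the batch on the first failure so that message ordering is preserved.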
