diff --git a/docs/utilities/batch.md b/docs/utilities/batch.md
index c8e253492a..e6f4ae2c5f 100644
--- a/docs/utilities/batch.md
+++ b/docs/utilities/batch.md
@@ -113,11 +113,11 @@ Processing batches from SQS works in three stages:
=== "index.ts"
- ```typescript hl_lines="1-5 14 17 29-31"
+ ```typescript hl_lines="1-5 9 12 21-23"
--8<--
- examples/snippets/batch/gettingStartedSQS.ts::16
- examples/snippets/batch/gettingStartedSQS.ts:18:29
- examples/snippets/batch/gettingStartedSQS.ts:31:34
+ examples/snippets/batch/gettingStartedSQS.ts::11
+ examples/snippets/batch/gettingStartedSQS.ts:13:21
+ examples/snippets/batch/gettingStartedSQS.ts:23:25
--8<--
```
@@ -144,7 +144,7 @@ Processing batches from SQS works in three stages:
When using [SQS FIFO queues](https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/FIFO-queues.html){target="_blank"}, we will stop processing messages after the first failure, and return all failed and unprocessed messages in `batchItemFailures`.
This helps preserve the ordering of messages in your queue.
-```typescript hl_lines="1-4 13 28-30"
+```typescript hl_lines="1-4 8 20-22"
--8<-- "examples/snippets/batch/gettingStartedSQSFifo.ts"
```
@@ -167,11 +167,11 @@ Processing batches from Kinesis works in three stages:
=== "index.ts"
- ```typescript hl_lines="1-5 14 17 27-29"
+ ```typescript hl_lines="1-5 9 12 19-21"
--8<-- "examples/snippets/batch/gettingStartedKinesis.ts"
```
- 1. **Step 1**. Creates a partial failure batch processor for Kinesis Data Streams. See [partial failure mechanics for details](#partial-failure-mechanics)
+ 1. Creates a partial failure batch processor for Kinesis Data Streams. See [partial failure mechanics for details](#partial-failure-mechanics)
=== "Sample response"
@@ -200,11 +200,11 @@ Processing batches from DynamoDB Streams works in three stages:
=== "index.ts"
- ```typescript hl_lines="1-5 14 17 32-34"
+ ```typescript hl_lines="1-5 9 12 24-26"
--8<-- "examples/snippets/batch/gettingStartedDynamoDBStreams.ts"
```
- 1. **Step 1**. Creates a partial failure batch processor for DynamoDB Streams. See [partial failure mechanics for details](#partial-failure-mechanics)
+ 1. Creates a partial failure batch processor for DynamoDB Streams. See [partial failure mechanics for details](#partial-failure-mechanics)
=== "Sample response"
@@ -226,17 +226,17 @@ By default, we catch any exception raised by your record handler function. This
=== "Sample error handling with custom exception"
- ```typescript hl_lines="30"
+ ```typescript hl_lines="25"
--8<--
- examples/snippets/batch/gettingStartedErrorHandling.ts::29
- examples/snippets/batch/gettingStartedErrorHandling.ts:31:38
- examples/snippets/batch/gettingStartedErrorHandling.ts:40:43
+ examples/snippets/batch/gettingStartedErrorHandling.ts::24
+ examples/snippets/batch/gettingStartedErrorHandling.ts:26:30
+ examples/snippets/batch/gettingStartedErrorHandling.ts:32:
--8<--
```
1. Any exception works here. See [extending BatchProcessorSync section, if you want to override this behavior.](#extending-batchprocessor)
- 2. Exceptions raised in `record_handler` will propagate to `process_partial_response`.
We catch them and include each failed batch item identifier in the response dictionary (see `Sample response` tab).
+ 2. Exceptions raised in `recordHandler` will propagate to `processPartialResponse`.
We catch them and include each failed batch item identifier in the response dictionary (see `Sample response` tab).
=== "Sample response"
@@ -397,7 +397,7 @@ Use the `BatchProcessor` directly in your function to access a list of all retur
* **When successful**. We will include a tuple with `success`, the result of `recordHandler`, and the batch record
* **When failed**. We will include a tuple with `fail`, exception as a string, and the batch record
-```typescript hl_lines="25 27-28 30-33 38" title="Accessing processed messages"
+```typescript hl_lines="17 19-20 23 28" title="Accessing processed messages"
--8<-- "examples/snippets/batch/accessProcessedMessages.ts"
```
@@ -410,7 +410,7 @@ Within your `recordHandler` function, you might need access to the Lambda contex
We can automatically inject the [Lambda context](https://docs.aws.amazon.com/lambda/latest/dg/typescript-context.html){target="_blank"} into your `recordHandler` as optional second argument if you register it when using `BatchProcessorSync` or the `processPartialResponseSync` function.
-```typescript hl_lines="17 35"
+```typescript hl_lines="12 27"
--8<-- "examples/snippets/batch/accessLambdaContext.ts"
```
@@ -425,7 +425,7 @@ For these scenarios, you can subclass `BatchProcessor` and quickly override `suc
Let's suppose you'd like to add a metric named `BatchRecordFailures` for each batch record that failed processing
-```typescript hl_lines="3 20 24 31 37" title="Extending failure handling mechanism in BatchProcessor"
+```typescript hl_lines="3 15 19 26 32" title="Extending failure handling mechanism in BatchProcessor"
--8<-- "examples/snippets/batch/extendingFailure.ts"
```
diff --git a/examples/snippets/batch/accessLambdaContext.ts b/examples/snippets/batch/accessLambdaContext.ts
index 0f8aa89409..301d51c052 100644
--- a/examples/snippets/batch/accessLambdaContext.ts
+++ b/examples/snippets/batch/accessLambdaContext.ts
@@ -4,12 +4,7 @@ import {
processPartialResponse,
} from '@aws-lambda-powertools/batch';
import { Logger } from '@aws-lambda-powertools/logger';
-import type {
- SQSEvent,
- SQSRecord,
- Context,
- SQSBatchResponse,
-} from 'aws-lambda';
+import type { SQSRecord, Context, SQSHandler } from 'aws-lambda';
const processor = new BatchProcessor(EventType.SQS);
const logger = new Logger();
@@ -27,11 +22,7 @@ const recordHandler = (record: SQSRecord, lambdaContext?: Context): void => {
}
};
-export const handler = async (
- event: SQSEvent,
- context: Context
-): Promise<SQSBatchResponse> => {
- return processPartialResponse(event, recordHandler, processor, {
+export const handler: SQSHandler = async (event, context) =>
+ processPartialResponse(event, recordHandler, processor, {
context,
});
-};
diff --git a/examples/snippets/batch/accessProcessedMessages.ts b/examples/snippets/batch/accessProcessedMessages.ts
index ea99abeda7..e60d4bc0e6 100644
--- a/examples/snippets/batch/accessProcessedMessages.ts
+++ b/examples/snippets/batch/accessProcessedMessages.ts
@@ -1,11 +1,6 @@
import { BatchProcessor, EventType } from '@aws-lambda-powertools/batch';
import { Logger } from '@aws-lambda-powertools/logger';
-import type {
- SQSEvent,
- SQSRecord,
- Context,
- SQSBatchResponse,
-} from 'aws-lambda';
+import type { SQSRecord, SQSHandler } from 'aws-lambda';
const processor = new BatchProcessor(EventType.SQS);
const logger = new Logger();
@@ -18,19 +13,14 @@ const recordHandler = (record: SQSRecord): void => {
}
};
-export const handler = async (
- event: SQSEvent,
- context: Context
-): Promise<SQSBatchResponse> => {
+export const handler: SQSHandler = async (event, context) => {
const batch = event.Records; // (1)!
processor.register(batch, recordHandler, { context }); // (2)!
const processedMessages = await processor.process();
for (const message of processedMessages) {
- const status: 'success' | 'fail' = message[0];
- const error = message[1];
- const record = message[2];
+ const [status, error, record] = message;
logger.info('Processed record', { status, record, error });
}
diff --git a/examples/snippets/batch/advancedTracingRecordHandler.ts b/examples/snippets/batch/advancedTracingRecordHandler.ts
index ea85b6833a..71bd750bd1 100644
--- a/examples/snippets/batch/advancedTracingRecordHandler.ts
+++ b/examples/snippets/batch/advancedTracingRecordHandler.ts
@@ -6,12 +6,7 @@ import {
import { Tracer } from '@aws-lambda-powertools/tracer';
import { captureLambdaHandler } from '@aws-lambda-powertools/tracer/middleware';
import middy from '@middy/core';
-import type {
- SQSEvent,
- SQSRecord,
- Context,
- SQSBatchResponse,
-} from 'aws-lambda';
+import type { SQSRecord, SQSHandler, SQSEvent } from 'aws-lambda';
const processor = new BatchProcessor(EventType.SQS);
const tracer = new Tracer({ serviceName: 'serverlessAirline' });
@@ -35,10 +30,8 @@ const recordHandler = async (record: SQSRecord): Promise<void> => {
subsegment?.close(); // (3)!
};
-export const handler = middy(
- async (event: SQSEvent, context: Context): Promise<SQSBatchResponse> => {
- return processPartialResponse(event, recordHandler, processor, {
- context,
- });
- }
+export const handler: SQSHandler = middy(async (event: SQSEvent, context) =>
+ processPartialResponse(event, recordHandler, processor, {
+ context,
+ })
).use(captureLambdaHandler(tracer));
diff --git a/examples/snippets/batch/customPartialProcessor.ts b/examples/snippets/batch/customPartialProcessor.ts
index 65bcd1694c..5cad5ecb70 100644
--- a/examples/snippets/batch/customPartialProcessor.ts
+++ b/examples/snippets/batch/customPartialProcessor.ts
@@ -14,7 +14,7 @@ import type {
FailureResponse,
BaseRecord,
} from '@aws-lambda-powertools/batch/types';
-import type { SQSEvent, Context, SQSBatchResponse } from 'aws-lambda';
+import type { SQSHandler } from 'aws-lambda';
const tableName = process.env.TABLE_NAME || 'table-not-found';
@@ -89,11 +89,7 @@ const recordHandler = (): number => {
return Math.floor(randomInt(1, 10));
};
-export const handler = async (
- event: SQSEvent,
- context: Context
-): Promise<SQSBatchResponse> => {
- return processPartialResponse(event, recordHandler, processor, {
+export const handler: SQSHandler = async (event, context) =>
+ processPartialResponse(event, recordHandler, processor, {
context,
});
-};
diff --git a/examples/snippets/batch/extendingFailure.ts b/examples/snippets/batch/extendingFailure.ts
index ccfa78966a..e21ba98096 100644
--- a/examples/snippets/batch/extendingFailure.ts
+++ b/examples/snippets/batch/extendingFailure.ts
@@ -9,12 +9,7 @@ import type {
EventSourceDataClassTypes,
} from '@aws-lambda-powertools/batch/types';
import { Logger } from '@aws-lambda-powertools/logger';
-import type {
- SQSEvent,
- SQSRecord,
- Context,
- SQSBatchResponse,
-} from 'aws-lambda';
+import type { SQSRecord, SQSHandler } from 'aws-lambda';
class MyProcessor extends BatchProcessor {
#metrics: Metrics;
@@ -45,11 +40,7 @@ const recordHandler = (record: SQSRecord): void => {
}
};
-export const handler = async (
- event: SQSEvent,
- context: Context
-): Promise<SQSBatchResponse> => {
- return processPartialResponse(event, recordHandler, processor, {
+export const handler: SQSHandler = async (event, context) =>
+ processPartialResponse(event, recordHandler, processor, {
context,
});
-};
diff --git a/examples/snippets/batch/gettingStartedDynamoDBStreams.ts b/examples/snippets/batch/gettingStartedDynamoDBStreams.ts
index 8c4ee77e98..4c93fdb389 100644
--- a/examples/snippets/batch/gettingStartedDynamoDBStreams.ts
+++ b/examples/snippets/batch/gettingStartedDynamoDBStreams.ts
@@ -4,12 +4,7 @@ import {
processPartialResponse,
} from '@aws-lambda-powertools/batch';
import { Logger } from '@aws-lambda-powertools/logger';
-import type {
- DynamoDBStreamEvent,
- DynamoDBRecord,
- Context,
- DynamoDBBatchResponse,
-} from 'aws-lambda';
+import type { DynamoDBRecord, DynamoDBStreamHandler } from 'aws-lambda';
const processor = new BatchProcessor(EventType.DynamoDBStreams); // (1)!
const logger = new Logger();
@@ -25,11 +20,7 @@ const recordHandler = async (record: DynamoDBRecord): Promise<void> => {
}
};
-export const handler = async (
- event: DynamoDBStreamEvent,
- context: Context
-): Promise<DynamoDBBatchResponse> => {
- return processPartialResponse(event, recordHandler, processor, {
+export const handler: DynamoDBStreamHandler = async (event, context) =>
+ processPartialResponse(event, recordHandler, processor, {
context,
});
-};
diff --git a/examples/snippets/batch/gettingStartedErrorHandling.ts b/examples/snippets/batch/gettingStartedErrorHandling.ts
index 3f36c18e62..022f82378c 100644
--- a/examples/snippets/batch/gettingStartedErrorHandling.ts
+++ b/examples/snippets/batch/gettingStartedErrorHandling.ts
@@ -4,12 +4,7 @@ import {
processPartialResponse,
} from '@aws-lambda-powertools/batch';
import { Logger } from '@aws-lambda-powertools/logger';
-import type {
- SQSEvent,
- SQSRecord,
- Context,
- SQSBatchResponse,
-} from 'aws-lambda';
+import type { SQSRecord, SQSHandler } from 'aws-lambda';
const processor = new BatchProcessor(EventType.SQS);
const logger = new Logger();
@@ -32,12 +27,8 @@ const recordHandler = async (record: SQSRecord): Promise<void> => {
}
};
-export const handler = async (
- event: SQSEvent,
- context: Context
-): Promise<SQSBatchResponse> => {
+export const handler: SQSHandler = async (event, context) =>
// prettier-ignore
- return processPartialResponse(event, recordHandler, processor, { // (2)!
+ processPartialResponse(event, recordHandler, processor, { // (2)!
context,
});
-};
diff --git a/examples/snippets/batch/gettingStartedKinesis.ts b/examples/snippets/batch/gettingStartedKinesis.ts
index b4f3e7403f..832f09385e 100644
--- a/examples/snippets/batch/gettingStartedKinesis.ts
+++ b/examples/snippets/batch/gettingStartedKinesis.ts
@@ -4,12 +4,7 @@ import {
processPartialResponse,
} from '@aws-lambda-powertools/batch';
import { Logger } from '@aws-lambda-powertools/logger';
-import type {
- KinesisStreamEvent,
- KinesisStreamRecord,
- Context,
- KinesisStreamBatchResponse,
-} from 'aws-lambda';
+import type { KinesisStreamHandler, KinesisStreamRecord } from 'aws-lambda';
const processor = new BatchProcessor(EventType.KinesisDataStreams); // (1)!
const logger = new Logger();
@@ -20,11 +15,7 @@ const recordHandler = async (record: KinesisStreamRecord): Promise<void> => {
logger.info('Processed item', { item: payload });
};
-export const handler = async (
- event: KinesisStreamEvent,
- context: Context
-): Promise<KinesisStreamBatchResponse> => {
- return processPartialResponse(event, recordHandler, processor, {
+export const handler: KinesisStreamHandler = async (event, context) =>
+ processPartialResponse(event, recordHandler, processor, {
context,
});
-};
diff --git a/examples/snippets/batch/gettingStartedSQS.ts b/examples/snippets/batch/gettingStartedSQS.ts
index 5d03281490..50de7474de 100644
--- a/examples/snippets/batch/gettingStartedSQS.ts
+++ b/examples/snippets/batch/gettingStartedSQS.ts
@@ -4,12 +4,7 @@ import {
processPartialResponse,
} from '@aws-lambda-powertools/batch';
import { Logger } from '@aws-lambda-powertools/logger';
-import type {
- SQSEvent,
- SQSRecord,
- Context,
- SQSBatchResponse,
-} from 'aws-lambda';
+import type { SQSRecord, SQSHandler } from 'aws-lambda';
const processor = new BatchProcessor(EventType.SQS); // (1)!
const logger = new Logger();
@@ -23,13 +18,10 @@ const recordHandler = async (record: SQSRecord): Promise<void> => { // (2)!
}
};
-export const handler = async (
- event: SQSEvent,
- context: Context
-): Promise<SQSBatchResponse> => {
+export const handler: SQSHandler = async (event, context) =>
// prettier-ignore
- return processPartialResponse(event, recordHandler, processor, { // (3)!
+ processPartialResponse(event, recordHandler, processor, { // (3)!
context,
});
-};
+
export { processor };
diff --git a/examples/snippets/batch/gettingStartedSQSFifo.ts b/examples/snippets/batch/gettingStartedSQSFifo.ts
index 3b7e03b3a6..b4b3cc5ddf 100644
--- a/examples/snippets/batch/gettingStartedSQSFifo.ts
+++ b/examples/snippets/batch/gettingStartedSQSFifo.ts
@@ -3,12 +3,7 @@ import {
processPartialResponseSync,
} from '@aws-lambda-powertools/batch';
import { Logger } from '@aws-lambda-powertools/logger';
-import type {
- SQSEvent,
- SQSRecord,
- Context,
- SQSBatchResponse,
-} from 'aws-lambda';
+import type { SQSHandler, SQSRecord } from 'aws-lambda';
const processor = new SqsFifoPartialProcessor(); // (1)!
const logger = new Logger();
@@ -21,11 +16,7 @@ const recordHandler = (record: SQSRecord): void => {
}
};
-export const handler = async (
- event: SQSEvent,
- context: Context
-): Promise<SQSBatchResponse> => {
- return processPartialResponseSync(event, recordHandler, processor, {
+export const handler: SQSHandler = async (event, context) =>
+ processPartialResponseSync(event, recordHandler, processor, {
context,
});
-};
diff --git a/examples/snippets/batch/testingYourCode.ts b/examples/snippets/batch/testingYourCode.ts
index d477e3dfc6..9047f8f8df 100644
--- a/examples/snippets/batch/testingYourCode.ts
+++ b/examples/snippets/batch/testingYourCode.ts
@@ -36,7 +36,7 @@ describe('Function tests', () => {
};
// Act
- const response = await handler(sqsEvent, context);
+ const response = await handler(sqsEvent, context, () => {});
// Assess
expect(response).toEqual(expectedResponse);
diff --git a/package.json b/package.json
index cd21eb0075..09dad7dfce 100644
--- a/package.json
+++ b/package.json
@@ -32,7 +32,7 @@
"docs-generateApiDoc": "typedoc .",
"docs-runLocalApiDoc": "npx live-server api",
"postpublish": "git restore .",
- "lint:markdown": "markdownlint-cli2 '**/*.md' '#node_modules' '#**/*/node_modules' '#docs/changelog.md' '#LICENSE.md' '#.github' '#**/*/CHANGELOG.md' '#examples/app/README.md' '#packages/batch/README.md' '#packages/commons/README.md' '#packages/idempotency/README.md' '#packages/jmespath/README.md' '#packages/logger/README.md' '#packages/metrics/README.md' '#packages/parameters/README.md' '#packages/parser/README.md' '#packages/tracer/README.md'"
+ "lint:markdown": "markdownlint-cli2 '**/*.md' '#node_modules' '#**/*/node_modules' '#docs/changelog.md' '#LICENSE.md' '#.github' '#**/*/CHANGELOG.md' '#examples/app/README.md' '#packages/commons/README.md' '#packages/idempotency/README.md' '#packages/jmespath/README.md' '#packages/logger/README.md' '#packages/metrics/README.md' '#packages/parameters/README.md' '#packages/parser/README.md' '#packages/tracer/README.md'"
},
"repository": {
"type": "git",
diff --git a/packages/batch/README.md b/packages/batch/README.md
index e151314d71..17798b0e2f 100644
--- a/packages/batch/README.md
+++ b/packages/batch/README.md
@@ -1,43 +1,19 @@
-# Powertools for AWS Lambda (TypeScript) - Batch Processing Utility
+# Powertools for AWS Lambda (TypeScript) - Batch Processing Utility
Powertools for AWS Lambda (TypeScript) is a developer toolkit to implement Serverless [best practices and increase developer velocity](https://docs.powertools.aws.dev/lambda/typescript/latest/#features).
-You can use the package in both TypeScript and JavaScript code bases.
-
-- [Intro](#intro)
-- [Key features](#key-features)
-- [Usage](#usage)
- - [Batch Processor](#batch-processor)
- - [SQS Processor](#sqs-processor)
- - [Kinesis Processor](#kinesis-processor)
- - [DynamoDB Streams Processor](#dynamodb-streams-processor)
- - [Async processing](#async-processing)
-- [Contribute](#contribute)
-- [Roadmap](#roadmap)
-- [Connect](#connect)
-- [How to support Powertools for AWS Lambda (TypeScript)?](#how-to-support-powertools-for-aws-lambda-typescript)
- - [Becoming a reference customer](#becoming-a-reference-customer)
- - [Sharing your work](#sharing-your-work)
- - [Using Lambda Layer](#using-lambda-layer)
-- [Credits](#credits)
-- [License](#license)
+You can use the library in both TypeScript and JavaScript code bases.
## Intro
-The batch processing utility handles partial failures when processing batches from Amazon SQS, Amazon Kinesis Data Streams, and Amazon DynamoDB Streams.
-
-## Key features
-
-* Reports batch item failures to reduce number of retries for a record upon errors
-* Simple interface to process each batch record
-* Build your own batch processor by extending primitives
+The Batch Processing utility handles partial failures when processing batches from Amazon SQS, Amazon Kinesis Data Streams, and Amazon DynamoDB Streams.
## Usage
To get started, install the library by running:
```sh
-npm install @aws-lambda-powertools/batch
+npm i @aws-lambda-powertools/batch
```
### Batch Processor
@@ -59,12 +35,7 @@ import {
processPartialResponseSync,
} from '@aws-lambda-powertools/batch';
import { Logger } from '@aws-lambda-powertools/logger';
-import type {
- SQSEvent,
- SQSRecord,
- Context,
- SQSBatchResponse,
-} from 'aws-lambda';
+import type { SQSHandler, SQSRecord } from 'aws-lambda';
const processor = new BatchProcessorSync(EventType.SQS);
const logger = new Logger();
@@ -77,15 +48,10 @@ const recordHandler = (record: SQSRecord): void => {
}
};
-export const handler = async (
- event: SQSEvent,
- context: Context
-): Promise<SQSBatchResponse> => {
- return processPartialResponseSync(event, recordHandler, processor, {
+export const handler: SQSHandler = async (event, context) =>
+ processPartialResponseSync(event, recordHandler, processor, {
context,
});
-};
-export { processor };
```
### Kinesis Processor
@@ -99,12 +65,7 @@ import {
processPartialResponseSync,
} from '@aws-lambda-powertools/batch';
import { Logger } from '@aws-lambda-powertools/logger';
-import type {
- KinesisStreamEvent,
- KinesisStreamRecord,
- Context,
- KinesisStreamBatchResponse,
-} from 'aws-lambda';
+import type { KinesisStreamHandler, KinesisStreamRecord } from 'aws-lambda';
const processor = new BatchProcessorSync(EventType.KinesisDataStreams);
const logger = new Logger();
@@ -115,14 +76,10 @@ const recordHandler = (record: KinesisStreamRecord): void => {
logger.info('Processed item', { item: payload });
};
-export const handler = async (
- event: KinesisStreamEvent,
- context: Context
-): Promise<KinesisStreamBatchResponse> => {
- return processPartialResponseSync(event, recordHandler, processor, {
+export const handler: KinesisStreamHandler = async (event, context) =>
+ processPartialResponseSync(event, recordHandler, processor, {
context,
});
-};
```
### DynamoDB Streams Processor
@@ -131,19 +88,14 @@ When using DynamoDB Streams as a Lambda event source, you can use the `BatchProc
```ts
import {
- BatchProcessorSync,
+ BatchProcessor,
EventType,
processPartialResponseSync,
} from '@aws-lambda-powertools/batch';
import { Logger } from '@aws-lambda-powertools/logger';
-import type {
- DynamoDBStreamEvent,
- DynamoDBRecord,
- Context,
- DynamoDBBatchResponse,
-} from 'aws-lambda';
-
-const processor = new BatchProcessorSync(EventType.DynamoDBStreams);
+import type { DynamoDBRecord, DynamoDBStreamHandler } from 'aws-lambda';
+
+const processor = new BatchProcessor(EventType.DynamoDBStreams); // (1)!
const logger = new Logger();
const recordHandler = (record: DynamoDBRecord): void => {
@@ -157,14 +109,10 @@ const recordHandler = (record: DynamoDBRecord): void => {
}
};
-export const handler = async (
- event: DynamoDBStreamEvent,
- context: Context
-): Promise<DynamoDBBatchResponse> => {
- return processPartialResponseSync(event, recordHandler, processor, {
+export const handler: DynamoDBStreamHandler = async (event, context) =>
+ processPartialResponseSync(event, recordHandler, processor, {
context,
});
-};
```
### Async processing
@@ -177,12 +125,7 @@ import {
EventType,
processPartialResponse,
} from '@aws-lambda-powertools/batch';
-import type {
- SQSEvent,
- SQSRecord,
- Context,
- SQSBatchResponse,
-} from 'aws-lambda';
+import type { SQSHandler, SQSRecord } from 'aws-lambda';
const processor = new BatchProcessor(EventType.SQS);
@@ -194,14 +137,10 @@ const recordHandler = async (record: SQSRecord): Promise<number> => {
return res.status;
};
-export const handler = async (
- event: SQSEvent,
- context: Context
-): Promise<SQSBatchResponse> => {
- return await processPartialResponse(event, recordHandler, processor, {
+export const handler: SQSHandler = async (event, context) =>
+ await processPartialResponse(event, recordHandler, processor, {
context,
});
-};
```
Check the [docs](https://docs.powertools.aws.dev/lambda/typescript/latest/utilities/batch/) for more examples.
@@ -217,28 +156,28 @@ Help us prioritize upcoming functionalities or utilities by [upvoting existing R
## Connect
-* **Powertools for AWS Lambda on Discord**: `#typescript` - **[Invite link](https://discord.gg/B8zZKbbyET)**
-* **Email**: aws-lambda-powertools-feedback@amazon.com
+- **Powertools for AWS Lambda on Discord**: `#typescript` - **[Invite link](https://discord.gg/B8zZKbbyET)**
+- **Email**: <aws-lambda-powertools-feedback@amazon.com>
## How to support Powertools for AWS Lambda (TypeScript)?
### Becoming a reference customer
-Knowing which companies are using this library is important to help prioritize the project internally. If your company is using Powertools for AWS Lambda (TypeScript), you can request to have your name and logo added to the README file by raising a [Support Powertools for AWS Lambda (TypeScript) (become a reference)](https://github.com/aws-powertools/powertools-lambda-typescript/issues/new?assignees=&labels=customer-reference&template=support_powertools.yml&title=%5BSupport+Lambda+Powertools%5D%3A+%3Cyour+organization+name%3E) issue.
+Knowing which companies are using this library is important to help prioritize the project internally. If your company is using Powertools for AWS Lambda (TypeScript), you can request to have your name and logo added to the README file by raising a [Support Powertools for AWS Lambda (TypeScript) (become a reference)](https://s12d.com/become-reference-pt-ts) issue.
The following companies, among others, use Powertools:
-* [Hashnode](https://hashnode.com/)
-* [Trek10](https://www.trek10.com/)
-* [Elva](https://elva-group.com)
-* [globaldatanet](https://globaldatanet.com/)
-* [Bailey Nelson](https://www.baileynelson.com.au)
-* [Perfect Post](https://www.perfectpost.fr)
-* [Sennder](https://sennder.com/)
-* [Certible](https://www.certible.com/)
-* [tecRacer GmbH & Co. KG](https://www.tecracer.com/)
-* [AppYourself](https://appyourself.net)
-* [Alma Media](https://www.almamedia.fi)
+- [Hashnode](https://hashnode.com/)
+- [Trek10](https://www.trek10.com/)
+- [Elva](https://elva-group.com)
+- [globaldatanet](https://globaldatanet.com/)
+- [Bailey Nelson](https://www.baileynelson.com.au)
+- [Perfect Post](https://www.perfectpost.fr)
+- [Sennder](https://sennder.com/)
+- [Certible](https://www.certible.com/)
+- [tecRacer GmbH & Co. KG](https://www.tecracer.com/)
+- [AppYourself](https://appyourself.net)
+- [Alma Media](https://www.almamedia.fi)
### Sharing your work
@@ -246,11 +185,7 @@ Share what you did with Powertools for AWS Lambda (TypeScript) 💞💞. Blog po
### Using Lambda Layer
-This helps us understand who uses Powertools for AWS Lambda (TypeScript) in a non-intrusive way, and helps us gain future investments for other Powertools for AWS Lambda languages. When [using Layers](https://docs.powertools.aws.dev/lambda/typescript/latest/#lambda-layer), you can add Powertools as a dev dependency (or as part of your virtual env) to not impact the development process.
-
-## Credits
-
-Credits for the Lambda Powertools for AWS Lambda (TypeScript) idea go to [DAZN](https://github.com/getndazn) and their [DAZN Lambda Powertools](https://github.com/getndazn/dazn-lambda-powertools/).
+This helps us understand who uses Powertools for AWS Lambda (TypeScript) in a non-intrusive way, and helps us gain future investments for other Powertools for AWS Lambda languages. When [using Layers](https://docs.powertools.aws.dev/lambda/typescript/latest/#lambda-layer), you can add Powertools as a dev dependency to not impact the development process.
## License
diff --git a/packages/batch/src/BasePartialBatchProcessor.ts b/packages/batch/src/BasePartialBatchProcessor.ts
index 10b04b3e4e..ca913e49e9 100644
--- a/packages/batch/src/BasePartialBatchProcessor.ts
+++ b/packages/batch/src/BasePartialBatchProcessor.ts
@@ -17,18 +17,37 @@ import type {
} from './types.js';
/**
- * Process batch and partially report failed items
+ * Base abstract class for processing batch records with partial failure handling
+ *
+ * This class extends the {@link BasePartialProcessor} class and adds additional
+ * functionality to handle batch processing. Specifically, it provides methods
+ * to collect failed records and build the partial failure response.
+ *
+ * @abstract
*/
abstract class BasePartialBatchProcessor extends BasePartialProcessor {
+ /**
+ * Mapping of event types to their respective failure collectors
+ *
+ * Each service expects a different format for partial failure reporting,
+ * this mapping ensures that the correct format is used for each event type.
+ */
public COLLECTOR_MAPPING;
+ /**
+ * Response to be returned after processing
+ */
public batchResponse: PartialItemFailureResponse;
+ /**
+ * Type of event that the processor is handling
+ */
public eventType: keyof typeof EventType;
/**
* Initializes base batch processing class
- * @param eventType Whether this is SQS, DynamoDB stream, or Kinesis data stream event
+ *
+ * @param eventType The type of event to process (SQS, Kinesis, DynamoDB)
*/
public constructor(eventType: keyof typeof EventType) {
super();
@@ -42,7 +61,13 @@ abstract class BasePartialBatchProcessor extends BasePartialProcessor {
}
/**
- * Report messages to be deleted in case of partial failures
+ * Clean up logic to be run after processing a batch
+ *
+ * If the entire batch failed, and the utility is not configured otherwise,
+ * this method will throw a `FullBatchFailureError` with the list of errors
+ * that occurred during processing.
+ *
+ * Otherwise, it will build the partial failure response based on the event type.
*/
public clean(): void {
if (!this.hasMessagesToReport()) {
@@ -58,8 +83,11 @@ abstract class BasePartialBatchProcessor extends BasePartialProcessor {
}
/**
- * Collects identifiers of failed items for a DynamoDB stream
- * @returns list of identifiers for failed items
+ * Collect the identifiers of failed items for a DynamoDB stream
+ *
+ * The failures are collected based on the sequence number of the record
+ * and formatted as a list of objects with the key `itemIdentifier` as
+ * expected by the service.
*/
public collectDynamoDBFailures(): PartialItemFailures[] {
const failures: PartialItemFailures[] = [];
@@ -75,8 +103,11 @@ abstract class BasePartialBatchProcessor extends BasePartialProcessor {
}
/**
- * Collects identifiers of failed items for a Kinesis stream
- * @returns list of identifiers for failed items
+ * Collect identifiers of failed items for a Kinesis batch
+ *
+ * The failures are collected based on the sequence number of the record
+ * and formatted as a list of objects with the key `itemIdentifier` as
+ * expected by the service.
*/
public collectKinesisFailures(): PartialItemFailures[] {
const failures: PartialItemFailures[] = [];
@@ -90,8 +121,11 @@ abstract class BasePartialBatchProcessor extends BasePartialProcessor {
}
/**
- * Collects identifiers of failed items for an SQS batch
- * @returns list of identifiers for failed items
+ * Collect identifiers of failed items for an SQS batch
+ *
+ * The failures are collected based on the message ID of the record
+ * and formatted as a list of objects with the key `itemIdentifier` as
+ * expected by the service.
*/
public collectSqsFailures(): PartialItemFailures[] {
const failures: PartialItemFailures[] = [];
@@ -105,31 +139,37 @@ abstract class BasePartialBatchProcessor extends BasePartialProcessor {
}
/**
- * Determines whether all records in a batch failed to process
- * @returns true if all records resulted in exception results
+ * Determine if the entire batch failed
+ *
+ * If the number of errors is equal to the number of records, then the
+ * entire batch failed and this method will return `true`.
*/
public entireBatchFailed(): boolean {
return this.errors.length == this.records.length;
}
/**
- * Collects identifiers for failed batch items
- * @returns formatted messages to use in batch deletion
+ * Collect identifiers for failed batch items
+ *
+ * The method will call the appropriate collector based on the event type
+ * and return the list of failed items.
*/
public getMessagesToReport(): PartialItemFailures[] {
return this.COLLECTOR_MAPPING[this.eventType]();
}
/**
- * Determines if any records failed to process
- * @returns true if any records resulted in exception
+ * Determine if there are any failed records to report
+ *
+ * If there are no failed records, then the batch was successful
+ * and this method will return `false`.
*/
public hasMessagesToReport(): boolean {
return this.failureMessages.length != 0;
}
/**
- * Remove results from previous execution
+ * Set up the processor with the initial state ready for processing
*/
public prepare(): void {
this.successMessages.length = 0;
@@ -139,12 +179,21 @@ abstract class BasePartialBatchProcessor extends BasePartialProcessor {
}
/**
- * @returns Batch items that failed processing, if any
+ * Get the response from the batch processing
*/
public response(): PartialItemFailureResponse {
return this.batchResponse;
}
+ /**
+ * Forward a record to the appropriate batch type
+ *
+ * Based on the event type that the processor was initialized with, this method
+ * will cast the record to the appropriate batch type handler.
+ *
+ * @param record The record to be processed
+ * @param eventType The type of event to process
+ */
public toBatchType(
record: EventSourceDataClassTypes,
eventType: keyof typeof EventType
diff --git a/packages/batch/src/BasePartialProcessor.ts b/packages/batch/src/BasePartialProcessor.ts
index f4ec911446..6325372d25 100644
--- a/packages/batch/src/BasePartialProcessor.ts
+++ b/packages/batch/src/BasePartialProcessor.ts
@@ -3,54 +3,90 @@ import type {
BatchProcessingOptions,
EventSourceDataClassTypes,
FailureResponse,
- ResultType,
SuccessResponse,
} from './types.js';
/**
* Abstract class for batch processors.
+ *
+ * This class provides a common interface for processing records in a batch.
+ *
+ * Batch processors implementing this class should provide implementations for
+ * a number of abstract methods that are specific to the type of processor or the
+ * type of records being processed.
+ *
+ * The class comes with a few helper methods and hooks that can be used to prepare
+ * the processor before processing records, clean up after processing records, and
+ * handle records that succeed or fail processing.
+ *
+ * @abstract
*/
abstract class BasePartialProcessor {
+ /**
+ * List of errors that occurred during processing
+ */
public errors: Error[];
+ /**
+ * List of records that failed processing
+ */
public failureMessages: EventSourceDataClassTypes[];
+ /**
+ * Record handler provided by customers to process records
+ */
public handler: CallableFunction;
+ /**
+ * Options to be used during processing (optional)
+ */
public options?: BatchProcessingOptions;
+ /**
+ * List of records to be processed
+ */
public records: BaseRecord[];
- public successMessages: EventSourceDataClassTypes[];
-
/**
- * Initializes base processor class
+ * List of records that were processed successfully
*/
+ public successMessages: EventSourceDataClassTypes[];
+
public constructor() {
this.successMessages = [];
this.failureMessages = [];
this.errors = [];
this.records = [];
+ // No-op function to avoid null checks, will be overridden by customer when using the class
this.handler = new Function();
}
/**
- * Clean class instance after processing
+ * Clean or resets the processor instance after completing a batch
+ *
+ * This method should be called after processing a full batch to reset the processor.
+ *
+ * You can use this as a hook to run any cleanup logic after processing the records.
+ *
+ * @abstract
*/
public abstract clean(): void;
/**
- * Keeps track of batch records that failed processing
- * @param record record that failed processing
- * @param exception exception that was thrown
- * @returns FailureResponse object with ["fail", exception, original record]
+ * Method to handle a record that failed processing
+ *
+ * This method should be called when a record fails processing so that
+ * the processor can keep track of the error and the record that failed.
+ *
+ * @param record Record that failed processing
+ * @param error Error that was thrown
*/
public failureHandler(
record: EventSourceDataClassTypes,
- exception: Error
+ error: Error
): FailureResponse {
- const entry: FailureResponse = ['fail', exception.message, record];
- this.errors.push(exception);
+ const entry: FailureResponse = ['fail', error.message, record];
+ this.errors.push(error);
this.failureMessages.push(record);
return entry;
@@ -58,12 +94,22 @@ abstract class BasePartialProcessor {
/**
* Prepare class instance before processing
+ *
+ * This method should be called before processing the records
+ *
+ * You can use this as a hook to run any setup logic before processing the records.
+ *
+ * @abstract
*/
public abstract prepare(): void;
/**
- * Call instance's handler for each record
- * @returns List of processed records
+ * Process all records with an asynchronous handler
+ *
+ * Once called, the processor will create an array of promises to process each record
+ * and wait for all of them to settle before returning the results.
+ *
+ * Before and after processing, the processor will call the prepare and clean methods respectively.
*/
public async process(): Promise<(SuccessResponse | FailureResponse)[]> {
/**
@@ -89,6 +135,17 @@ abstract class BasePartialProcessor {
/**
* Process a record with an asyncronous handler
*
+ * An implementation of this method is required for asynchronous processors.
+ *
+ * When implementing this method, you should at least call the successHandler method
+ * when a record succeeds processing and the failureHandler method when a record
+ * fails processing.
+ *
+ * This is to ensure that the processor keeps track of the results and the records
+ * that succeeded and failed processing.
+ *
+ * @abstract
+ *
* @param record Record to be processed
*/
public abstract processRecord(
@@ -96,7 +153,19 @@ abstract class BasePartialProcessor {
): Promise<SuccessResponse | FailureResponse>;
/**
- * Process a record with the handler
+ * Process a record with a synchronous handler
+ *
+ * An implementation of this method is required for synchronous processors.
+ *
+ * When implementing this method, you should at least call the successHandler method
+ * when a record succeeds processing and the failureHandler method when a record
+ * fails processing.
+ *
+ * This is to ensure that the processor keeps track of the results and the records
+ * that succeeded and failed processing.
+ *
+ * @abstract
+ *
* @param record Record to be processed
*/
public abstract processRecordSync(
@@ -104,7 +173,15 @@ abstract class BasePartialProcessor {
): SuccessResponse | FailureResponse;
/**
- * Call instance's handler for each record
+ * Orchestrate the processing of a batch of records synchronously
+ * and sequentially.
+ *
+ * The method is responsible for calling the prepare method before
+ * processing the records and the clean method after processing the records.
+ *
+ * In the middle, the method will iterate over the records and call the
+ * processRecordSync method for each record.
+ *
* @returns List of processed records
*/
public processSync(): (SuccessResponse | FailureResponse)[] {
@@ -128,17 +205,25 @@ abstract class BasePartialProcessor {
}
/**
- * Set class instance attributes before execution
- * @param records List of records to be processed
- * @param handler CallableFunction to process entries of "records"
- * @param options Options to be used during processing
- * @returns this object
+ * Set up the processor with the records and the handler
+ *
+ * This method should be called before processing the records to
+ * bind the records and the handler for a specific invocation to
+ * the processor.
+ *
+ * We use a separate method to do this rather than the constructor
+ * to allow for reusing the processor instance across multiple invocations
+ * by instantiating the processor outside of the Lambda function handler.
+ *
+ * @param records Array of records to be processed
+ * @param handler CallableFunction to process each record from the batch
+ * @param options Options to be used during processing (optional)
*/
public register(
records: BaseRecord[],
handler: CallableFunction,
options?: BatchProcessingOptions
- ): BasePartialProcessor {
+ ): this {
this.records = records;
this.handler = handler;
@@ -150,14 +235,17 @@ abstract class BasePartialProcessor {
}
/**
- * Keeps track of batch records that were processed successfully
- * @param record record that succeeded processing
- * @param result result from record handler
- * @returns SuccessResponse object with ["success", result, original record]
+ * Method to handle a record that succeeded processing
+ *
+ * This method should be called when a record succeeds processing so that
+ * the processor can keep track of the result and the record that succeeded.
+ *
+ * @param record Record that succeeded processing
+ * @param result Result from record handler
*/
public successHandler(
record: EventSourceDataClassTypes,
- result: ResultType
+ result: unknown
): SuccessResponse {
const entry: SuccessResponse = ['success', result, record];
this.successMessages.push(record);
diff --git a/packages/batch/src/BatchProcessor.ts b/packages/batch/src/BatchProcessor.ts
index 960bcd4dde..4cff0ee8c9 100644
--- a/packages/batch/src/BatchProcessor.ts
+++ b/packages/batch/src/BatchProcessor.ts
@@ -3,9 +3,99 @@ import { BatchProcessingError } from './errors.js';
import type { BaseRecord, FailureResponse, SuccessResponse } from './types.js';
/**
- * Process native partial responses from SQS, Kinesis Data Streams, and DynamoDB
+ * Process records in a batch asynchronously and handle partial failure cases.
+ *
+ * The batch processor supports processing records coming from Amazon SQS,
+ * Amazon Kinesis Data Streams, and Amazon DynamoDB Streams.
+ *
+ * Items are processed asynchronously and in parallel.
+ *
+ * **Process batch triggered by SQS**
+ *
+ * @example
+ * ```typescript
+ * import {
+ * BatchProcessor,
+ * EventType,
+ * processPartialResponse,
+ * } from '@aws-lambda-powertools/batch';
+ * import type { SQSRecord, SQSHandler } from 'aws-lambda';
+ *
+ * const processor = new BatchProcessor(EventType.SQS);
+ *
+ * const recordHandler = async (record: SQSRecord): Promise<void> => {
+ * const payload = JSON.parse(record.body);
+ * };
+ *
+ * export const handler: SQSHandler = async (event, context) =>
+ * processPartialResponse(event, recordHandler, processor, {
+ * context,
+ * });
+ * ```
+ *
+ * **Process batch triggered by Kinesis Data Streams**
+ *
+ * @example
+ * ```typescript
+ * import {
+ * BatchProcessor,
+ * EventType,
+ * processPartialResponse,
+ * } from '@aws-lambda-powertools/batch';
+ * import type { KinesisStreamHandler, KinesisStreamRecord } from 'aws-lambda';
+ *
+ * const processor = new BatchProcessor(EventType.KinesisDataStreams);
+ *
+ * const recordHandler = async (record: KinesisStreamRecord): Promise<void> => {
+ * const payload = JSON.parse(record.kinesis.data);
+ * };
+ *
+ * export const handler: KinesisStreamHandler = async (event, context) =>
+ * processPartialResponse(event, recordHandler, processor, {
+ * context,
+ * });
+ * ```
+ *
+ * **Process batch triggered by DynamoDB Streams**
+ *
+ * @example
+ * ```typescript
+ * import {
+ * BatchProcessor,
+ * EventType,
+ * processPartialResponse,
+ * } from '@aws-lambda-powertools/batch';
+ * import type { DynamoDBRecord, DynamoDBStreamHandler } from 'aws-lambda';
+ *
+ * const processor = new BatchProcessor(EventType.DynamoDBStreams);
+ *
+ * const recordHandler = async (record: DynamoDBRecord): Promise<void> => {
+ * const payload = record.dynamodb.NewImage.Message.S;
+ * };
+ *
+ * export const handler: DynamoDBStreamHandler = async (event, context) =>
+ * processPartialResponse(event, recordHandler, processor, {
+ * context,
+ * });
+ * ```
+ *
+ * @param eventType The type of event to process (SQS, Kinesis, DynamoDB)
*/
class BatchProcessor extends BasePartialBatchProcessor {
+ /**
+ * Handle a record asynchronously with the instance handler provided.
+ *
+ * This method implements the abstract method from the parent class,
+ * and orchestrates the processing of a single record.
+ *
+ * First, it converts the record to the appropriate type for the batch processor.
+ * Then, it calls the handler function with the record data and context.
+ *
+ * If the handler function completes successfully, the method returns a success response.
+ * Otherwise, it returns a failure response with the error that occurred during processing.
+ *
+ * @param record The record to be processed
+ */
public async processRecord(
record: BaseRecord
): Promise<SuccessResponse | FailureResponse> {
@@ -20,9 +110,9 @@ class BatchProcessor extends BasePartialBatchProcessor {
}
/**
- * Process a record with instance's handler
- * @param _record Batch record to be processed
- * @returns response of success or failure
+ * @throws {BatchProcessingError} This method is not implemented for synchronous processing.
+ *
+ * @param _record The record to be processed
*/
public processRecordSync(
_record: BaseRecord
diff --git a/packages/batch/src/BatchProcessorSync.ts b/packages/batch/src/BatchProcessorSync.ts
index 87a1566f4c..2018355f88 100644
--- a/packages/batch/src/BatchProcessorSync.ts
+++ b/packages/batch/src/BatchProcessorSync.ts
@@ -3,9 +3,90 @@ import { BatchProcessingError } from './errors.js';
import type { BaseRecord, FailureResponse, SuccessResponse } from './types.js';
/**
- * Process native partial responses from SQS, Kinesis Data Streams, and DynamoDB
+ * Process records in a batch synchronously and handle partial failure cases.
+ *
+ * The batch processor supports processing records coming from Amazon SQS,
+ * Amazon Kinesis Data Streams, and Amazon DynamoDB Streams.
+ *
+ * Items are processed synchronously and in sequence.
+ *
+ * **Process batch triggered by SQS**
+ *
+ * @example
+ * ```typescript
+ * import {
+ * BatchProcessorSync,
+ * EventType,
+ * processPartialResponseSync,
+ * } from '@aws-lambda-powertools/batch';
+ * import type { SQSRecord, SQSHandler } from 'aws-lambda';
+ *
+ * const processor = new BatchProcessorSync(EventType.SQS);
+ *
+ * const recordHandler = (record: SQSRecord): void => {
+ * const payload = JSON.parse(record.body);
+ * };
+ *
+ * export const handler: SQSHandler = async (event, context) =>
+ * processPartialResponseSync(event, recordHandler, processor, {
+ * context,
+ * });
+ * ```
+ *
+ * **Process batch triggered by Kinesis Data Streams**
+ *
+ * @example
+ * ```typescript
+ * import {
+ * BatchProcessorSync,
+ * EventType,
+ * processPartialResponseSync,
+ * } from '@aws-lambda-powertools/batch';
+ * import type { KinesisStreamHandler, KinesisStreamRecord } from 'aws-lambda';
+ *
+ * const processor = new BatchProcessorSync(EventType.KinesisDataStreams);
+ *
+ * const recordHandler = (record: KinesisStreamRecord): void => {
+ * const payload = JSON.parse(record.kinesis.data);
+ * };
+ *
+ * export const handler: KinesisStreamHandler = async (event, context) =>
+ * processPartialResponseSync(event, recordHandler, processor, {
+ * context,
+ * });
+ * ```
+ *
+ * **Process batch triggered by DynamoDB Streams**
+ *
+ * @example
+ * ```typescript
+ * import {
+ * BatchProcessorSync,
+ * EventType,
+ * processPartialResponseSync,
+ * } from '@aws-lambda-powertools/batch';
+ * import type { DynamoDBRecord, DynamoDBStreamHandler } from 'aws-lambda';
+ *
+ * const processor = new BatchProcessorSync(EventType.DynamoDBStreams);
+ *
+ * const recordHandler = (record: DynamoDBRecord): void => {
+ * const payload = record.dynamodb.NewImage.Message.S;
+ * };
+ *
+ * export const handler: DynamoDBStreamHandler = async (event, context) =>
+ * processPartialResponseSync(event, recordHandler, processor, {
+ * context,
+ * });
+ * ```
+ *
+ * @param eventType The type of event to process (SQS, Kinesis, DynamoDB)
*/
class BatchProcessorSync extends BasePartialBatchProcessor {
+ /**
+ * @throws {BatchProcessingError} This method is not implemented for asynchronous processing.
+ *
+ * @param _record The record to be processed
+ */
public async processRecord(
_record: BaseRecord
): Promise<SuccessResponse | FailureResponse> {
@@ -13,9 +94,18 @@ class BatchProcessorSync extends BasePartialBatchProcessor {
}
/**
- * Process a record with instance's handler
- * @param record Batch record to be processed
- * @returns response of success or failure
+ * Handle a record synchronously with the instance handler provided.
+ *
+ * This method implements the abstract method from the parent class,
+ * and orchestrates the processing of a single record.
+ *
+ * First, it converts the record to the appropriate type for the batch processor.
+ * Then, it calls the handler function with the record data and context.
+ *
+ * If the handler function completes successfully, the method returns a success response.
+ * Otherwise, it returns a failure response with the error that occurred during processing.
+ *
+ * @param record The record to be processed
*/
public processRecordSync(
record: BaseRecord
diff --git a/packages/batch/src/SqsFifoPartialProcessor.ts b/packages/batch/src/SqsFifoPartialProcessor.ts
index 3189c00fd5..bac4939d8b 100644
--- a/packages/batch/src/SqsFifoPartialProcessor.ts
+++ b/packages/batch/src/SqsFifoPartialProcessor.ts
@@ -4,9 +4,35 @@ import { SqsFifoShortCircuitError } from './errors.js';
import type { FailureResponse, SuccessResponse } from './types.js';
/**
- * Process native partial responses from SQS FIFO queues
- * Stops processing records when the first record fails
- * The remaining records are reported as failed items
+ * Batch processor for SQS FIFO queues
+ *
+ * This class extends the {@link BatchProcessorSync} class and provides
+ * a mechanism to process records from SQS FIFO queues synchronously.
+ *
+ * By default, we will stop processing at the first failure and mark unprocessed messages as failed to preserve ordering.
+ *
+ * However, this behavior may not be optimal for customers who wish to proceed with processing messages from a different group ID.
+ *
+ * @example
+ * ```typescript
+ * import {
+ *   SqsFifoPartialProcessor,
+ *   processPartialResponseSync,
+ * } from '@aws-lambda-powertools/batch';
+ * import type { SQSRecord, SQSHandler } from 'aws-lambda';
+ *
+ * const processor = new SqsFifoPartialProcessor();
+ *
+ * const recordHandler = (record: SQSRecord): void => {
+ *   const payload = JSON.parse(record.body);
+ * };
+ *
+ * export const handler: SQSHandler = async (event, context) =>
+ *   processPartialResponseSync(event, recordHandler, processor, {
+ *     context,
+ *   });
+ *
+ * ```
*/
class SqsFifoPartialProcessor extends BatchProcessorSync {
public constructor() {
@@ -14,9 +40,19 @@ class SqsFifoPartialProcessor extends BatchProcessorSync {
}
/**
- * Call instance's handler for each record.
- * When the first failed message is detected, the process is short-circuited
- * And the remaining messages are reported as failed items
+ * Process a record with a synchronous handler
+ *
+ * This method orchestrates the processing of a batch of records synchronously
+ * for SQS FIFO queues.
+ *
+ * The method calls the prepare hook to initialize the processor and then
+ * iterates over each record in the batch, processing them one by one.
+ *
+ * If one of them fails, the method short circuits the processing and fails
+ * the remaining records in the batch.
+ *
+ * Then, it calls the clean hook to clean up the processor and returns the
+ * processed records.
*/
public processSync(): (SuccessResponse | FailureResponse)[] {
this.prepare();
@@ -40,11 +76,18 @@ class SqsFifoPartialProcessor extends BatchProcessorSync {
}
/**
- * Starting from the first failure index, fail all remaining messages and append them to the result list
+ * Starting from the first failure index, fail all remaining messages regardless
+ * of their group ID.
+ *
+ * This short circuit mechanism is used when we detect a failed message in the batch.
+ *
+ * Since messages in a FIFO queue are processed in order, we must stop processing any
+ * remaining messages in the batch to prevent out-of-order processing.
+ *
* @param firstFailureIndex Index of first message that failed
- * @param result List of success and failure responses with remaining messages failed
+ * @param processedRecords Array of response items that have been processed both successfully and unsuccessfully
*/
- public shortCircuitProcessing(
+ protected shortCircuitProcessing(
firstFailureIndex: number,
processedRecords: (SuccessResponse | FailureResponse)[]
): (SuccessResponse | FailureResponse)[] {
diff --git a/packages/batch/src/constants.ts b/packages/batch/src/constants.ts
index 5f55e1e347..a794ec0ef0 100644
--- a/packages/batch/src/constants.ts
+++ b/packages/batch/src/constants.ts
@@ -8,16 +8,25 @@ import type {
EventSourceDataClassTypes,
} from './types.js';
+/**
+ * Enum of supported event types for the utility
+ */
const EventType = {
SQS: 'SQS',
KinesisDataStreams: 'KinesisDataStreams',
DynamoDBStreams: 'DynamoDBStreams',
} as const;
+/**
+ * Default response for the partial batch processor
+ */
const DEFAULT_RESPONSE: PartialItemFailureResponse = {
batchItemFailures: [],
};
+/**
+ * Mapping of event types to their respective data classes
+ */
const DATA_CLASS_MAPPING = {
[EventType.SQS]: (record: EventSourceDataClassTypes) => record as SQSRecord,
[EventType.KinesisDataStreams]: (record: EventSourceDataClassTypes) =>
diff --git a/packages/batch/src/errors.ts b/packages/batch/src/errors.ts
index 4ce6b27de9..df128e7aa2 100644
--- a/packages/batch/src/errors.ts
+++ b/packages/batch/src/errors.ts
@@ -51,6 +51,7 @@ class UnexpectedBatchTypeError extends BatchProcessingError {
this.name = 'UnexpectedBatchTypeError';
}
}
+
export {
BatchProcessingError,
FullBatchFailureError,
diff --git a/packages/batch/src/processPartialResponse.ts b/packages/batch/src/processPartialResponse.ts
index 8c5d7d79c9..ca74e2801e 100644
--- a/packages/batch/src/processPartialResponse.ts
+++ b/packages/batch/src/processPartialResponse.ts
@@ -7,12 +7,45 @@ import type {
} from './types.js';
/**
- * Higher level function to handle batch event processing
- * @param event Lambda's original event
- * @param recordHandler Callable function to process each record from the batch
- * @param processor Batch processor to handle partial failure cases
+ * Higher level function to process a batch of records asynchronously
+ * and handle partial failure cases.
+ *
+ * This function is intended to be used within asynchronous Lambda handlers
+ * and together with a batch processor that implements the {@link BasePartialBatchProcessor}
+ * interface.
+ *
+ * It accepts a batch of records, a record handler function, a batch processor,
+ * and an optional set of options to configure the batch processing.
+ *
+ * By default, the function will process the batch of records asynchronously
+ * and in parallel. If you need to process the records synchronously, you can
+ * use the {@link processPartialResponseSync} function instead.
+ *
+ * @example
+ * ```typescript
+ * import {
+ * BatchProcessor,
+ * EventType,
+ * processPartialResponse,
+ * } from '@aws-lambda-powertools/batch';
+ * import type { KinesisStreamHandler, KinesisStreamRecord } from 'aws-lambda';
+ *
+ * const processor = new BatchProcessor(EventType.KinesisDataStreams);
+ *
+ * const recordHandler = async (record: KinesisStreamRecord): Promise<void> => {
+ * const payload = JSON.parse(record.kinesis.data);
+ * };
+ *
+ * export const handler: KinesisStreamHandler = async (event, context) =>
+ * processPartialResponse(event, recordHandler, processor, {
+ * context,
+ * });
+ * ```
+ *
+ * @param event The event object containing the batch of records
+ * @param recordHandler Async function to process each record from the batch
+ * @param processor Batch processor instance to handle the batch processing
* @param options Batch processing options
- * @returns Lambda Partial Batch Response
*/
const processPartialResponse = async (
event: { Records: BaseRecord[] },
diff --git a/packages/batch/src/processPartialResponseSync.ts b/packages/batch/src/processPartialResponseSync.ts
index 7fb7056385..526d355a89 100644
--- a/packages/batch/src/processPartialResponseSync.ts
+++ b/packages/batch/src/processPartialResponseSync.ts
@@ -7,11 +7,45 @@ import type {
} from './types.js';
/**
- * Higher level function to handle batch event processing
- * @param event Lambda's original event
- * @param recordHandler Callable function to process each record from the batch
- * @param processor Batch processor to handle partial failure cases
- * @returns Lambda Partial Batch Response
+ * Higher level function to process a batch of records synchronously
+ * and handle partial failure cases.
+ *
+ * This function is intended to be used within synchronous Lambda handlers
+ * and together with a batch processor that implements the {@link BasePartialBatchProcessor}
+ * interface.
+ *
+ * It accepts a batch of records, a record handler function, a batch processor,
+ * and an optional set of options to configure the batch processing.
+ *
+ * By default, the function will process the batch of records synchronously
+ * and in sequence. If you need to process the records asynchronously, you can
+ * use the {@link processPartialResponse} function instead.
+ *
+ * @example
+ * ```typescript
+ * import {
+ *   BatchProcessorSync,
+ *   EventType,
+ *   processPartialResponseSync,
+ * } from '@aws-lambda-powertools/batch';
+ * import type { SQSRecord, SQSHandler } from 'aws-lambda';
+ *
+ * const processor = new BatchProcessorSync(EventType.SQS);
+ *
+ * const recordHandler = (record: SQSRecord): void => {
+ * const payload = JSON.parse(record.body);
+ * };
+ *
+ * export const handler: SQSHandler = async (event, context) =>
+ * processPartialResponseSync(event, recordHandler, processor, {
+ * context,
+ * });
+ * ```
+ *
+ * @param event The event object containing the batch of records
+ * @param recordHandler Sync function to process each record from the batch
+ * @param processor Batch processor instance to handle the batch processing
+ * @param options Batch processing options
*/
const processPartialResponseSync = (
event: { Records: BaseRecord[] },
diff --git a/packages/batch/src/types.ts b/packages/batch/src/types.ts
index 5033ce6f5a..b06391df79 100644
--- a/packages/batch/src/types.ts
+++ b/packages/batch/src/types.ts
@@ -5,31 +5,64 @@ import type {
SQSRecord,
} from 'aws-lambda';
+/**
+ * Options for batch processing
+ *
+ * @property context The context object provided by the AWS Lambda runtime
+ */
type BatchProcessingOptions = {
+ /**
+ * The context object provided by the AWS Lambda runtime. When provided,
+ * it's made available to the handler function you specify
+ */
context: Context;
};
+/**
+ * The types of data that can be provided by an event source
+ */
type EventSourceDataClassTypes =
| SQSRecord
| KinesisStreamRecord
| DynamoDBRecord;
-type RecordValue = unknown;
-type BaseRecord = { [key: string]: RecordValue } | EventSourceDataClassTypes;
+/**
+ * Type representing a record from an event source
+ */
+type BaseRecord = { [key: string]: unknown } | EventSourceDataClassTypes;
-type ResultType = unknown;
-type SuccessResponse = ['success', ResultType, EventSourceDataClassTypes];
+/**
+ * Type representing a successful response
+ *
+ * The first element is the string literal 'success',
+ * the second element is the result of the handler function,
+ * and the third element is the type of data provided by the event source
+ */
+type SuccessResponse = ['success', unknown, EventSourceDataClassTypes];
+/**
+ * Type representing a failure response
+ *
+ * The first element is the string literal 'fail',
+ * the second element is the error message,
+ * and the third element is the type of data provided by the event source
+ */
type FailureResponse = ['fail', string, EventSourceDataClassTypes];
+/**
+ * Type representing the identifier of a failed batch item
+ */
type PartialItemFailures = { itemIdentifier: string };
+
+/**
+ * Type representing a partial failure response
+ */
type PartialItemFailureResponse = { batchItemFailures: PartialItemFailures[] };
export type {
BatchProcessingOptions,
BaseRecord,
EventSourceDataClassTypes,
- ResultType,
SuccessResponse,
FailureResponse,
PartialItemFailures,
diff --git a/packages/batch/typedoc.json b/packages/batch/typedoc.json
index ed0ca6fc47..7b5f0117b6 100644
--- a/packages/batch/typedoc.json
+++ b/packages/batch/typedoc.json
@@ -3,7 +3,8 @@
"../../typedoc.base.json"
],
"entryPoints": [
- "./src/index.ts"
+ "./src/index.ts",
+ "./src/types.ts"
],
"readme": "README.md"
}
\ No newline at end of file