diff --git a/.github/workflows/reusable-run-linting-check-and-unit-tests.yml b/.github/workflows/reusable-run-linting-check-and-unit-tests.yml index 65f53b6d57..0dba209acf 100644 --- a/.github/workflows/reusable-run-linting-check-and-unit-tests.yml +++ b/.github/workflows/reusable-run-linting-check-and-unit-tests.yml @@ -1,5 +1,31 @@ name: Run unit tests +# +# PROCESS +# +# 1. Checkout code +# 2. Install dependencies & build project +# 3. Run linting +# 4. Run unit tests +# +# NOTES +# We create different jobs for different workspaces of the monorepo, since we have slightly different requirements for each. +# For example, the docs job (`check-docs`) runs markdown linting, while the layer (`check-layer-publisher`), examples (`check-examples`), +# and code snippets (`check-docs-snippets`) jobs run linting and unit tests, but only for the current LTS version of Node.js. +# +# For the Powertools for AWS main features (aka `packages/*`), we instead run linting and unit tests for all the supported +# versions of Node.js. +# +# Since #2938, we are in the process of improving our test suite, so we are gradually extracting the tests for each package +# from (`run-linting-check-and-unit-tests-on-utilities`) to their own job, so we can run them in parallel using the matrix +# strategy and reduce the time it takes to run the tests, as well as improve the maintainer experience in case of failures. +# +# USAGE +# +# NOTE: meant to be called by ./.github/workflows/pr-run-linting-check-and-unit-tests.yml when a PR is opened or updated, +# or by ./.github/workflows/make-release.yml when a release is made. +# on: workflow_call: @@ -7,6 +33,31 @@ permissions: contents: read jobs: + code-quality: + runs-on: ubuntu-latest + env: + NODE_ENV: dev + strategy: + matrix: + version: [18, 20] + workspace: ["packages/batch"] + fail-fast: false + steps: + - name: Checkout code + uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - name: Setup NodeJS + uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4.0.3 + with: + node-version: ${{ matrix.version }} + cache: "npm" + - name: Setup dependencies + uses: aws-powertools/actions/.github/actions/cached-node-modules@d406bac5563f1d8c793519a3eedfe620f6a14872 + with: + nodeVersion: ${{ matrix.version }} + - name: Linting + run: npm run lint -w ${{ matrix.workspace }} + - name: Unit tests + run: npm run test:unit:coverage -w ${{ matrix.workspace }} run-linting-check-and-unit-tests-on-utilities: runs-on: ubuntu-latest env: diff --git a/.husky/pre-push b/.husky/pre-push index 99a9f73c20..745483f08e 100755 --- a/.husky/pre-push +++ b/.husky/pre-push @@ -7,4 +7,6 @@ npm t \ -w packages/idempotency \ -w packages/parameters \ -w packages/parser \ - -w packages/event-handler \ No newline at end of file + -w packages/event-handler + +npx vitest --run --coverage --changed="$(git merge-base HEAD main)" \ No newline at end of file diff --git a/packages/batch/jest.config.cjs b/packages/batch/jest.config.cjs deleted file mode 100644 index 9eaa88fbf4..0000000000 --- a/packages/batch/jest.config.cjs +++ /dev/null @@ -1,31 +0,0 @@ -module.exports = { - displayName: { - name: 'Powertools for AWS Lambda (TypeScript) utility: BATCH', - color: 'orange', - }, - runner: 'groups', - preset: 'ts-jest', - moduleNameMapper: { - '^(\\.{1,2}/.*)\\.js$': '$1', - }, - transform: { - '^.+\\.ts?$': 'ts-jest', - }, - moduleFileExtensions: ['js', 'ts'], - collectCoverageFrom: ['**/src/**/*.ts', '!**/node_modules/**'], - testMatch: ['**/?(*.)+(spec|test).ts'], - roots: 
['/src', '/tests'], - testPathIgnorePatterns: ['/node_modules/'], - testEnvironment: 'node', - coveragePathIgnorePatterns: ['/node_modules/', '/types/'], - coverageThreshold: { - global: { - statements: 100, - branches: 100, - functions: 100, - lines: 100, - }, - }, - coverageReporters: ['json-summary', 'text', 'lcov'], - setupFiles: ['/tests/helpers/populateEnvironmentVariables.ts'], -}; diff --git a/packages/batch/package.json b/packages/batch/package.json index a07cfde8a9..bae7f56cb1 100644 --- a/packages/batch/package.json +++ b/packages/batch/package.json @@ -10,9 +10,9 @@ "access": "public" }, "scripts": { - "test": "npm run test:unit", - "test:unit": "jest --group=unit --detectOpenHandles --coverage --verbose", - "jest": "jest --detectOpenHandles --verbose", + "test": "vitest --run", + "test:unit": "vitest --run", + "test:unit:coverage": "vitest --run --coverage.enabled --coverage.thresholds.100 --coverage.include='src/**'", "test:e2e:nodejs18x": "echo 'Not Implemented'", "test:e2e:nodejs20x": "echo 'Not Implemented'", "test:e2e": "echo 'Not Implemented'", @@ -75,4 +75,4 @@ "devDependencies": { "@aws-lambda-powertools/testing-utils": "file:../testing" } -} +} \ No newline at end of file diff --git a/packages/batch/src/BasePartialProcessor.ts b/packages/batch/src/BasePartialProcessor.ts index 6325372d25..a01c09db29 100644 --- a/packages/batch/src/BasePartialProcessor.ts +++ b/packages/batch/src/BasePartialProcessor.ts @@ -115,9 +115,16 @@ abstract class BasePartialProcessor { /** * If this is a sync processor, user should have called processSync instead, * so we call the method early to throw the error early thus failing fast. + * + * The type casting is necessary to ensure that we have test coverage for the + * block of code that throws the error, without having to change the return type + * of the method. This is because this call will always throw an error. 
*/ if (this.constructor.name === 'BatchProcessorSync') { - await this.processRecord(this.records[0]); + return (await this.processRecord(this.records[0])) as ( + | SuccessResponse + | FailureResponse + )[]; } this.prepare(); diff --git a/packages/batch/tests/helpers/populateEnvironmentVariables.ts b/packages/batch/tests/helpers/populateEnvironmentVariables.ts deleted file mode 100644 index 9dc602e5f0..0000000000 --- a/packages/batch/tests/helpers/populateEnvironmentVariables.ts +++ /dev/null @@ -1,12 +0,0 @@ -// Reserved variables -process.env._X_AMZN_TRACE_ID = '1-abcdef12-3456abcdef123456abcdef12'; -process.env.AWS_LAMBDA_FUNCTION_NAME = 'my-lambda-function'; -process.env.AWS_EXECUTION_ENV = 'nodejs20.x'; -process.env.AWS_LAMBDA_FUNCTION_MEMORY_SIZE = '128'; -if ( - process.env.AWS_REGION === undefined && - process.env.CDK_DEFAULT_REGION === undefined -) { - process.env.AWS_REGION = 'eu-west-1'; -} -process.env._HANDLER = 'index.handler'; diff --git a/packages/batch/tests/unit/BasePartialProcessor.test.ts b/packages/batch/tests/unit/BasePartialProcessor.test.ts index aa3bdb3d4b..6c5421c01b 100644 --- a/packages/batch/tests/unit/BasePartialProcessor.test.ts +++ b/packages/batch/tests/unit/BasePartialProcessor.test.ts @@ -1,8 +1,4 @@ -/** - * Test BasePartialBatchProcessor class - * - * @group unit/batch/class/basepartialbatchprocessor - */ +import { afterAll, beforeEach, describe, expect, it, vi } from 'vitest'; import { BasePartialBatchProcessor, EventType } from '../../src/index.js'; import type { BaseRecord, @@ -16,8 +12,7 @@ describe('Class: BasePartialBatchProcessor', () => { const ENVIRONMENT_VARIABLES = process.env; beforeEach(() => { - jest.clearAllMocks(); - jest.resetModules(); + vi.clearAllMocks(); process.env = { ...ENVIRONMENT_VARIABLES }; }); @@ -46,7 +41,7 @@ describe('Class: BasePartialBatchProcessor', () => { } describe('create custom batch partial processor', () => { - it('should create a custom batch partial processor', () => { + it('creates a custom batch partial processor', () => { // Act const processor = new MyPartialProcessor(); @@ -54,11 +49,11 @@ describe('Class: BasePartialBatchProcessor', () => { expect(processor).toBeInstanceOf(BasePartialBatchProcessor); }); - it('should process a batch of records', () => { + it('processes a batch of records', () => { // Prepare const processor = new MyPartialProcessor(); const records = [sqsRecordFactory('success')]; - const consoleSpy = jest.spyOn(console, 'log'); + const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); // Act processor.register(records, sqsRecordHandler); diff --git a/packages/batch/tests/unit/BatchProcessor.test.ts b/packages/batch/tests/unit/BatchProcessor.test.ts index 2160865a05..e49ee4c30f 100644 --- a/packages/batch/tests/unit/BatchProcessor.test.ts +++ b/packages/batch/tests/unit/BatchProcessor.test.ts @@ -1,10 +1,6 @@ -/** - * Test BatchProcessor class - * - * @group unit/batch/class/batchprocessor - */ import context from '@aws-lambda-powertools/testing-utils/context'; import type { Context } from 'aws-lambda'; +import { afterAll, beforeEach, describe, expect, it, vi } from 'vitest'; import { BatchProcessingError, BatchProcessor, @@ -31,8 +27,7 @@ describe('Class: AsyncBatchProcessor', () => { }; beforeEach(() => { - jest.clearAllMocks(); - jest.resetModules(); + vi.clearAllMocks(); process.env = { ...ENVIRONMENT_VARIABLES }; }); @@ -41,7 +36,7 @@ describe('Class: AsyncBatchProcessor', () => { }); describe('Asynchronously processing SQS Records', () => { - test('Batch processing 
SQS records with no failures', async () => { + it('completes processing with no failures', async () => { // Prepare const firstRecord = sqsRecordFactory('success'); const secondRecord = sqsRecordFactory('success'); @@ -59,7 +54,7 @@ describe('Class: AsyncBatchProcessor', () => { ]); }); - test('Batch processing SQS records with some failures', async () => { + it('completes processing with some failures', async () => { // Prepare const firstRecord = sqsRecordFactory('failure'); const secondRecord = sqsRecordFactory('success'); @@ -86,7 +81,7 @@ describe('Class: AsyncBatchProcessor', () => { }); }); - test('Batch processing SQS records with all failures', async () => { + it('completes processing with all failures', async () => { // Prepare const firstRecord = sqsRecordFactory('failure'); const secondRecord = sqsRecordFactory('failure'); @@ -106,7 +101,7 @@ describe('Class: AsyncBatchProcessor', () => { }); describe('Asynchronously processing Kinesis Records', () => { - test('Batch processing Kinesis records with no failures', async () => { + it('completes processing with no failures', async () => { // Prepare const firstRecord = kinesisRecordFactory('success'); const secondRecord = kinesisRecordFactory('success'); @@ -124,7 +119,7 @@ describe('Class: AsyncBatchProcessor', () => { ]); }); - test('Batch processing Kinesis records with some failures', async () => { + it('completes processing with some failures', async () => { // Prepare const firstRecord = kinesisRecordFactory('failure'); const secondRecord = kinesisRecordFactory('success'); @@ -151,7 +146,7 @@ describe('Class: AsyncBatchProcessor', () => { }); }); - test('Batch processing Kinesis records with all failures', async () => { + it('completes processing with all failures', async () => { // Prepare const firstRecord = kinesisRecordFactory('failure'); const secondRecord = kinesisRecordFactory('failure'); @@ -171,7 +166,7 @@ describe('Class: AsyncBatchProcessor', () => { }); describe('Asynchronously processing DynamoDB Records', () => { - test('Batch processing DynamoDB records with no failures', async () => { + it('completes processing with no failures', async () => { // Prepare const firstRecord = dynamodbRecordFactory('success'); const secondRecord = dynamodbRecordFactory('success'); @@ -189,7 +184,7 @@ describe('Class: AsyncBatchProcessor', () => { ]); }); - test('Batch processing DynamoDB records with some failures', async () => { + it('completes processing with some failures', async () => { // Prepare const firstRecord = dynamodbRecordFactory('failure'); const secondRecord = dynamodbRecordFactory('success'); @@ -216,7 +211,7 @@ describe('Class: AsyncBatchProcessor', () => { }); }); - test('Batch processing DynamoDB records with all failures', async () => { + it('completes processing with all failures', async () => { // Prepare const firstRecord = dynamodbRecordFactory('failure'); const secondRecord = dynamodbRecordFactory('failure'); @@ -236,7 +231,7 @@ describe('Class: AsyncBatchProcessor', () => { }); describe('Batch processing with Lambda context', () => { - test('Batch processing when context is provided and handler accepts', async () => { + it('passes the context to the record handler', async () => { // Prepare const firstRecord = sqsRecordFactory('success'); const secondRecord = sqsRecordFactory('success'); @@ -254,25 +249,7 @@ describe('Class: AsyncBatchProcessor', () => { ]); }); - test('Batch processing when context is provided and handler does not accept', async () => { - // Prepare - const firstRecord = 
sqsRecordFactory('success'); - const secondRecord = sqsRecordFactory('success'); - const records = [firstRecord, secondRecord]; - const processor = new BatchProcessor(EventType.SQS); - - // Act - processor.register(records, asyncSqsRecordHandler, options); - const processedMessages = await processor.process(); - - // Assess - expect(processedMessages).toStrictEqual([ - ['success', firstRecord.body, firstRecord], - ['success', secondRecord.body, secondRecord], - ]); - }); - - test('Batch processing when malformed context is provided and handler attempts to use', async () => { + it('throws an error when passing an invalid context object', async () => { // Prepare const firstRecord = sqsRecordFactory('success'); const secondRecord = sqsRecordFactory('success'); @@ -289,7 +266,7 @@ describe('Class: AsyncBatchProcessor', () => { }); }); - test('When calling the sync process method, it should throw an error', () => { + it('throws an error when the sync process method is called', () => { // Prepare const processor = new BatchProcessor(EventType.SQS); diff --git a/packages/batch/tests/unit/BatchProcessorSync.test.ts b/packages/batch/tests/unit/BatchProcessorSync.test.ts index 07f9257a63..e341628da8 100644 --- a/packages/batch/tests/unit/BatchProcessorSync.test.ts +++ b/packages/batch/tests/unit/BatchProcessorSync.test.ts @@ -1,10 +1,6 @@ -/** - * Test BatchProcessorSync class - * - * @group unit/batch/class/batchprocessorsync - */ import context from '@aws-lambda-powertools/testing-utils/context'; import type { Context } from 'aws-lambda'; +import { afterAll, beforeEach, describe, expect, it, vi } from 'vitest'; import { BatchProcessingError, BatchProcessorSync, @@ -31,8 +27,7 @@ describe('Class: BatchProcessor', () => { }; beforeEach(() => { - jest.clearAllMocks(); - jest.resetModules(); + vi.clearAllMocks(); process.env = { ...ENVIRONMENT_VARIABLES }; }); @@ -41,7 +36,7 @@ describe('Class: BatchProcessor', () => { }); describe('Synchronously processing SQS Records', () => { - test('Batch processing SQS records with no failures', () => { + it('completes processing with no failures', () => { // Prepare const firstRecord = sqsRecordFactory('success'); const secondRecord = sqsRecordFactory('success'); @@ -59,7 +54,7 @@ describe('Class: BatchProcessor', () => { ]); }); - test('Batch processing SQS records with some failures', () => { + it('completes processing with some failures', () => { // Prepare const firstRecord = sqsRecordFactory('failure'); const secondRecord = sqsRecordFactory('success'); @@ -86,7 +81,7 @@ describe('Class: BatchProcessor', () => { }); }); - test('Batch processing SQS records with all failures', () => { + it('completes processing with all failures', () => { // Prepare const firstRecord = sqsRecordFactory('failure'); const secondRecord = sqsRecordFactory('failure'); @@ -102,7 +97,7 @@ describe('Class: BatchProcessor', () => { }); describe('Synchronously processing Kinesis Records', () => { - test('Batch processing Kinesis records with no failures', () => { + it('completes processing with no failures', async () => { // Prepare const firstRecord = kinesisRecordFactory('success'); const secondRecord = kinesisRecordFactory('success'); @@ -120,7 +115,7 @@ describe('Class: BatchProcessor', () => { ]); }); - test('Batch processing Kinesis records with some failures', () => { + it('completes processing with some failures', async () => { // Prepare const firstRecord = kinesisRecordFactory('failure'); const secondRecord = kinesisRecordFactory('success'); @@ -147,7 +142,7 @@ 
describe('Class: BatchProcessor', () => { }); }); - test('Batch processing Kinesis records with all failures', () => { + it('completes processing with all failures', async () => { const firstRecord = kinesisRecordFactory('failure'); const secondRecord = kinesisRecordFactory('failure'); const thirdRecord = kinesisRecordFactory('fail'); @@ -164,7 +159,7 @@ describe('Class: BatchProcessor', () => { }); describe('Synchronously processing DynamoDB Records', () => { - test('Batch processing DynamoDB records with no failures', () => { + it('completes processing with no failures', async () => { // Prepare const firstRecord = dynamodbRecordFactory('success'); const secondRecord = dynamodbRecordFactory('success'); @@ -182,7 +177,7 @@ describe('Class: BatchProcessor', () => { ]); }); - test('Batch processing DynamoDB records with some failures', () => { + it('completes processing with some failures', async () => { // Prepare const firstRecord = dynamodbRecordFactory('failure'); const secondRecord = dynamodbRecordFactory('success'); @@ -209,7 +204,7 @@ describe('Class: BatchProcessor', () => { }); }); - test('Batch processing DynamoDB records with all failures', () => { + it('completes processing with all failures', async () => { // Prepare const firstRecord = dynamodbRecordFactory('failure'); const secondRecord = dynamodbRecordFactory('failure'); @@ -227,7 +222,7 @@ describe('Class: BatchProcessor', () => { }); describe('Batch processing with Lambda context', () => { - test('Batch processing when context is provided and handler accepts', () => { + it('passes the context to the record handler', async () => { // Prepare const firstRecord = sqsRecordFactory('success'); const secondRecord = sqsRecordFactory('success'); @@ -245,7 +240,7 @@ describe('Class: BatchProcessor', () => { ]); }); - test('Batch processing when context is provided and handler does not accept', () => { + it('throws an error when passing an invalid context object', async () => { // Prepare const firstRecord = sqsRecordFactory('success'); const secondRecord = sqsRecordFactory('success'); @@ -262,23 +257,9 @@ describe('Class: BatchProcessor', () => { ['success', secondRecord.body, secondRecord], ]); }); - - test('Batch processing when malformed context is provided and handler attempts to use', () => { - // Prepare - const firstRecord = sqsRecordFactory('success'); - const secondRecord = sqsRecordFactory('success'); - const records = [firstRecord, secondRecord]; - const processor = new BatchProcessorSync(EventType.SQS); - const badContext = { foo: 'bar' }; - const badOptions = { context: badContext as unknown as Context }; - - // Act - processor.register(records, handlerWithContext, badOptions); - expect(() => processor.processSync()).toThrowError(FullBatchFailureError); - }); }); - test('When calling the async process method, it should throw an error', async () => { + it('throws an error when the async process method is called', async () => { // Prepare const processor = new BatchProcessorSync(EventType.SQS); diff --git a/packages/batch/tests/unit/SqsFifoPartialProcessor.test.ts b/packages/batch/tests/unit/SqsFifoPartialProcessor.test.ts index 4ef46e4f70..7eec6164de 100644 --- a/packages/batch/tests/unit/SqsFifoPartialProcessor.test.ts +++ b/packages/batch/tests/unit/SqsFifoPartialProcessor.test.ts @@ -1,8 +1,4 @@ -/** - * Test SqsFifoBatchProcessor class - * - * @group unit/batch/class/sqsfifobatchprocessor - */ +import { afterAll, beforeEach, describe, expect, it, vi } from 'vitest'; import { SqsFifoMessageGroupShortCircuitError, 
SqsFifoPartialProcessor, @@ -16,8 +12,7 @@ describe('Class: SqsFifoBatchProcessor', () => { const ENVIRONMENT_VARIABLES = process.env; beforeEach(() => { - jest.clearAllMocks(); - jest.resetModules(); + vi.clearAllMocks(); process.env = { ...ENVIRONMENT_VARIABLES }; }); @@ -26,7 +21,7 @@ describe('Class: SqsFifoBatchProcessor', () => { }); describe('Synchronous SQS FIFO batch processing', () => { - test('SQS FIFO Batch processor with no failures', () => { + it('completes processing with no failures', async () => { // Prepare const firstRecord = sqsRecordFactory('success'); const secondRecord = sqsRecordFactory('success'); @@ -44,7 +39,7 @@ describe('Class: SqsFifoBatchProcessor', () => { expect(result.batchItemFailures).toStrictEqual([]); }); - test('SQS FIFO Batch processor with failures', () => { + it('completes processing with some failures', async () => { // Prepare const firstRecord = sqsRecordFactory('success'); const secondRecord = sqsRecordFactory('fail'); @@ -70,7 +65,7 @@ describe('Class: SqsFifoBatchProcessor', () => { expect(processor.errors[1]).toBeInstanceOf(SqsFifoShortCircuitError); }); - test('When `skipGroupOnError` is true, SQS FIFO processor is set to continue processing even after first failure', () => { + it('continues processing and moves to the next group when `skipGroupOnError` is true', () => { // Prepare const firstRecord = sqsRecordFactory('fail', '1'); const secondRecord = sqsRecordFactory('success', '1'); @@ -121,101 +116,7 @@ describe('Class: SqsFifoBatchProcessor', () => { ); }); - test('When `skipGroupOnError` is true, SQS FIFO processor is set to continue processing even after encountering errors in specific MessageGroupID', () => { - // Prepare - const firstRecord = sqsRecordFactory('success', '1'); - const secondRecord = sqsRecordFactory('success', '1'); - const thirdRecord = sqsRecordFactory('fail', '2'); - const fourthRecord = sqsRecordFactory('success', '2'); - const fifthRecord = sqsRecordFactory('success', '3'); - const event = { - Records: [ - firstRecord, - secondRecord, - thirdRecord, - fourthRecord, - fifthRecord, - ], - }; - const processor = new SqsFifoPartialProcessor(); - - // Act - const result = processPartialResponseSync( - event, - sqsRecordHandler, - processor, - { - skipGroupOnError: true, - } - ); - - // Assess - expect(result.batchItemFailures.length).toBe(2); - expect(result.batchItemFailures[0].itemIdentifier).toBe( - thirdRecord.messageId - ); - expect(result.batchItemFailures[1].itemIdentifier).toBe( - fourthRecord.messageId - ); - expect(processor.errors.length).toBe(2); - expect(processor.errors[1]).toBeInstanceOf( - SqsFifoMessageGroupShortCircuitError - ); - }); - - test('When `skipGroupOnError` is true, SQS FIFO Batch processor processes everything with no failures', () => { - // Prepare - const firstRecord = sqsRecordFactory('success', '1'); - const secondRecord = sqsRecordFactory('success', '2'); - const thirdRecord = sqsRecordFactory('success', '3'); - const fourthRecord = sqsRecordFactory('success', '4'); - const event = { - Records: [firstRecord, secondRecord, thirdRecord, fourthRecord], - }; - const processor = new SqsFifoPartialProcessor(); - - // Act - const result = processPartialResponseSync( - event, - sqsRecordHandler, - processor, - { - skipGroupOnError: true, - } - ); - - // Assess - expect(result.batchItemFailures.length).toBe(0); - expect(processor.errors.length).toBe(0); - }); - - test('When `skipGroupOnError` is false, SQS FIFO Batch processor processes everything with no failures', () => { - // Prepare - 
const firstRecord = sqsRecordFactory('success', '1'); - const secondRecord = sqsRecordFactory('success', '2'); - const thirdRecord = sqsRecordFactory('success', '3'); - const fourthRecord = sqsRecordFactory('success', '4'); - const event = { - Records: [firstRecord, secondRecord, thirdRecord, fourthRecord], - }; - const processor = new SqsFifoPartialProcessor(); - - // Act - const result = processPartialResponseSync( - event, - sqsRecordHandler, - processor, - { - skipGroupOnError: false, - } - ); - - // Assess - expect(result.batchItemFailures.length).toBe(0); - expect(processor.errors.length).toBe(0); - }); - - test('When `skipGroupOnError` is false, SQS FIFO Batch processor short circuits the process on first failure', () => { + it('short circuits on the first failure when `skipGroupOnError` is false', () => { // Prepare const firstRecord = sqsRecordFactory('success', '1'); const secondRecord = sqsRecordFactory('fail', '2'); diff --git a/packages/batch/tests/unit/processPartialResponse.test.ts b/packages/batch/tests/unit/processPartialResponse.test.ts index 2ebd22d5ec..2b2ec185ad 100644 --- a/packages/batch/tests/unit/processPartialResponse.test.ts +++ b/packages/batch/tests/unit/processPartialResponse.test.ts @@ -1,8 +1,3 @@ -/** - * Test asyncProcessPartialResponse function - * - * @group unit/batch/function/asyncProcesspartialresponse - */ import assert from 'node:assert'; import context from '@aws-lambda-powertools/testing-utils/context'; import type { @@ -11,6 +6,7 @@ import type { KinesisStreamEvent, SQSEvent, } from 'aws-lambda'; +import { afterAll, beforeEach, describe, expect, it, vi } from 'vitest'; import { BatchProcessor, EventType, @@ -41,8 +37,7 @@ describe('Function: processPartialResponse()', () => { }; beforeEach(() => { - jest.clearAllMocks(); - jest.resetModules(); + vi.clearAllMocks(); process.env = { ...ENVIRONMENT_VARIABLES }; }); @@ -51,7 +46,7 @@ describe('Function: processPartialResponse()', () => { }); describe('Process partial response function call tests', () => { - test('Process partial response function call with asynchronous handler', async () => { + it('Process partial response function call with asynchronous handler', async () => { // Prepare const records = [ sqsRecordFactory('success'), @@ -71,7 +66,7 @@ describe('Function: processPartialResponse()', () => { expect(ret).toStrictEqual({ batchItemFailures: [] }); }); - test('Process partial response function call with context provided', async () => { + it('Process partial response function call with context provided', async () => { // Prepare const records = [ sqsRecordFactory('success'), @@ -92,7 +87,7 @@ describe('Function: processPartialResponse()', () => { expect(ret).toStrictEqual({ batchItemFailures: [] }); }); - test('Process partial response function call with asynchronous handler for full batch failure', async () => { + it('Process partial response function call with asynchronous handler for full batch failure', async () => { // Prepare const records = [sqsRecordFactory('fail'), sqsRecordFactory('fail')]; const batch = { Records: records }; @@ -104,7 +99,7 @@ describe('Function: processPartialResponse()', () => { ).rejects.toThrow(FullBatchFailureError); }); - test('Process partial response function call with asynchronous handler for full batch failure when `throwOnFullBatchFailure` is `true`', async () => { + it('Process partial response function call with asynchronous handler for full batch failure when `throwOnFullBatchFailure` is `true`', async () => { // Prepare const records = 
[sqsRecordFactory('fail'), sqsRecordFactory('fail')]; const batch = { Records: records }; @@ -119,7 +114,7 @@ describe('Function: processPartialResponse()', () => { ).rejects.toThrow(FullBatchFailureError); }); - test('Process partial response function call with asynchronous handler for full batch failure when `throwOnFullBatchFailure` is `false`', async () => { + it('Process partial response function call with asynchronous handler for full batch failure when `throwOnFullBatchFailure` is `false`', async () => { // Prepare const records = [sqsRecordFactory('fail'), sqsRecordFactory('fail')]; const batch = { Records: records }; @@ -147,7 +142,7 @@ describe('Function: processPartialResponse()', () => { }); describe('Process partial response function call through handler', () => { - test('Process partial response through handler with SQS event', async () => { + it('Process partial response through handler with SQS event', async () => { // Prepare const records = [ sqsRecordFactory('success'), @@ -170,7 +165,7 @@ describe('Function: processPartialResponse()', () => { expect(result).toStrictEqual({ batchItemFailures: [] }); }); - test('Process partial response through handler with Kinesis event', async () => { + it('Process partial response through handler with Kinesis event', async () => { // Prepare const records = [ kinesisRecordFactory('success'), @@ -197,7 +192,7 @@ describe('Function: processPartialResponse()', () => { expect(result).toStrictEqual({ batchItemFailures: [] }); }); - test('Process partial response through handler with DynamoDB event', async () => { + it('Process partial response through handler with DynamoDB event', async () => { // Prepare const records = [ dynamodbRecordFactory('success'), @@ -224,7 +219,7 @@ describe('Function: processPartialResponse()', () => { expect(result).toStrictEqual({ batchItemFailures: [] }); }); - test('Process partial response through handler for SQS records with incorrect event type', async () => { + it('Process partial response through handler for SQS records with incorrect event type', async () => { // Prepare const processor = new BatchProcessor(EventType.SQS); @@ -253,7 +248,7 @@ describe('Function: processPartialResponse()', () => { } }); - test('Process partial response through handler with context provided', async () => { + it('Process partial response through handler with context provided', async () => { // Prepare const records = [ sqsRecordFactory('success'), @@ -283,7 +278,7 @@ describe('Function: processPartialResponse()', () => { expect(result).toStrictEqual({ batchItemFailures: [] }); }); - test('Process partial response through handler for full batch failure', async () => { + it('Process partial response through handler for full batch failure', async () => { // Prepare const records = [sqsRecordFactory('fail'), sqsRecordFactory('fail')]; const processor = new BatchProcessor(EventType.SQS); @@ -302,7 +297,7 @@ describe('Function: processPartialResponse()', () => { ); }); - test('Process partial response through handler for full batch failure when `throwOnFullBatchFailure` is `true`', async () => { + it('Process partial response through handler for full batch failure when `throwOnFullBatchFailure` is `true`', async () => { // Prepare const records = [sqsRecordFactory('fail'), sqsRecordFactory('fail')]; const processor = new BatchProcessor(EventType.SQS); @@ -324,7 +319,7 @@ describe('Function: processPartialResponse()', () => { ); }); - test('Process partial response through handler for full batch failure when 
`throwOnFullBatchFailure` is `false`', async () => { + it('Process partial response through handler for full batch failure when `throwOnFullBatchFailure` is `false`', async () => { // Prepare const records = [sqsRecordFactory('fail'), sqsRecordFactory('fail')]; const processor = new BatchProcessor(EventType.SQS); diff --git a/packages/batch/tests/unit/processPartialResponseSync.test.ts b/packages/batch/tests/unit/processPartialResponseSync.test.ts index 56519ec63b..b274f6f8f6 100644 --- a/packages/batch/tests/unit/processPartialResponseSync.test.ts +++ b/packages/batch/tests/unit/processPartialResponseSync.test.ts @@ -1,9 +1,3 @@ -/** - * Test processPartialResponse function - * - * @group unit/batch/function/processpartialresponse - */ -import assert from 'node:assert'; import context from '@aws-lambda-powertools/testing-utils/context'; import type { Context, @@ -11,6 +5,7 @@ import type { KinesisStreamEvent, SQSEvent, } from 'aws-lambda'; +import { afterAll, beforeEach, describe, expect, it, vi } from 'vitest'; import { BatchProcessorSync, EventType, @@ -41,8 +36,7 @@ describe('Function: processPartialResponse()', () => { }; beforeEach(() => { - jest.clearAllMocks(); - jest.resetModules(); + vi.clearAllMocks(); process.env = { ...ENVIRONMENT_VARIABLES }; }); @@ -51,7 +45,7 @@ describe('Function: processPartialResponse()', () => { }); describe('Process partial response function call tests', () => { - test('Process partial response function call with synchronous handler', () => { + it('Process partial response function call with synchronous handler', () => { // Prepare const records = [ sqsRecordFactory('success'), @@ -71,7 +65,7 @@ describe('Function: processPartialResponse()', () => { expect(ret).toStrictEqual({ batchItemFailures: [] }); }); - test('Process partial response function call with context provided', () => { + it('Process partial response function call with context provided', () => { // Prepare const records = [ sqsRecordFactory('success'), @@ -92,7 +86,7 @@ describe('Function: processPartialResponse()', () => { expect(ret).toStrictEqual({ batchItemFailures: [] }); }); - test('Process partial response function call with synchronous handler for full batch failure', () => { + it('Process partial response function call with synchronous handler for full batch failure', () => { // Prepare const records = [sqsRecordFactory('fail'), sqsRecordFactory('fail')]; const batch = { Records: records }; @@ -104,7 +98,7 @@ describe('Function: processPartialResponse()', () => { ).toThrow(FullBatchFailureError); }); - test('Process partial response function call with synchronous handler for full batch failure when `throwOnFullBatchFailure` is `true`', () => { + it('Process partial response function call with synchronous handler for full batch failure when `throwOnFullBatchFailure` is `true`', () => { // Prepare const records = [sqsRecordFactory('fail'), sqsRecordFactory('fail')]; const batch = { Records: records }; @@ -119,7 +113,7 @@ describe('Function: processPartialResponse()', () => { ).toThrow(FullBatchFailureError); }); - test('Process partial response function call with synchronous handler for full batch failure when `throwOnFullBatchFailure` is `false`', () => { + it('Process partial response function call with synchronous handler for full batch failure when `throwOnFullBatchFailure` is `false`', () => { // Prepare const records = [sqsRecordFactory('fail'), sqsRecordFactory('fail')]; const batch = { Records: records }; @@ -147,7 +141,7 @@ describe('Function: processPartialResponse()', () 
=> { }); describe('Process partial response function call through handler', () => { - test('Process partial response through handler with SQS event', () => { + it('Process partial response through handler with SQS event', () => { // Prepare const records = [ sqsRecordFactory('success'), @@ -170,7 +164,7 @@ describe('Function: processPartialResponse()', () => { expect(result).toStrictEqual({ batchItemFailures: [] }); }); - test('Process partial response through handler with Kinesis event', () => { + it('Process partial response through handler with Kinesis event', () => { // Prepare const records = [ kinesisRecordFactory('success'), @@ -197,7 +191,7 @@ describe('Function: processPartialResponse()', () => { expect(result).toStrictEqual({ batchItemFailures: [] }); }); - test('Process partial response through handler with DynamoDB event', () => { + it('Process partial response through handler with DynamoDB event', () => { // Prepare const records = [ dynamodbRecordFactory('success'), @@ -224,7 +218,7 @@ describe('Function: processPartialResponse()', () => { expect(result).toStrictEqual({ batchItemFailures: [] }); }); - test('Process partial response through handler for SQS records with incorrect event type', () => { + it('Process partial response through handler for SQS records with incorrect event type', () => { // Prepare const processor = new BatchProcessorSync(EventType.SQS); @@ -240,8 +234,8 @@ describe('Function: processPartialResponse()', () => { handler({} as unknown as SQSEvent, context); } catch (error) { // Assess - assert(error instanceof UnexpectedBatchTypeError); - expect(error.message).toBe( + expect(error).toBeInstanceOf(UnexpectedBatchTypeError); + expect((error as Error).message).toBe( `Unexpected batch type. Possible values are: ${Object.keys( EventType ).join(', ')}` @@ -249,7 +243,7 @@ describe('Function: processPartialResponse()', () => { } }); - test('Process partial response through handler with context provided', () => { + it('Process partial response through handler with context provided', () => { // Prepare const records = [ sqsRecordFactory('success'), @@ -279,7 +273,7 @@ describe('Function: processPartialResponse()', () => { expect(result).toStrictEqual({ batchItemFailures: [] }); }); - test('Process partial response through handler for full batch failure', () => { + it('Process partial response through handler for full batch failure', () => { // Prepare const records = [sqsRecordFactory('fail'), sqsRecordFactory('fail')]; const processor = new BatchProcessorSync(EventType.SQS); @@ -296,7 +290,7 @@ describe('Function: processPartialResponse()', () => { expect(() => handler(event, context)).toThrow(FullBatchFailureError); }); - test('Process partial response through handler for full batch failure when `throwOnFullBatchFailure` is `true`', () => { + it('Process partial response through handler for full batch failure when `throwOnFullBatchFailure` is `true`', () => { // Prepare const records = [sqsRecordFactory('fail'), sqsRecordFactory('fail')]; const processor = new BatchProcessorSync(EventType.SQS); @@ -316,7 +310,7 @@ describe('Function: processPartialResponse()', () => { expect(() => handler(event, context)).toThrow(FullBatchFailureError); }); - test('Process partial response through handler for full batch failure when `throwOnFullBatchFailure` is `false`', () => { + it('Process partial response through handler for full batch failure when `throwOnFullBatchFailure` is `false`', () => { // Prepare const records = [sqsRecordFactory('fail'), 
sqsRecordFactory('fail')]; const processor = new BatchProcessorSync(EventType.SQS); diff --git a/packages/batch/vitest.config.ts b/packages/batch/vitest.config.ts new file mode 100644 index 0000000000..d5aa737c68 --- /dev/null +++ b/packages/batch/vitest.config.ts @@ -0,0 +1,7 @@ +import { defineProject } from 'vitest/config'; + +export default defineProject({ + test: { + environment: 'node', + }, +}); diff --git a/vitest.config.ts b/vitest.config.ts new file mode 100644 index 0000000000..eca83dc65c --- /dev/null +++ b/vitest.config.ts @@ -0,0 +1,30 @@ +import { coverageConfigDefaults, defineConfig } from 'vitest/config'; + +export default defineConfig({ + test: { + coverage: { + provider: 'v8', + thresholds: { + statements: 100, + branches: 100, + functions: 100, + lines: 100, + }, + include: ['packages/*/src/**'], + exclude: [ + ...coverageConfigDefaults.exclude, + 'packages/commons/**', + 'packages/batch/src/types.ts', + 'packages/event-handler/**', + 'packages/idempotency/**', + 'packages/jmespath/**', + 'packages/logger/**', + 'packages/metrics/**', + 'packages/parameters/**', + 'packages/parser/**', + 'packages/testing/**', + 'packages/tracer/**', + ], + }, + }, +}); diff --git a/vitest.workspace.ts b/vitest.workspace.ts new file mode 100644 index 0000000000..32fc3c80be --- /dev/null +++ b/vitest.workspace.ts @@ -0,0 +1 @@ +export default ['packages/*/vitest.config.ts'];
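Below is a minimal sketch of how another workspace package could adopt the same Vitest setup introduced by this diff once its tests are migrated. The package path and the optional `setupFiles` entry are illustrative assumptions, not part of this change set; the root `vitest.config.ts` above keeps supplying the v8 coverage provider and 100% thresholds, and `vitest.workspace.ts` picks up any `packages/*/vitest.config.ts` automatically.

```ts
// Hypothetical packages/<utility>/vitest.config.ts, mirroring the
// packages/batch/vitest.config.ts added in this diff.
import { defineProject } from 'vitest/config';

export default defineProject({
  test: {
    environment: 'node',
    // If a package still relies on pre-seeded environment variables (the role the
    // removed tests/helpers/populateEnvironmentVariables.ts played under Jest),
    // an equivalent Vitest setup file could be listed here instead:
    // setupFiles: ['./tests/helpers/populateEnvironmentVariables.ts'],
  },
});
```

With such a file in place, the package-level `test:unit:coverage` script (`vitest --run --coverage.enabled --coverage.thresholds.100 --coverage.include='src/**'`) and the `npx vitest --run --coverage --changed="$(git merge-base HEAD main)"` pre-push hook added above should both discover the project through `vitest.workspace.ts`.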