diff --git a/.github/scripts/get_pr_info.js b/.github/scripts/get_pr_info.js deleted file mode 100644 index ece2f97f44..0000000000 --- a/.github/scripts/get_pr_info.js +++ /dev/null @@ -1,30 +0,0 @@ -module.exports = async ({ github, context, core }) => { - const prNumber = process.env.PR_NUMBER; - - if (prNumber === '') { - core.setFailed(`No PR number was passed. Aborting`); - } - - // Remove the `#` prefix from the PR number if it exists - const prNumberWithoutPrefix = prNumber.replace('#', ''); - - try { - const { - data: { head, base }, - } = await github.rest.pulls.get({ - owner: context.repo.owner, - repo: context.repo.repo, - pull_number: prNumberWithoutPrefix, - }); - - core.setOutput('headRef', head.ref); - core.setOutput('headSHA', head.sha); - core.setOutput('baseRef', base.ref); - core.setOutput('baseSHA', base.sha); - } catch (error) { - core.setFailed( - `Unable to retrieve info from PR number ${prNumber}.\n\n Error details: ${error}` - ); - throw error; - } -}; diff --git a/.github/workflows/run-e2e-tests.yml b/.github/workflows/run-e2e-tests.yml index 4a9145fd19..0601b33d7f 100644 --- a/.github/workflows/run-e2e-tests.yml +++ b/.github/workflows/run-e2e-tests.yml @@ -17,20 +17,21 @@ jobs: env: NODE_ENV: dev PR_NUMBER: ${{ inputs.prNumber }} + GH_TOKEN: ${{ github.token }} permissions: id-token: write # needed to interact with GitHub's OIDC Token endpoint. 
contents: read strategy: - max-parallel: 30 + max-parallel: 25 matrix: package: [ - layers, + packages/idempotency, packages/logger, packages/metrics, - packages/tracer, packages/parameters, - packages/idempotency, + packages/tracer, + layers, ] version: [18, 20, 22] arch: [x86_64, arm64] @@ -42,11 +43,14 @@ jobs: - name: Extract PR details id: extract_PR_details if: ${{ inputs.prNumber != '' }} - uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1 - with: - script: | - const script = require('.github/scripts/get_pr_info.js'); - await script({github, context, core}); + run: | + # Get the PR number from the input + pr_number=${{ inputs.prNumber }} + # Get the headSHA of the PR + head_sha=$(gh pr view $pr_number --json headRefOid -q '.headRefOid') + # Set the headSHA as an output variable + echo "headSHA=$head_sha" >> $GITHUB_OUTPUT + echo "headSHA=$head_sha" # Only if a PR Number was passed and the headSHA of the PR extracted, # we checkout the PR at that point in time - name: Checkout PR code @@ -75,4 +79,4 @@ jobs: ARCH: ${{ matrix.arch }} JSII_SILENCE_WARNING_DEPRECATED_NODE_VERSION: true RUNNER_DEBUG: ${{ env.RUNNER_DEBUG }} - run: npm run test:e2e -w ${{ matrix.package }} + run: npm run test:e2e -w ${{ matrix.package }} \ No newline at end of file diff --git a/layers/tests/e2e/constants.ts b/layers/tests/e2e/constants.ts index b6b477031b..e8c5d84251 100644 --- a/layers/tests/e2e/constants.ts +++ b/layers/tests/e2e/constants.ts @@ -1,5 +1 @@ export const RESOURCE_NAME_PREFIX = 'Layers-E2E'; -export const ONE_MINUTE = 60 * 1000; -export const TEST_CASE_TIMEOUT = 3 * ONE_MINUTE; -export const SETUP_TIMEOUT = 7 * ONE_MINUTE; -export const TEARDOWN_TIMEOUT = 5 * ONE_MINUTE; diff --git a/layers/tests/e2e/layerPublisher.test.ts b/layers/tests/e2e/layerPublisher.test.ts index cbffc8abfa..1f932ca98a 100644 --- a/layers/tests/e2e/layerPublisher.test.ts +++ b/layers/tests/e2e/layerPublisher.test.ts @@ -11,11 +11,7 @@ import { LayerVersion } from 
'aws-cdk-lib/aws-lambda'; import { afterAll, beforeAll, describe, expect, it, vi } from 'vitest'; import packageJson from '../../package.json'; import { LayerPublisherStack } from '../../src/layer-publisher-stack.js'; -import { - RESOURCE_NAME_PREFIX, - SETUP_TIMEOUT, - TEARDOWN_TIMEOUT, -} from './constants.js'; +import { RESOURCE_NAME_PREFIX } from './constants.js'; /** * This test has two stacks: @@ -121,7 +117,7 @@ describe('Layers E2E tests', () => { }) ); } - }, SETUP_TIMEOUT); + }); it.each(cases)( 'imports and instantiates all utilities (%s)', @@ -198,5 +194,5 @@ describe('Layers E2E tests', () => { await testLayerStack.destroy(); await testStack.destroy(); } - }, TEARDOWN_TIMEOUT); + }); }); diff --git a/layers/vitest.config.ts b/layers/vitest.config.ts index d5aa737c68..1469ff8a66 100644 --- a/layers/vitest.config.ts +++ b/layers/vitest.config.ts @@ -3,5 +3,7 @@ import { defineProject } from 'vitest/config'; export default defineProject({ test: { environment: 'node', + hookTimeout: 1_000 * 60 * 10, // 10 minutes + testTimeout: 1_000 * 60 * 3, // 3 minutes }, }); diff --git a/package-lock.json b/package-lock.json index c7d28b2159..df85f91675 100644 --- a/package-lock.json +++ b/package-lock.json @@ -41,7 +41,7 @@ "typedoc-plugin-missing-exports": "^3.1.0", "typedoc-plugin-zod": "^1.4.0", "typescript": "^5.8.2", - "vitest": "^3.0.5" + "vitest": "^3.0.9" }, "engines": { "node": ">=18" diff --git a/package.json b/package.json index 874e3d337b..bef9e2663d 100644 --- a/package.json +++ b/package.json @@ -65,7 +65,7 @@ "typedoc-plugin-missing-exports": "^3.1.0", "typedoc-plugin-zod": "^1.4.0", "typescript": "^5.8.2", - "vitest": "^3.0.5" + "vitest": "^3.0.9" }, "lint-staged": { "*.{js,ts}": "biome check --write", diff --git a/packages/idempotency/tests/e2e/constants.ts b/packages/idempotency/tests/e2e/constants.ts index 38ae9b6c83..e9cec8eadc 100644 --- a/packages/idempotency/tests/e2e/constants.ts +++ b/packages/idempotency/tests/e2e/constants.ts @@ -1,6 +1 @@ 
export const RESOURCE_NAME_PREFIX = 'Idempotency'; - -export const ONE_MINUTE = 60 * 1_000; -export const TEARDOWN_TIMEOUT = 5 * ONE_MINUTE; -export const SETUP_TIMEOUT = 7 * ONE_MINUTE; -export const TEST_CASE_TIMEOUT = 5 * ONE_MINUTE; diff --git a/packages/idempotency/tests/e2e/idempotentDecorator.test.ts b/packages/idempotency/tests/e2e/idempotentDecorator.test.ts index 00f0aa236d..9c83ab5417 100644 --- a/packages/idempotency/tests/e2e/idempotentDecorator.test.ts +++ b/packages/idempotency/tests/e2e/idempotentDecorator.test.ts @@ -11,12 +11,7 @@ import { Duration } from 'aws-cdk-lib'; import { AttributeType } from 'aws-cdk-lib/aws-dynamodb'; import { afterAll, beforeAll, describe, expect, it } from 'vitest'; import { IdempotencyTestNodejsFunctionAndDynamoTable } from '../helpers/resources.js'; -import { - RESOURCE_NAME_PREFIX, - SETUP_TIMEOUT, - TEARDOWN_TIMEOUT, - TEST_CASE_TIMEOUT, -} from './constants.js'; +import { RESOURCE_NAME_PREFIX } from './constants.js'; const dynamoDBClient = new DynamoDBClient({}); @@ -160,315 +155,289 @@ describe('Idempotency e2e test decorator, default settings', () => { functionNameDataIndex = testStack.findAndGetStackOutputValue('dataIndexFn'); tableNameDataIndex = testStack.findAndGetStackOutputValue('dataIndexTable'); - }, SETUP_TIMEOUT); + }); - it( - 'returns the same result and runs the handler once when called multiple times', - async () => { - const payload = { foo: 'bar' }; + it('returns the same result and runs the handler once when called multiple times', async () => { + const payload = { foo: 'bar' }; - const payloadHash = createHash('md5') - .update(JSON.stringify(payload)) - .digest('base64'); + const payloadHash = createHash('md5') + .update(JSON.stringify(payload)) + .digest('base64'); - const logs = await invokeFunction({ - functionName: functionNameDefault, - times: 2, - invocationMode: 'SEQUENTIAL', - payload: payload, - }); + const logs = await invokeFunction({ + functionName: functionNameDefault, + times: 2, + 
invocationMode: 'SEQUENTIAL', + payload: payload, + }); - const functionLogs = logs.map((log) => log.getFunctionLogs()); + const functionLogs = logs.map((log) => log.getFunctionLogs()); - const idempotencyRecord = await dynamoDBClient.send( - new ScanCommand({ - TableName: tableNameDefault, - }) - ); - expect(idempotencyRecord.Items).toHaveLength(1); - expect(idempotencyRecord.Items?.[0].id).toEqual( - `${functionNameDefault}#${payloadHash}` - ); - expect(idempotencyRecord.Items?.[0].data).toBeUndefined(); - expect(idempotencyRecord.Items?.[0].status).toEqual('COMPLETED'); - // During the first invocation the handler should be called, so the logs should contain 1 log - expect(functionLogs[0]).toHaveLength(1); - // We test the content of the log as well as the presence of fields from the context, this - // ensures that the all the arguments are passed to the handler when made idempotent - expect(TestInvocationLogs.parseFunctionLog(functionLogs[0][0])).toEqual( - expect.objectContaining({ - message: 'Got test event: {"foo":"bar"}', - }) - ); - }, - TEST_CASE_TIMEOUT - ); + const idempotencyRecord = await dynamoDBClient.send( + new ScanCommand({ + TableName: tableNameDefault, + }) + ); + expect(idempotencyRecord.Items).toHaveLength(1); + expect(idempotencyRecord.Items?.[0].id).toEqual( + `${functionNameDefault}#${payloadHash}` + ); + expect(idempotencyRecord.Items?.[0].data).toBeUndefined(); + expect(idempotencyRecord.Items?.[0].status).toEqual('COMPLETED'); + // During the first invocation the handler should be called, so the logs should contain 1 log + expect(functionLogs[0]).toHaveLength(1); + // We test the content of the log as well as the presence of fields from the context, this + // ensures that the all the arguments are passed to the handler when made idempotent + expect(TestInvocationLogs.parseFunctionLog(functionLogs[0][0])).toEqual( + expect.objectContaining({ + message: 'Got test event: {"foo":"bar"}', + }) + ); + }); - it( - 'handles parallel invocations 
correctly', - async () => { - const payload = { foo: 'bar' }; - const payloadHash = createHash('md5') - .update(JSON.stringify(payload)) - .digest('base64'); - const logs = await invokeFunction({ - functionName: functionNameDefaultParallel, - times: 2, - invocationMode: 'PARALLEL', - payload: payload, - }); - - const functionLogs = logs.map((log) => log.getFunctionLogs()); - - const idempotencyRecords = await dynamoDBClient.send( - new ScanCommand({ - TableName: tableNameDefaultParallel, - }) - ); - expect(idempotencyRecords.Items).toHaveLength(1); - expect(idempotencyRecords.Items?.[0].id).toEqual( - `${functionNameDefaultParallel}#${payloadHash}` - ); - expect(idempotencyRecords.Items?.[0].data).toEqual('bar'); - expect(idempotencyRecords.Items?.[0].status).toEqual('COMPLETED'); - expect(idempotencyRecords?.Items?.[0].expiration).toBeGreaterThan( - Date.now() / 1000 - ); - const successfulInvocationLogs = functionLogs.find( - (functionLog) => - functionLog.toString().includes('Processed event') !== undefined - ); - - const failedInvocationLogs = functionLogs.find( - (functionLog) => - functionLog - .toString() - .includes('There is already an execution in progres') !== undefined - ); - - expect(successfulInvocationLogs).toBeDefined(); - expect(failedInvocationLogs).toBeDefined(); - }, - TEST_CASE_TIMEOUT - ); + it('handles parallel invocations correctly', async () => { + const payload = { foo: 'bar' }; + const payloadHash = createHash('md5') + .update(JSON.stringify(payload)) + .digest('base64'); + const logs = await invokeFunction({ + functionName: functionNameDefaultParallel, + times: 2, + invocationMode: 'PARALLEL', + payload: payload, + }); + + const functionLogs = logs.map((log) => log.getFunctionLogs()); + + const idempotencyRecords = await dynamoDBClient.send( + new ScanCommand({ + TableName: tableNameDefaultParallel, + }) + ); + expect(idempotencyRecords.Items).toHaveLength(1); + expect(idempotencyRecords.Items?.[0].id).toEqual( + 
`${functionNameDefaultParallel}#${payloadHash}` + ); + expect(idempotencyRecords.Items?.[0].data).toEqual('bar'); + expect(idempotencyRecords.Items?.[0].status).toEqual('COMPLETED'); + expect(idempotencyRecords?.Items?.[0].expiration).toBeGreaterThan( + Date.now() / 1000 + ); + const successfulInvocationLogs = functionLogs.find( + (functionLog) => + functionLog.toString().includes('Processed event') !== undefined + ); - it( - 'recovers from a timed out request and processes the next one', - async () => { - const payload = { foo: 'bar' }; - const payloadHash = createHash('md5') - .update(JSON.stringify(payload.foo)) - .digest('base64'); - - const logs = await invokeFunction({ - functionName: functionNameTimeout, - times: 2, - invocationMode: 'SEQUENTIAL', - payload: Array.from({ length: 2 }, (_, index) => ({ - ...payload, - invocation: index, - })), - }); - const functionLogs = logs.map((log) => log.getFunctionLogs()); - const idempotencyRecord = await dynamoDBClient.send( - new ScanCommand({ - TableName: tableNameTimeout, - }) - ); - expect(idempotencyRecord.Items).toHaveLength(1); - expect(idempotencyRecord.Items?.[0].id).toEqual( - `${functionNameTimeout}#${payloadHash}` - ); - expect(idempotencyRecord.Items?.[0].data).toEqual({ - ...payload, - invocation: 1, - }); - expect(idempotencyRecord.Items?.[0].status).toEqual('COMPLETED'); - - try { - // During the first invocation the handler should be called, so the logs should contain 1 log - expect(functionLogs[0]).toHaveLength(2); - expect(functionLogs[0][0]).toContain('Task timed out after'); - } catch { - // During the first invocation the function should timeout so the logs should not contain any log and the report log should contain a timeout message - expect(functionLogs[0]).toHaveLength(0); - expect(logs[0].getReportLog()).toMatch(/Status: timeout$/); - } - - expect(functionLogs[1]).toHaveLength(1); - expect(TestInvocationLogs.parseFunctionLog(functionLogs[1][0])).toEqual( - expect.objectContaining({ - 
message: 'Processed event', - details: 'bar', - function_name: functionNameTimeout, - }) - ); - }, - TEST_CASE_TIMEOUT - ); + const failedInvocationLogs = functionLogs.find( + (functionLog) => + functionLog + .toString() + .includes('There is already an execution in progres') !== undefined + ); - it( - 'recovers from an expired idempotency record and processes the next request', - async () => { - const payload = { - foo: 'baz', - }; - const payloadHash = createHash('md5') - .update(JSON.stringify(payload.foo)) - .digest('base64'); - - // Act - const logs = [ - ( - await invokeFunction({ - functionName: functionNameExpired, - times: 1, - invocationMode: 'SEQUENTIAL', - payload: { ...payload, invocation: 0 }, - }) - )[0], - ]; - // Wait for the idempotency record to expire - await new Promise((resolve) => setTimeout(resolve, 2000)); - logs.push( - ( - await invokeFunction({ - functionName: functionNameExpired, - times: 1, - invocationMode: 'SEQUENTIAL', - payload: { ...payload, invocation: 1 }, - }) - )[0] - ); - const functionLogs = logs.map((log) => log.getFunctionLogs()); - - // Assess - const idempotencyRecords = await dynamoDBClient.send( - new ScanCommand({ - TableName: tableNameExpired, - }) - ); - expect(idempotencyRecords.Items).toHaveLength(1); - expect(idempotencyRecords.Items?.[0].id).toEqual( - `${functionNameExpired}#${payloadHash}` - ); - expect(idempotencyRecords.Items?.[0].data).toEqual({ + expect(successfulInvocationLogs).toBeDefined(); + expect(failedInvocationLogs).toBeDefined(); + }); + + it('recovers from a timed out request and processes the next one', async () => { + const payload = { foo: 'bar' }; + const payloadHash = createHash('md5') + .update(JSON.stringify(payload.foo)) + .digest('base64'); + + const logs = await invokeFunction({ + functionName: functionNameTimeout, + times: 2, + invocationMode: 'SEQUENTIAL', + payload: Array.from({ length: 2 }, (_, index) => ({ ...payload, - invocation: 1, - }); - 
expect(idempotencyRecords.Items?.[0].status).toEqual('COMPLETED'); - - // Both invocations should be successful and the logs should contain 1 log each - expect(functionLogs[0]).toHaveLength(1); - expect(TestInvocationLogs.parseFunctionLog(functionLogs[1][0])).toEqual( - expect.objectContaining({ - message: 'Processed event', - details: 'baz', - function_name: functionNameExpired, - }) - ); - // During the second invocation the handler should be called and complete, so the logs should - // contain 1 log - expect(functionLogs[1]).toHaveLength(1); - expect(TestInvocationLogs.parseFunctionLog(functionLogs[1][0])).toEqual( - expect.objectContaining({ - message: 'Processed event', - details: 'baz', - function_name: functionNameExpired, - }) - ); - }, - TEST_CASE_TIMEOUT - ); + invocation: index, + })), + }); + const functionLogs = logs.map((log) => log.getFunctionLogs()); + const idempotencyRecord = await dynamoDBClient.send( + new ScanCommand({ + TableName: tableNameTimeout, + }) + ); + expect(idempotencyRecord.Items).toHaveLength(1); + expect(idempotencyRecord.Items?.[0].id).toEqual( + `${functionNameTimeout}#${payloadHash}` + ); + expect(idempotencyRecord.Items?.[0].data).toEqual({ + ...payload, + invocation: 1, + }); + expect(idempotencyRecord.Items?.[0].status).toEqual('COMPLETED'); - it( - 'uses the provided custom idempotency record attributes', - async () => { - const payload = { foo: 'bar' }; - const payloadHash = createHash('md5') - .update(JSON.stringify(payload)) - .digest('base64'); - const logs = await invokeFunction({ - functionName: functionCustomConfig, - times: 1, - invocationMode: 'SEQUENTIAL', - payload: payload, - }); - - const functionLogs = logs.map((log) => log.getFunctionLogs()); - - const idempotencyRecord = await dynamoDBClient.send( - new ScanCommand({ - TableName: tableNameCustomConfig, - }) - ); - expect(idempotencyRecord.Items?.[0]).toStrictEqual({ - customId: `${functionCustomConfig}#${payloadHash}`, - dataAttr: 'bar', - statusAttr: 
'COMPLETED', - expiryAttr: expect.any(Number), - inProgressExpiryAttr: expect.any(Number), - }); - - expect(functionLogs[0]).toHaveLength(1); - expect(TestInvocationLogs.parseFunctionLog(functionLogs[0][0])).toEqual( - expect.objectContaining({ - message: 'Processed event', - details: 'bar', - }) - ); - }, - TEST_CASE_TIMEOUT - ); + try { + // During the first invocation the handler should be called, so the logs should contain 1 log + expect(functionLogs[0]).toHaveLength(2); + expect(functionLogs[0][0]).toContain('Task timed out after'); + } catch { + // During the first invocation the function should timeout so the logs should not contain any log and the report log should contain a timeout message + expect(functionLogs[0]).toHaveLength(0); + expect(logs[0].getReportLog()).toMatch(/Status: timeout$/); + } - it( - 'takes the data index argument into account when making the function idempotent', - async () => { - const payload = [{ id: '1234' }, { id: '5678' }]; - const payloadHash = createHash('md5') - .update(JSON.stringify('bar')) - .digest('base64'); - - const logs = await invokeFunction({ - functionName: functionNameDataIndex, - times: 2, - invocationMode: 'SEQUENTIAL', - payload: payload, - }); - - const functionLogs = logs.map((log) => log.getFunctionLogs()); - - const idempotencyRecord = await dynamoDBClient.send( - new ScanCommand({ - TableName: tableNameDataIndex, + expect(functionLogs[1]).toHaveLength(1); + expect(TestInvocationLogs.parseFunctionLog(functionLogs[1][0])).toEqual( + expect.objectContaining({ + message: 'Processed event', + details: 'bar', + function_name: functionNameTimeout, + }) + ); + }); + + it('recovers from an expired idempotency record and processes the next request', async () => { + const payload = { + foo: 'baz', + }; + const payloadHash = createHash('md5') + .update(JSON.stringify(payload.foo)) + .digest('base64'); + + // Act + const logs = [ + ( + await invokeFunction({ + functionName: functionNameExpired, + times: 1, + 
invocationMode: 'SEQUENTIAL', + payload: { ...payload, invocation: 0 }, }) - ); - expect(idempotencyRecord.Items).toHaveLength(1); - expect(idempotencyRecord.Items?.[0].id).toEqual( - `${functionNameDataIndex}#${payloadHash}` - ); - expect(idempotencyRecord.Items?.[0].data).toEqual( - 'idempotent result: bar' - ); - expect(idempotencyRecord.Items?.[0].status).toEqual('COMPLETED'); - // During the first invocation the handler should be called, so the logs should contain 1 log - expect(functionLogs[0]).toHaveLength(1); - // We test the content of the log as well as the presence of fields from the context, this - // ensures that the all the arguments are passed to the handler when made idempotent - expect(TestInvocationLogs.parseFunctionLog(functionLogs[0][0])).toEqual( - expect.objectContaining({ - message: 'Got test event', - id: '1234', - foo: 'bar', + )[0], + ]; + // Wait for the idempotency record to expire + await new Promise((resolve) => setTimeout(resolve, 2000)); + logs.push( + ( + await invokeFunction({ + functionName: functionNameExpired, + times: 1, + invocationMode: 'SEQUENTIAL', + payload: { ...payload, invocation: 1 }, }) - ); - }, - TEST_CASE_TIMEOUT - ); + )[0] + ); + const functionLogs = logs.map((log) => log.getFunctionLogs()); + + // Assess + const idempotencyRecords = await dynamoDBClient.send( + new ScanCommand({ + TableName: tableNameExpired, + }) + ); + expect(idempotencyRecords.Items).toHaveLength(1); + expect(idempotencyRecords.Items?.[0].id).toEqual( + `${functionNameExpired}#${payloadHash}` + ); + expect(idempotencyRecords.Items?.[0].data).toEqual({ + ...payload, + invocation: 1, + }); + expect(idempotencyRecords.Items?.[0].status).toEqual('COMPLETED'); + + // Both invocations should be successful and the logs should contain 1 log each + expect(functionLogs[0]).toHaveLength(1); + expect(TestInvocationLogs.parseFunctionLog(functionLogs[1][0])).toEqual( + expect.objectContaining({ + message: 'Processed event', + details: 'baz', + 
function_name: functionNameExpired, + }) + ); + // During the second invocation the handler should be called and complete, so the logs should + // contain 1 log + expect(functionLogs[1]).toHaveLength(1); + expect(TestInvocationLogs.parseFunctionLog(functionLogs[1][0])).toEqual( + expect.objectContaining({ + message: 'Processed event', + details: 'baz', + function_name: functionNameExpired, + }) + ); + }); + + it('uses the provided custom idempotency record attributes', async () => { + const payload = { foo: 'bar' }; + const payloadHash = createHash('md5') + .update(JSON.stringify(payload)) + .digest('base64'); + const logs = await invokeFunction({ + functionName: functionCustomConfig, + times: 1, + invocationMode: 'SEQUENTIAL', + payload: payload, + }); + + const functionLogs = logs.map((log) => log.getFunctionLogs()); + + const idempotencyRecord = await dynamoDBClient.send( + new ScanCommand({ + TableName: tableNameCustomConfig, + }) + ); + expect(idempotencyRecord.Items?.[0]).toStrictEqual({ + customId: `${functionCustomConfig}#${payloadHash}`, + dataAttr: 'bar', + statusAttr: 'COMPLETED', + expiryAttr: expect.any(Number), + inProgressExpiryAttr: expect.any(Number), + }); + + expect(functionLogs[0]).toHaveLength(1); + expect(TestInvocationLogs.parseFunctionLog(functionLogs[0][0])).toEqual( + expect.objectContaining({ + message: 'Processed event', + details: 'bar', + }) + ); + }); + + it('takes the data index argument into account when making the function idempotent', async () => { + const payload = [{ id: '1234' }, { id: '5678' }]; + const payloadHash = createHash('md5') + .update(JSON.stringify('bar')) + .digest('base64'); + + const logs = await invokeFunction({ + functionName: functionNameDataIndex, + times: 2, + invocationMode: 'SEQUENTIAL', + payload: payload, + }); + + const functionLogs = logs.map((log) => log.getFunctionLogs()); + + const idempotencyRecord = await dynamoDBClient.send( + new ScanCommand({ + TableName: tableNameDataIndex, + }) + ); + 
expect(idempotencyRecord.Items).toHaveLength(1); + expect(idempotencyRecord.Items?.[0].id).toEqual( + `${functionNameDataIndex}#${payloadHash}` + ); + expect(idempotencyRecord.Items?.[0].data).toEqual('idempotent result: bar'); + expect(idempotencyRecord.Items?.[0].status).toEqual('COMPLETED'); + // During the first invocation the handler should be called, so the logs should contain 1 log + expect(functionLogs[0]).toHaveLength(1); + // We test the content of the log as well as the presence of fields from the context, this + // ensures that the all the arguments are passed to the handler when made idempotent + expect(TestInvocationLogs.parseFunctionLog(functionLogs[0][0])).toEqual( + expect.objectContaining({ + message: 'Got test event', + id: '1234', + foo: 'bar', + }) + ); + }); afterAll(async () => { if (!process.env.DISABLE_TEARDOWN) { await testStack.destroy(); } - }, TEARDOWN_TIMEOUT); + }); }); diff --git a/packages/idempotency/tests/e2e/makeHandlerIdempotent.test.ts b/packages/idempotency/tests/e2e/makeHandlerIdempotent.test.ts index f6a2cfbf63..9b92a56af8 100644 --- a/packages/idempotency/tests/e2e/makeHandlerIdempotent.test.ts +++ b/packages/idempotency/tests/e2e/makeHandlerIdempotent.test.ts @@ -10,12 +10,7 @@ import { ScanCommand } from '@aws-sdk/lib-dynamodb'; import { Duration } from 'aws-cdk-lib'; import { afterAll, beforeAll, describe, expect, it } from 'vitest'; import { IdempotencyTestNodejsFunctionAndDynamoTable } from '../helpers/resources.js'; -import { - RESOURCE_NAME_PREFIX, - SETUP_TIMEOUT, - TEARDOWN_TIMEOUT, - TEST_CASE_TIMEOUT, -} from './constants.js'; +import { RESOURCE_NAME_PREFIX } from './constants.js'; const ddb = new DynamoDBClient({}); @@ -110,255 +105,238 @@ describe('Idempotency E2E tests, middy middleware usage', () => { tableNameTimeout = testStack.findAndGetStackOutputValue('timeoutTable'); functionNameExpired = testStack.findAndGetStackOutputValue('expiredFn'); tableNameExpired = 
testStack.findAndGetStackOutputValue('expiredTable'); - }, SETUP_TIMEOUT); - - it( - 'returns the same result and runs the handler once when called multiple times', - async () => { - // Prepare - const payload = { - foo: 'bar', - }; - const payloadHash = createHash('md5') - .update(JSON.stringify(payload)) - .digest('base64'); + }); - // Act - const logs = await invokeFunction({ - functionName: functionNameDefault, - times: 2, - invocationMode: 'SEQUENTIAL', - payload, - }); - const functionLogs = logs.map((log) => log.getFunctionLogs()); + it('returns the same result and runs the handler once when called multiple times', async () => { + // Prepare + const payload = { + foo: 'bar', + }; + const payloadHash = createHash('md5') + .update(JSON.stringify(payload)) + .digest('base64'); - // Assess - const idempotencyRecords = await ddb.send( - new ScanCommand({ - TableName: tableNameDefault, - }) - ); - expect(idempotencyRecords.Items?.length).toEqual(1); - expect(idempotencyRecords.Items?.[0].id).toEqual( - `${functionNameDefault}#${payloadHash}` - ); - expect(idempotencyRecords.Items?.[0].data).toEqual('bar'); - expect(idempotencyRecords.Items?.[0].status).toEqual('COMPLETED'); + // Act + const logs = await invokeFunction({ + functionName: functionNameDefault, + times: 2, + invocationMode: 'SEQUENTIAL', + payload, + }); + const functionLogs = logs.map((log) => log.getFunctionLogs()); - // During the first invocation the handler should be called, so the logs should contain 1 log - expect(functionLogs[0]).toHaveLength(1); - // We test the content of the log as well as the presence of fields from the context, this - // ensures that the all the arguments are passed to the handler when made idempotent - expect(TestInvocationLogs.parseFunctionLog(functionLogs[0][0])).toEqual( - expect.objectContaining({ - message: 'foo', - details: 'bar', - function_name: functionNameDefault, - }) - ); - // During the second invocation the handler should not be called, so the logs should be 
empty - expect(functionLogs[1]).toHaveLength(0); - }, - TEST_CASE_TIMEOUT - ); + // Assess + const idempotencyRecords = await ddb.send( + new ScanCommand({ + TableName: tableNameDefault, + }) + ); + expect(idempotencyRecords.Items?.length).toEqual(1); + expect(idempotencyRecords.Items?.[0].id).toEqual( + `${functionNameDefault}#${payloadHash}` + ); + expect(idempotencyRecords.Items?.[0].data).toEqual('bar'); + expect(idempotencyRecords.Items?.[0].status).toEqual('COMPLETED'); - it( - 'handles parallel invocations correctly', - async () => { - // Prepare - const payload = { - foo: 'bar', - }; - const payloadHash = createHash('md5') - .update(JSON.stringify(payload)) - .digest('base64'); + // During the first invocation the handler should be called, so the logs should contain 1 log + expect(functionLogs[0]).toHaveLength(1); + // We test the content of the log as well as the presence of fields from the context, this + // ensures that the all the arguments are passed to the handler when made idempotent + expect(TestInvocationLogs.parseFunctionLog(functionLogs[0][0])).toEqual( + expect.objectContaining({ + message: 'foo', + details: 'bar', + function_name: functionNameDefault, + }) + ); + // During the second invocation the handler should not be called, so the logs should be empty + expect(functionLogs[1]).toHaveLength(0); + }); - // Act - const logs = await invokeFunction({ - functionName: functionNameDefaultParallel, - times: 2, - invocationMode: 'PARALLEL', - payload, - }); - const functionLogs = logs.map((log) => log.getFunctionLogs()); + it('handles parallel invocations correctly', async () => { + // Prepare + const payload = { + foo: 'bar', + }; + const payloadHash = createHash('md5') + .update(JSON.stringify(payload)) + .digest('base64'); - // Assess - const idempotencyRecords = await ddb.send( - new ScanCommand({ - TableName: tableNameDefaultParallel, - }) - ); - expect(idempotencyRecords.Items?.length).toEqual(1); - 
expect(idempotencyRecords.Items?.[0].id).toEqual( - `${functionNameDefaultParallel}#${payloadHash}` - ); - expect(idempotencyRecords.Items?.[0].data).toEqual('bar'); - expect(idempotencyRecords.Items?.[0].status).toEqual('COMPLETED'); + // Act + const logs = await invokeFunction({ + functionName: functionNameDefaultParallel, + times: 2, + invocationMode: 'PARALLEL', + payload, + }); + const functionLogs = logs.map((log) => log.getFunctionLogs()); - /** - * Since the requests are sent in parallel we don't know which one will be processed first, - * however we expect that only on of them will be processed by the handler, while the other - * one will be rejected with IdempotencyAlreadyInProgressError. - * - * We filter the logs to find which one was successful and which one failed, then we check - * that they contain the expected logs. - */ - const successfulInvocationLogs = functionLogs.find( - (functionLog) => - functionLog.find((log) => log.includes('Processed event')) !== - undefined - ); - const failedInvocationLogs = functionLogs.find( - (functionLog) => - functionLog.find((log) => - log.includes('There is already an execution in progress') - ) !== undefined - ); - expect(successfulInvocationLogs).toHaveLength(1); - expect(failedInvocationLogs).toHaveLength(1); - }, - TEST_CASE_TIMEOUT - ); + // Assess + const idempotencyRecords = await ddb.send( + new ScanCommand({ + TableName: tableNameDefaultParallel, + }) + ); + expect(idempotencyRecords.Items?.length).toEqual(1); + expect(idempotencyRecords.Items?.[0].id).toEqual( + `${functionNameDefaultParallel}#${payloadHash}` + ); + expect(idempotencyRecords.Items?.[0].data).toEqual('bar'); + expect(idempotencyRecords.Items?.[0].status).toEqual('COMPLETED'); - it( - 'recovers from a timed out request and processes the next one', - async () => { - // Prepare - const payload = { - foo: 'bar', - }; - const payloadHash = createHash('md5') - .update(JSON.stringify(payload.foo)) - .digest('base64'); + /** + * Since the 
requests are sent in parallel we don't know which one will be processed first, + * however we expect that only on of them will be processed by the handler, while the other + * one will be rejected with IdempotencyAlreadyInProgressError. + * + * We filter the logs to find which one was successful and which one failed, then we check + * that they contain the expected logs. + */ + const successfulInvocationLogs = functionLogs.find( + (functionLog) => + functionLog.find((log) => log.includes('Processed event')) !== undefined + ); + const failedInvocationLogs = functionLogs.find( + (functionLog) => + functionLog.find((log) => + log.includes('There is already an execution in progress') + ) !== undefined + ); + expect(successfulInvocationLogs).toHaveLength(1); + expect(failedInvocationLogs).toHaveLength(1); + }); - // Act - const logs = await invokeFunction({ - functionName: functionNameTimeout, - times: 2, - invocationMode: 'SEQUENTIAL', - payload: Array.from({ length: 2 }, (_, index) => ({ - ...payload, - invocation: index, - })), - }); - const functionLogs = logs.map((log) => log.getFunctionLogs()); + it('recovers from a timed out request and processes the next one', async () => { + // Prepare + const payload = { + foo: 'bar', + }; + const payloadHash = createHash('md5') + .update(JSON.stringify(payload.foo)) + .digest('base64'); - // Assess - const idempotencyRecords = await ddb.send( - new ScanCommand({ - TableName: tableNameTimeout, - }) - ); - expect(idempotencyRecords.Items?.length).toEqual(1); - expect(idempotencyRecords.Items?.[0].id).toEqual( - `${functionNameTimeout}#${payloadHash}` - ); - expect(idempotencyRecords.Items?.[0].data).toEqual({ + // Act + const logs = await invokeFunction({ + functionName: functionNameTimeout, + times: 2, + invocationMode: 'SEQUENTIAL', + payload: Array.from({ length: 2 }, (_, index) => ({ ...payload, - invocation: 1, - }); - expect(idempotencyRecords.Items?.[0].status).toEqual('COMPLETED'); - - try { - // During the first 
invocation the handler should be called, so the logs should contain 1 log - expect(functionLogs[0]).toHaveLength(2); - expect(functionLogs[0][0]).toContain('Task timed out after'); - } catch { - // During the first invocation the function should timeout so the logs should not contain any log and the report log should contain a timeout message - expect(functionLogs[0]).toHaveLength(0); - expect(logs[0].getReportLog()).toMatch(/Status: timeout$/); - } + invocation: index, + })), + }); + const functionLogs = logs.map((log) => log.getFunctionLogs()); - // During the second invocation the handler should be called and complete, so the logs should - // contain 1 log - expect(functionLogs[1]).toHaveLength(1); - expect(TestInvocationLogs.parseFunctionLog(functionLogs[1][0])).toEqual( - expect.objectContaining({ - message: 'Processed event', - details: 'bar', - function_name: functionNameTimeout, - }) - ); - }, - TEST_CASE_TIMEOUT - ); + // Assess + const idempotencyRecords = await ddb.send( + new ScanCommand({ + TableName: tableNameTimeout, + }) + ); + expect(idempotencyRecords.Items?.length).toEqual(1); + expect(idempotencyRecords.Items?.[0].id).toEqual( + `${functionNameTimeout}#${payloadHash}` + ); + expect(idempotencyRecords.Items?.[0].data).toEqual({ + ...payload, + invocation: 1, + }); + expect(idempotencyRecords.Items?.[0].status).toEqual('COMPLETED'); - it( - 'recovers from an expired idempotency record and processes the next request', - async () => { - // Prepare - const payload = { - foo: 'bar', - }; - const payloadHash = createHash('md5') - .update(JSON.stringify(payload.foo)) - .digest('base64'); + try { + // During the first invocation the handler should be called, so the logs should contain 1 log + expect(functionLogs[0]).toHaveLength(2); + expect(functionLogs[0][0]).toContain('Task timed out after'); + } catch { + // During the first invocation the function should timeout so the logs should not contain any log and the report log should contain a timeout 
message + expect(functionLogs[0]).toHaveLength(0); + expect(logs[0].getReportLog()).toMatch(/Status: timeout$/); + } - // Act - const logs = [ - ( - await invokeFunction({ - functionName: functionNameExpired, - times: 1, - invocationMode: 'SEQUENTIAL', - payload: { ...payload, invocation: 0 }, - }) - )[0], - ]; - // Wait for the idempotency record to expire - await new Promise((resolve) => setTimeout(resolve, 2000)); - logs.push( - ( - await invokeFunction({ - functionName: functionNameExpired, - times: 1, - invocationMode: 'SEQUENTIAL', - payload: { ...payload, invocation: 1 }, - }) - )[0] - ); - const functionLogs = logs.map((log) => log.getFunctionLogs()); + // During the second invocation the handler should be called and complete, so the logs should + // contain 1 log + expect(functionLogs[1]).toHaveLength(1); + expect(TestInvocationLogs.parseFunctionLog(functionLogs[1][0])).toEqual( + expect.objectContaining({ + message: 'Processed event', + details: 'bar', + function_name: functionNameTimeout, + }) + ); + }); - // Assess - const idempotencyRecords = await ddb.send( - new ScanCommand({ - TableName: tableNameExpired, - }) - ); - expect(idempotencyRecords.Items?.length).toEqual(1); - expect(idempotencyRecords.Items?.[0].id).toEqual( - `${functionNameExpired}#${payloadHash}` - ); - expect(idempotencyRecords.Items?.[0].data).toEqual({ - ...payload, - invocation: 1, - }); - expect(idempotencyRecords.Items?.[0].status).toEqual('COMPLETED'); + it('recovers from an expired idempotency record and processes the next request', async () => { + // Prepare + const payload = { + foo: 'bar', + }; + const payloadHash = createHash('md5') + .update(JSON.stringify(payload.foo)) + .digest('base64'); - // Both invocations should be successful and the logs should contain 1 log each - expect(functionLogs[0]).toHaveLength(1); - expect(TestInvocationLogs.parseFunctionLog(functionLogs[1][0])).toEqual( - expect.objectContaining({ - message: 'Processed event', - details: 'bar', - 
function_name: functionNameExpired, + // Act + const logs = [ + ( + await invokeFunction({ + functionName: functionNameExpired, + times: 1, + invocationMode: 'SEQUENTIAL', + payload: { ...payload, invocation: 0 }, }) - ); - // During the second invocation the handler should be called and complete, so the logs should - // contain 1 log - expect(functionLogs[1]).toHaveLength(1); - expect(TestInvocationLogs.parseFunctionLog(functionLogs[1][0])).toEqual( - expect.objectContaining({ - message: 'Processed event', - details: 'bar', - function_name: functionNameExpired, + )[0], + ]; + // Wait for the idempotency record to expire + await new Promise((resolve) => setTimeout(resolve, 2000)); + logs.push( + ( + await invokeFunction({ + functionName: functionNameExpired, + times: 1, + invocationMode: 'SEQUENTIAL', + payload: { ...payload, invocation: 1 }, }) - ); - }, - TEST_CASE_TIMEOUT - ); + )[0] + ); + const functionLogs = logs.map((log) => log.getFunctionLogs()); + + // Assess + const idempotencyRecords = await ddb.send( + new ScanCommand({ + TableName: tableNameExpired, + }) + ); + expect(idempotencyRecords.Items?.length).toEqual(1); + expect(idempotencyRecords.Items?.[0].id).toEqual( + `${functionNameExpired}#${payloadHash}` + ); + expect(idempotencyRecords.Items?.[0].data).toEqual({ + ...payload, + invocation: 1, + }); + expect(idempotencyRecords.Items?.[0].status).toEqual('COMPLETED'); + + // Both invocations should be successful and the logs should contain 1 log each + expect(functionLogs[0]).toHaveLength(1); + expect(TestInvocationLogs.parseFunctionLog(functionLogs[1][0])).toEqual( + expect.objectContaining({ + message: 'Processed event', + details: 'bar', + function_name: functionNameExpired, + }) + ); + // During the second invocation the handler should be called and complete, so the logs should + // contain 1 log + expect(functionLogs[1]).toHaveLength(1); + expect(TestInvocationLogs.parseFunctionLog(functionLogs[1][0])).toEqual( + expect.objectContaining({ + 
message: 'Processed event', + details: 'bar', + function_name: functionNameExpired, + }) + ); + }); afterAll(async () => { await testStack.destroy(); - }, TEARDOWN_TIMEOUT); + }); }); diff --git a/packages/idempotency/tests/e2e/makeIdempotent.test.ts b/packages/idempotency/tests/e2e/makeIdempotent.test.ts index d510a0c893..c87bc9f6b7 100644 --- a/packages/idempotency/tests/e2e/makeIdempotent.test.ts +++ b/packages/idempotency/tests/e2e/makeIdempotent.test.ts @@ -10,12 +10,7 @@ import { ScanCommand } from '@aws-sdk/lib-dynamodb'; import { AttributeType } from 'aws-cdk-lib/aws-dynamodb'; import { afterAll, beforeAll, describe, expect, it } from 'vitest'; import { IdempotencyTestNodejsFunctionAndDynamoTable } from '../helpers/resources.js'; -import { - RESOURCE_NAME_PREFIX, - SETUP_TIMEOUT, - TEARDOWN_TIMEOUT, - TEST_CASE_TIMEOUT, -} from './constants'; +import { RESOURCE_NAME_PREFIX } from './constants'; describe('Idempotency E2E tests, wrapper function usage', () => { const testStack = new TestStack({ @@ -99,239 +94,224 @@ describe('Idempotency E2E tests, wrapper function usage', () => { testStack.findAndGetStackOutputValue('handlerFn'); tableNameLambdaHandler = testStack.findAndGetStackOutputValue('handlerTable'); - }, SETUP_TIMEOUT); + }); - it( - 'when called twice with the same payload, it returns the same result', - async () => { - // Prepare - const payload = { - records: [ - { foo: 'bar', id: 1 }, - { foo: 'baz', id: 2 }, - { foo: 'bar', id: 1 }, - ], - }; - const payloadHashes = payload.records.map((record) => - createHash('md5').update(JSON.stringify(record)).digest('base64') - ); + it('when called twice with the same payload, it returns the same result', async () => { + // Prepare + const payload = { + records: [ + { foo: 'bar', id: 1 }, + { foo: 'baz', id: 2 }, + { foo: 'bar', id: 1 }, + ], + }; + const payloadHashes = payload.records.map((record) => + createHash('md5').update(JSON.stringify(record)).digest('base64') + ); - // Act - const logs = await 
invokeFunction({ - functionName: functionNameDefault, - times: 2, - invocationMode: 'SEQUENTIAL', - payload, - }); - const functionLogs = logs.map((log) => log.getFunctionLogs()); + // Act + const logs = await invokeFunction({ + functionName: functionNameDefault, + times: 2, + invocationMode: 'SEQUENTIAL', + payload, + }); + const functionLogs = logs.map((log) => log.getFunctionLogs()); - // Assess - const idempotencyRecords = await ddb.send( - new ScanCommand({ - TableName: tableNameDefault, - }) - ); - // Since records 1 and 3 have the same payload, only 2 records should be created - expect(idempotencyRecords?.Items?.length).toEqual(2); - const idempotencyRecordsItems = [ - idempotencyRecords.Items?.find( - (record) => record.id === `${functionNameDefault}#${payloadHashes[0]}` - ), - idempotencyRecords.Items?.find( - (record) => record.id === `${functionNameDefault}#${payloadHashes[1]}` - ), - ]; + // Assess + const idempotencyRecords = await ddb.send( + new ScanCommand({ + TableName: tableNameDefault, + }) + ); + // Since records 1 and 3 have the same payload, only 2 records should be created + expect(idempotencyRecords?.Items?.length).toEqual(2); + const idempotencyRecordsItems = [ + idempotencyRecords.Items?.find( + (record) => record.id === `${functionNameDefault}#${payloadHashes[0]}` + ), + idempotencyRecords.Items?.find( + (record) => record.id === `${functionNameDefault}#${payloadHashes[1]}` + ), + ]; - expect(idempotencyRecordsItems?.[0]).toStrictEqual({ - id: `${functionNameDefault}#${payloadHashes[0]}`, - data: 'Processing done: bar', - status: 'COMPLETED', - expiration: expect.any(Number), - in_progress_expiration: expect.any(Number), - }); + expect(idempotencyRecordsItems?.[0]).toStrictEqual({ + id: `${functionNameDefault}#${payloadHashes[0]}`, + data: 'Processing done: bar', + status: 'COMPLETED', + expiration: expect.any(Number), + in_progress_expiration: expect.any(Number), + }); - expect(idempotencyRecordsItems?.[1]).toStrictEqual({ - id: 
`${functionNameDefault}#${payloadHashes[1]}`, - data: 'Processing done: baz', - status: 'COMPLETED', - expiration: expect.any(Number), - in_progress_expiration: expect.any(Number), - }); + expect(idempotencyRecordsItems?.[1]).toStrictEqual({ + id: `${functionNameDefault}#${payloadHashes[1]}`, + data: 'Processing done: baz', + status: 'COMPLETED', + expiration: expect.any(Number), + in_progress_expiration: expect.any(Number), + }); - expect(functionLogs[0]).toHaveLength(2); - }, - TEST_CASE_TIMEOUT - ); + expect(functionLogs[0]).toHaveLength(2); + }); - it( - 'creates a DynamoDB item with the correct attributes', - async () => { - // Prepare - const payload = { - records: [ - { foo: 'bar', id: 1 }, - { foo: 'baq', id: 2 }, - { foo: 'bar', id: 3 }, - ], - }; - const payloadHashes = payload.records.map((record) => - createHash('md5').update(JSON.stringify(record)).digest('base64') - ); - const validationHashes = payload.records.map((record) => - createHash('md5').update(JSON.stringify(record.foo)).digest('base64') - ); + it('creates a DynamoDB item with the correct attributes', async () => { + // Prepare + const payload = { + records: [ + { foo: 'bar', id: 1 }, + { foo: 'baq', id: 2 }, + { foo: 'bar', id: 3 }, + ], + }; + const payloadHashes = payload.records.map((record) => + createHash('md5').update(JSON.stringify(record)).digest('base64') + ); + const validationHashes = payload.records.map((record) => + createHash('md5').update(JSON.stringify(record.foo)).digest('base64') + ); - // Act - const logs = await invokeFunction({ - functionName: functionNameCustomConfig, - times: 2, - invocationMode: 'SEQUENTIAL', - payload, - }); - const functionLogs = logs.map((log) => log.getFunctionLogs()); + // Act + const logs = await invokeFunction({ + functionName: functionNameCustomConfig, + times: 2, + invocationMode: 'SEQUENTIAL', + payload, + }); + const functionLogs = logs.map((log) => log.getFunctionLogs()); - // Assess - const idempotencyRecords = await ddb.send( - new 
ScanCommand({ - TableName: tableNameCustomConfig, - }) - ); - /** - * Each record should have a corresponding entry in the persistence store, - * if so then we retrieve the records based on their custom IDs - * The records are retrieved in the same order as the payload records. - */ - expect(idempotencyRecords.Items?.length).toEqual(3); - const idempotencyRecordsItems = [ - idempotencyRecords.Items?.find( - (record) => - record.customId === - `${functionNameCustomConfig}#${payloadHashes[0]}` - ), - idempotencyRecords.Items?.find( - (record) => - record.customId === - `${functionNameCustomConfig}#${payloadHashes[1]}` - ), - idempotencyRecords.Items?.find( - (record) => - record.customId === - `${functionNameCustomConfig}#${payloadHashes[2]}` - ), - ]; + // Assess + const idempotencyRecords = await ddb.send( + new ScanCommand({ + TableName: tableNameCustomConfig, + }) + ); + /** + * Each record should have a corresponding entry in the persistence store, + * if so then we retrieve the records based on their custom IDs + * The records are retrieved in the same order as the payload records. 
+ */ + expect(idempotencyRecords.Items?.length).toEqual(3); + const idempotencyRecordsItems = [ + idempotencyRecords.Items?.find( + (record) => + record.customId === `${functionNameCustomConfig}#${payloadHashes[0]}` + ), + idempotencyRecords.Items?.find( + (record) => + record.customId === `${functionNameCustomConfig}#${payloadHashes[1]}` + ), + idempotencyRecords.Items?.find( + (record) => + record.customId === `${functionNameCustomConfig}#${payloadHashes[2]}` + ), + ]; - expect(idempotencyRecordsItems?.[0]).toStrictEqual({ - customId: `${functionNameCustomConfig}#${payloadHashes[0]}`, - dataAttr: payload.records[0], - statusAttr: 'COMPLETED', - expiryAttr: expect.any(Number), - inProgressExpiryAttr: expect.any(Number), - validationKeyAttr: validationHashes[0], - }); + expect(idempotencyRecordsItems?.[0]).toStrictEqual({ + customId: `${functionNameCustomConfig}#${payloadHashes[0]}`, + dataAttr: payload.records[0], + statusAttr: 'COMPLETED', + expiryAttr: expect.any(Number), + inProgressExpiryAttr: expect.any(Number), + validationKeyAttr: validationHashes[0], + }); - expect(idempotencyRecordsItems?.[1]).toStrictEqual({ - customId: `${functionNameCustomConfig}#${payloadHashes[1]}`, - dataAttr: payload.records[1], - statusAttr: 'COMPLETED', - expiryAttr: expect.any(Number), - inProgressExpiryAttr: expect.any(Number), - validationKeyAttr: validationHashes[1], - }); + expect(idempotencyRecordsItems?.[1]).toStrictEqual({ + customId: `${functionNameCustomConfig}#${payloadHashes[1]}`, + dataAttr: payload.records[1], + statusAttr: 'COMPLETED', + expiryAttr: expect.any(Number), + inProgressExpiryAttr: expect.any(Number), + validationKeyAttr: validationHashes[1], + }); - expect(idempotencyRecordsItems?.[2]).toStrictEqual({ - customId: `${functionNameCustomConfig}#${payloadHashes[2]}`, - dataAttr: payload.records[2], - statusAttr: 'COMPLETED', - expiryAttr: expect.any(Number), - inProgressExpiryAttr: expect.any(Number), - validationKeyAttr: validationHashes[2], - }); + 
expect(idempotencyRecordsItems?.[2]).toStrictEqual({ + customId: `${functionNameCustomConfig}#${payloadHashes[2]}`, + dataAttr: payload.records[2], + statusAttr: 'COMPLETED', + expiryAttr: expect.any(Number), + inProgressExpiryAttr: expect.any(Number), + validationKeyAttr: validationHashes[2], + }); - // During the first invocation, the processing function should have been called 3 times (once for each record) - expect(functionLogs[0]).toHaveLength(3); - expect(TestInvocationLogs.parseFunctionLog(functionLogs[0][0])).toEqual( - expect.objectContaining({ - baz: 0, // index of recursion in handler, assess that all function arguments are preserved - record: payload.records[0], - message: 'Got test event', - }) - ); - expect(TestInvocationLogs.parseFunctionLog(functionLogs[0][1])).toEqual( - expect.objectContaining({ - baz: 1, - record: payload.records[1], - message: 'Got test event', - }) - ); - expect(TestInvocationLogs.parseFunctionLog(functionLogs[0][2])).toEqual( - expect.objectContaining({ - baz: 2, - record: payload.records[2], - message: 'Got test event', - }) - ); + // During the first invocation, the processing function should have been called 3 times (once for each record) + expect(functionLogs[0]).toHaveLength(3); + expect(TestInvocationLogs.parseFunctionLog(functionLogs[0][0])).toEqual( + expect.objectContaining({ + baz: 0, // index of recursion in handler, assess that all function arguments are preserved + record: payload.records[0], + message: 'Got test event', + }) + ); + expect(TestInvocationLogs.parseFunctionLog(functionLogs[0][1])).toEqual( + expect.objectContaining({ + baz: 1, + record: payload.records[1], + message: 'Got test event', + }) + ); + expect(TestInvocationLogs.parseFunctionLog(functionLogs[0][2])).toEqual( + expect.objectContaining({ + baz: 2, + record: payload.records[2], + message: 'Got test event', + }) + ); - // During the second invocation, the processing function should have been called 0 times (all records are idempotent) - 
expect(functionLogs[1]).toHaveLength(0); - }, - TEST_CASE_TIMEOUT - ); + // During the second invocation, the processing function should have been called 0 times (all records are idempotent) + expect(functionLogs[1]).toHaveLength(0); + }); - it( - 'calls the wrapped function once and always returns the same result when called multiple times', - async () => { - // Prepare - const payload = { - body: JSON.stringify({ - foo: 'bar', - }), - }; - const payloadHash = createHash('md5') - .update(JSON.stringify('bar')) - .digest('base64'); + it('calls the wrapped function once and always returns the same result when called multiple times', async () => { + // Prepare + const payload = { + body: JSON.stringify({ + foo: 'bar', + }), + }; + const payloadHash = createHash('md5') + .update(JSON.stringify('bar')) + .digest('base64'); - // Act - const logs = await invokeFunction({ - functionName: functionNameLambdaHandler, - times: 2, - invocationMode: 'SEQUENTIAL', - payload, - }); - const functionLogs = logs.map((log) => log.getFunctionLogs()); + // Act + const logs = await invokeFunction({ + functionName: functionNameLambdaHandler, + times: 2, + invocationMode: 'SEQUENTIAL', + payload, + }); + const functionLogs = logs.map((log) => log.getFunctionLogs()); - // Assess - const idempotencyRecords = await ddb.send( - new ScanCommand({ - TableName: tableNameLambdaHandler, - }) - ); - expect(idempotencyRecords.Items?.length).toEqual(1); - expect(idempotencyRecords.Items?.[0].id).toEqual( - `${functionNameLambdaHandler}#${payloadHash}` - ); - expect(idempotencyRecords.Items?.[0].data).toEqual('bar'); - expect(idempotencyRecords.Items?.[0].status).toEqual('COMPLETED'); + // Assess + const idempotencyRecords = await ddb.send( + new ScanCommand({ + TableName: tableNameLambdaHandler, + }) + ); + expect(idempotencyRecords.Items?.length).toEqual(1); + expect(idempotencyRecords.Items?.[0].id).toEqual( + `${functionNameLambdaHandler}#${payloadHash}` + ); + 
expect(idempotencyRecords.Items?.[0].data).toEqual('bar'); + expect(idempotencyRecords.Items?.[0].status).toEqual('COMPLETED'); - // During the first invocation the handler should be called, so the logs should contain 1 log - expect(functionLogs[0]).toHaveLength(1); - // We test the content of the log as well as the presence of fields from the context, this - // ensures that the all the arguments are passed to the handler when made idempotent - expect(TestInvocationLogs.parseFunctionLog(functionLogs[0][0])).toEqual( - expect.objectContaining({ - message: 'foo', - details: 'bar', - function_name: functionNameLambdaHandler, - }) - ); - // During the second invocation the handler should not be called, so the logs should be empty - expect(functionLogs[1]).toHaveLength(0); - }, - TEST_CASE_TIMEOUT - ); + // During the first invocation the handler should be called, so the logs should contain 1 log + expect(functionLogs[0]).toHaveLength(1); + // We test the content of the log as well as the presence of fields from the context, this + // ensures that the all the arguments are passed to the handler when made idempotent + expect(TestInvocationLogs.parseFunctionLog(functionLogs[0][0])).toEqual( + expect.objectContaining({ + message: 'foo', + details: 'bar', + function_name: functionNameLambdaHandler, + }) + ); + // During the second invocation the handler should not be called, so the logs should be empty + expect(functionLogs[1]).toHaveLength(0); + }); afterAll(async () => { if (!process.env.DISABLE_TEARDOWN) { await testStack.destroy(); } - }, TEARDOWN_TIMEOUT); + }); }); diff --git a/packages/idempotency/vitest.config.ts b/packages/idempotency/vitest.config.ts index 9f1196ef1f..baa5cf7463 100644 --- a/packages/idempotency/vitest.config.ts +++ b/packages/idempotency/vitest.config.ts @@ -4,5 +4,7 @@ export default defineProject({ test: { environment: 'node', setupFiles: ['../testing/src/setupEnv.ts'], + hookTimeout: 1_000 * 60 * 10, // 10 minutes + testTimeout: 1_000 * 60 * 
3, // 3 minutes }, }); diff --git a/packages/logger/package.json b/packages/logger/package.json index acf8b56457..093551c0be 100644 --- a/packages/logger/package.json +++ b/packages/logger/package.json @@ -62,6 +62,10 @@ "types": [ "lib/cjs/types/index.d.ts", "lib/esm/types/index.d.ts" + ], + "correlationId": [ + "lib/cjs/correlationId.d.ts", + "lib/esm/correlationId.d.ts" ] } }, diff --git a/packages/logger/tests/e2e/advancedUses.test.FunctionCode.ts b/packages/logger/tests/e2e/advancedUses.test.FunctionCode.ts new file mode 100644 index 0000000000..1e601c6097 --- /dev/null +++ b/packages/logger/tests/e2e/advancedUses.test.FunctionCode.ts @@ -0,0 +1,63 @@ +import { Logger } from '@aws-lambda-powertools/logger'; +import { + correlationPaths, + search, +} from '@aws-lambda-powertools/logger/correlationId'; +import { injectLambdaContext } from '@aws-lambda-powertools/logger/middleware'; +import type { Context } from 'aws-lambda'; +import middy from 'middy5'; + +const logger = new Logger({ + logLevel: 'DEBUG', + logBufferOptions: { + enabled: true, + flushOnErrorLog: true, + }, + correlationIdSearchFn: search, +}); + +logger.debug('a never buffered debug log'); + +export const handlerManual = async (event: unknown) => { + logger.addContext({} as Context); // we want only the cold start value + logger.setCorrelationId(event, correlationPaths.EVENT_BRIDGE); + + logger.debug('a buffered debug log'); + logger.info('an info log'); + try { + throw new Error('ops'); + } catch (error) { + logger.error('Uncaught error detected, flushing log buffer before exit', { + error, + }); + } finally { + logger.clearBuffer(); + } +}; + +export const handlerMiddy = middy() + .use( + injectLambdaContext(logger, { + correlationIdPath: correlationPaths.EVENT_BRIDGE, + flushBufferOnUncaughtError: true, + }) + ) + .handler(async () => { + logger.debug('a buffered debug log'); + logger.info('an info log'); + throw new Error('ops'); + }); + +class Lambda { + @logger.injectLambdaContext({ + 
correlationIdPath: correlationPaths.EVENT_BRIDGE, + flushBufferOnUncaughtError: true, + }) + public async handler(_event: unknown, _context: Context) { + logger.debug('a buffered debug log'); + logger.info('an info log'); + throw new Error('ops'); + } +} +const lambda = new Lambda(); +export const handlerDecorator = lambda.handler.bind(lambda); diff --git a/packages/logger/tests/e2e/advancedUses.test.ts b/packages/logger/tests/e2e/advancedUses.test.ts new file mode 100644 index 0000000000..c41aaec409 --- /dev/null +++ b/packages/logger/tests/e2e/advancedUses.test.ts @@ -0,0 +1,163 @@ +import { join } from 'node:path'; +import { + TestInvocationLogs, + TestStack, + invokeFunction, +} from '@aws-lambda-powertools/testing-utils'; +import { afterAll, beforeAll, describe, expect, it } from 'vitest'; +import { LoggerTestNodejsFunction } from '../helpers/resources.js'; +import { RESOURCE_NAME_PREFIX, STACK_OUTPUT_LOG_GROUP } from './constants.js'; + +/** + * In this e2e test for Logger, we test a number of advanced use cases: + * - Log buffering enabled with flush on error (both manually on logger.error and automatically on uncaught error) + * - Correlation ID injection (both manually and automatically) + * + * The test is split into three cases: + * - Manual instrumentation + * - Middy middleware + * - Decorator + */ +describe('Logger E2E - Advanced uses', () => { + const testStack = new TestStack({ + stackNameProps: { + stackNamePrefix: RESOURCE_NAME_PREFIX, + testName: 'Advanced', + }, + }); + + // Location of the lambda function code + const lambdaFunctionCodeFilePath = join( + __dirname, + 'advancedUses.test.FunctionCode.ts' + ); + + const invocationCount = 2; + const invocationLogs = new Map(); + const manualCase = 'Manual'; + const middyCase = 'Middy'; + const decoratorCase = 'Decorator'; + + beforeAll(async () => { + invocationLogs.set(manualCase, []); + invocationLogs.set(middyCase, []); + invocationLogs.set(decoratorCase, []); + for (const caseKey of 
invocationLogs.keys()) { + new LoggerTestNodejsFunction( + testStack, + { + entry: lambdaFunctionCodeFilePath, + handler: `handler${caseKey}`, + }, + { + logGroupOutputKey: STACK_OUTPUT_LOG_GROUP, + nameSuffix: caseKey, + createAlias: true, + } + ); + } + + await testStack.deploy(); + + for (const caseKey of invocationLogs.keys()) { + const functionArn = testStack.findAndGetStackOutputValue(caseKey); + const logs = await invokeFunction({ + functionName: functionArn, + times: invocationCount, + invocationMode: 'SEQUENTIAL', + payload: [ + { + id: 1, + }, + { + id: 2, + }, + ], + }); + invocationLogs.set(caseKey, logs); + } + }); + + it.each([ + { + caseKey: manualCase, + }, + { + caseKey: middyCase, + }, + { + caseKey: decoratorCase, + }, + ])('$caseKey instrumentation', ({ caseKey }) => { + for (let i = 0; i < invocationCount; i++) { + const isFirstInvocation = i === 0; + // Get log messages of the i-th invocation + const fnLogs = invocationLogs.get(caseKey)?.at(i)?.getFunctionLogs(); + if (!fnLogs || fnLogs.length === 0) { + throw new Error(`Failed to get logs for ${caseKey} invocation ${i}`); + } + // When using decorator & middleware, we are actually throwing an error + // which is logged by the runtime, so we need to filter out the logs that are + // not JSON formatted + const logs = fnLogs.filter((log) => { + try { + JSON.parse(log); + return true; + } catch (error) { + return false; + } + }); + + if (isFirstInvocation) { + // Logs outside of the function handler are only present on the first invocation + expect(TestInvocationLogs.parseFunctionLog(logs[0])).toEqual( + expect.objectContaining({ + level: 'DEBUG', + message: 'a never buffered debug log', + }) + ); + } + // Since we have an extra log (above) on the first invocation, we need to + // adjust the index of the logs we are checking + const logIndexOffset = isFirstInvocation ? 
1 : 0; + const correlationId = i + 1; + expect( + TestInvocationLogs.parseFunctionLog(logs[0 + logIndexOffset]) + ).toEqual( + expect.objectContaining({ + level: 'INFO', + message: 'an info log', + correlation_id: correlationId, + }) + ); + expect( + TestInvocationLogs.parseFunctionLog(logs[1 + logIndexOffset]) + ).toEqual( + expect.objectContaining({ + level: 'DEBUG', + message: 'a buffered debug log', + correlation_id: correlationId, + }) + ); + expect( + TestInvocationLogs.parseFunctionLog(logs.at(-1) as string) + ).toEqual( + expect.objectContaining({ + level: 'ERROR', + message: 'Uncaught error detected, flushing log buffer before exit', + correlation_id: correlationId, + error: expect.objectContaining({ + name: 'Error', + message: 'ops', + }), + }) + ); + } + }); + + afterAll(async () => { + if (!process.env.DISABLE_TEARDOWN) { + await testStack.destroy(); + } + }); +}); diff --git a/packages/logger/tests/e2e/basicFeatures.middy.test.ts b/packages/logger/tests/e2e/basicFeatures.middy.test.ts index 9f373c718d..04a6eb5200 100644 --- a/packages/logger/tests/e2e/basicFeatures.middy.test.ts +++ b/packages/logger/tests/e2e/basicFeatures.middy.test.ts @@ -9,10 +9,7 @@ import { afterAll, beforeAll, describe, expect, it } from 'vitest'; import { LoggerTestNodejsFunction } from '../helpers/resources.js'; import { RESOURCE_NAME_PREFIX, - SETUP_TIMEOUT, STACK_OUTPUT_LOG_GROUP, - TEARDOWN_TIMEOUT, - TEST_CASE_TIMEOUT, XRAY_TRACE_ID_REGEX, commonEnvironmentVars, } from './constants.js'; @@ -65,297 +62,227 @@ describe('Logger E2E tests, basic functionalities middy usage', () => { }); console.log('logGroupName', logGroupName); - }, SETUP_TIMEOUT); + }); describe('Log level filtering', () => { - it( - 'should filter log based on POWERTOOLS_LOG_LEVEL (INFO) environment variable in Lambda', - async () => { - for (let i = 0; i < invocationCount; i++) { - // Get log messages of the invocation and filter by level - const debugLogs = invocationLogs[i].getFunctionLogs('DEBUG'); - // 
Check that no log message below INFO level is logged - expect(debugLogs.length).toBe(0); - } - }, - TEST_CASE_TIMEOUT - ); + it('should filter log based on POWERTOOLS_LOG_LEVEL (INFO) environment variable in Lambda', async () => { + for (let i = 0; i < invocationCount; i++) { + // Get log messages of the invocation and filter by level + const debugLogs = invocationLogs[i].getFunctionLogs('DEBUG'); + // Check that no log message below INFO level is logged + expect(debugLogs.length).toBe(0); + } + }); }); describe('Context data', () => { - it( - 'should inject context info in each log', - async () => { - for (let i = 0; i < invocationCount; i++) { - // Get log messages of the invocation - const logMessages = invocationLogs[i].getFunctionLogs(); - // Check that the context is logged on every log - for (const message of logMessages) { - const log = TestInvocationLogs.parseFunctionLog(message); - expect(log).toHaveProperty('function_arn'); - expect(log).toHaveProperty('function_memory_size'); - expect(log).toHaveProperty('function_name'); - expect(log).toHaveProperty('function_request_id'); - expect(log).toHaveProperty('timestamp'); - } + it('should inject context info in each log', async () => { + for (let i = 0; i < invocationCount; i++) { + // Get log messages of the invocation + const logMessages = invocationLogs[i].getFunctionLogs(); + // Check that the context is logged on every log + for (const message of logMessages) { + const log = TestInvocationLogs.parseFunctionLog(message); + expect(log).toHaveProperty('function_arn'); + expect(log).toHaveProperty('function_memory_size'); + expect(log).toHaveProperty('function_name'); + expect(log).toHaveProperty('function_request_id'); + expect(log).toHaveProperty('timestamp'); } - }, - TEST_CASE_TIMEOUT - ); + } + }); - it( - 'should include coldStart equal to TRUE only on the first invocation, FALSE otherwise', - async () => { - for (let i = 0; i < invocationCount; i++) { - // Get log messages of the invocation - const 
logMessages = invocationLogs[i].getFunctionLogs(); - // Check that cold start is logged correctly on every log - for (const message of logMessages) { - const log = TestInvocationLogs.parseFunctionLog(message); - if (i === 0) { - expect(log.cold_start).toBe(true); - } else { - expect(log.cold_start).toBe(false); - } + it('should include coldStart equal to TRUE only on the first invocation, FALSE otherwise', async () => { + for (let i = 0; i < invocationCount; i++) { + // Get log messages of the invocation + const logMessages = invocationLogs[i].getFunctionLogs(); + // Check that cold start is logged correctly on every log + for (const message of logMessages) { + const log = TestInvocationLogs.parseFunctionLog(message); + if (i === 0) { + expect(log.cold_start).toBe(true); + } else { + expect(log.cold_start).toBe(false); } } - }, - TEST_CASE_TIMEOUT - ); + } + }); }); - describe('Log event', () => { - it( - 'should log the event as the first log of each invocation only', - async () => { - for (let i = 0; i < invocationCount; i++) { - // Get log messages of the invocation - const logMessages = invocationLogs[i].getFunctionLogs(); - - for (const [index, message] of logMessages.entries()) { - const log = TestInvocationLogs.parseFunctionLog(message); - // Check that the event is logged on the first log - if (index === 0) { - expect(log).toHaveProperty('event'); - expect(log.event).toStrictEqual( - expect.objectContaining({ foo: 'bar' }) - ); - // Check that the event is not logged again on the rest of the logs - } else { - expect(log).not.toHaveProperty('event'); - } - } - } - }, - TEST_CASE_TIMEOUT - ); + it('logs the event for every invocation, only once, and without keys from previous invocations', async () => { + const { RUNTIME_ADDED_KEY: runtimeAddedKey } = commonEnvironmentVars; + + for (let i = 0; i < invocationCount; i++) { + // Get log messages of the invocation + const logMessages = invocationLogs[i].getFunctionLogs(); + + const eventLog = 
logMessages.filter((log) => + log.includes('Lambda invocation event') + ); + + // Check that the event log is logged only once + expect(eventLog).toHaveLength(1); + const log = TestInvocationLogs.parseFunctionLog(eventLog[0]); + // Check that the event log is logged correctly + expect(log).toHaveProperty('event'); + expect(log.event).toStrictEqual(expect.objectContaining({ foo: 'bar' })); + // Check that the event log does not contain keys from previous invocations + expect(log).not.toHaveProperty(runtimeAddedKey); + } }); describe('Persistent additional log keys and values', () => { - it( - 'should contain persistent value in every log', - async () => { - const { - PERSISTENT_KEY: persistentKey, - PERSISTENT_VALUE: persistentValue, - } = commonEnvironmentVars; - - for (let i = 0; i < invocationCount; i++) { - // Get log messages of the invocation - const logMessages = invocationLogs[i].getFunctionLogs(); - - for (const message of logMessages) { - const log = TestInvocationLogs.parseFunctionLog(message); - // Check that the persistent key is present in every log - expect(log).toHaveProperty(persistentKey); - expect(log[persistentKey]).toBe(persistentValue); - } + it('should contain persistent value in every log', async () => { + const { + PERSISTENT_KEY: persistentKey, + PERSISTENT_VALUE: persistentValue, + } = commonEnvironmentVars; + + for (let i = 0; i < invocationCount; i++) { + // Get log messages of the invocation + const logMessages = invocationLogs[i].getFunctionLogs(); + + for (const message of logMessages) { + const log = TestInvocationLogs.parseFunctionLog(message); + // Check that the persistent key is present in every log + expect(log).toHaveProperty(persistentKey); + expect(log[persistentKey]).toBe(persistentValue); } - }, - TEST_CASE_TIMEOUT - ); - - it( - 'should not contain persistent keys that were removed on runtime', - async () => { - const { REMOVABLE_KEY: removableKey, REMOVABLE_VALUE: removableValue } = - commonEnvironmentVars; - - for (let i 
= 0; i < invocationCount; i++) { - // Get log messages of the invocation - const logMessages = invocationLogs[i].getFunctionLogs(); - - for (const [index, message] of logMessages.entries()) { - const log = TestInvocationLogs.parseFunctionLog(message); - // Check that at the time of logging the event, which happens before the handler, - // the key was still present - if (index === 0) { - expect(log).toHaveProperty(removableKey); - expect(log[removableKey]).toBe(removableValue); - // Check that all other logs that happen at runtime do not contain the key - } else { - expect(log).not.toHaveProperty(removableValue); - } - } - } - }, - TEST_CASE_TIMEOUT - ); - - it( - 'should not leak any persistent keys added runtime since clearState is enabled', - async () => { - const { RUNTIME_ADDED_KEY: runtimeAddedKey } = commonEnvironmentVars; - - for (let i = 0; i < invocationCount; i++) { - // Get log messages of the invocation - const logMessages = invocationLogs[i].getFunctionLogs(); + } + }); - for (const [index, message] of logMessages.entries()) { - const log = TestInvocationLogs.parseFunctionLog(message); - // Check that at the time of logging the event, which happens before the handler, - // the key is NOT present - if (index === 0) { - expect(log).not.toHaveProperty(runtimeAddedKey); - } else { - // Check that all other logs that happen at runtime do contain the key - expect(log).toHaveProperty(runtimeAddedKey); - // Check that the value is the same for all logs - expect(log[runtimeAddedKey]).toEqual('bar'); - } + it('should not contain persistent keys that were removed on runtime', async () => { + const { REMOVABLE_KEY: removableKey, REMOVABLE_VALUE: removableValue } = + commonEnvironmentVars; + + for (let i = 0; i < invocationCount; i++) { + // Get log messages of the invocation + const logMessages = invocationLogs[i].getFunctionLogs(); + + for (const [index, message] of logMessages.entries()) { + const log = TestInvocationLogs.parseFunctionLog(message); + // Check 
that at the time of logging the event, which happens before the handler, + // the key was still present + if (index === 0) { + expect(log).toHaveProperty(removableKey); + expect(log[removableKey]).toBe(removableValue); + // Check that all other logs that happen at runtime do not contain the key + } else { + expect(log).not.toHaveProperty(removableKey); } } - }, - TEST_CASE_TIMEOUT - ); + } + }); }); describe('One-time additional log keys and values', () => { - it( - 'should log additional keys and value only once', - async () => { - const { - SINGLE_LOG_ITEM_KEY: singleLogItemKey, - SINGLE_LOG_ITEM_VALUE: singleLogItemValue, - } = commonEnvironmentVars; - - for (let i = 0; i < invocationCount; i++) { - // Get log messages of the invocation - const logMessages = invocationLogs[i].getFunctionLogs(); - // Check that the additional log is logged only once - const logMessagesWithAdditionalLog = logMessages.filter((log) => - log.includes(singleLogItemKey) - ); - expect(logMessagesWithAdditionalLog).toHaveLength(1); - // Check that the additional log is logged correctly - const parsedLog = TestInvocationLogs.parseFunctionLog( - logMessagesWithAdditionalLog[0] - ); - expect(parsedLog[singleLogItemKey]).toBe(singleLogItemValue); - } - }, - TEST_CASE_TIMEOUT - ); + it('should log additional keys and value only once', async () => { + const { + SINGLE_LOG_ITEM_KEY: singleLogItemKey, + SINGLE_LOG_ITEM_VALUE: singleLogItemValue, + } = commonEnvironmentVars; + + for (let i = 0; i < invocationCount; i++) { + // Get log messages of the invocation + const logMessages = invocationLogs[i].getFunctionLogs(); + // Check that the additional log is logged only once + const logMessagesWithAdditionalLog = logMessages.filter((log) => + log.includes(singleLogItemKey) + ); + expect(logMessagesWithAdditionalLog).toHaveLength(1); + // Check that the additional log is logged correctly + const parsedLog = TestInvocationLogs.parseFunctionLog( + logMessagesWithAdditionalLog[0] + ); + 
expect(parsedLog[singleLogItemKey]).toBe(singleLogItemValue); + } + }); }); describe('Error logging', () => { - it( - 'should log error only once', - async () => { - const { ERROR_MSG: errorMsg } = commonEnvironmentVars; - - for (let i = 0; i < invocationCount; i++) { - // Get log messages of the invocation filtered by error level - const logMessages = invocationLogs[i].getFunctionLogs('ERROR'); - - // Check that the error is logged only once - expect(logMessages).toHaveLength(1); - - // Check that the error is logged correctly - const errorLog = TestInvocationLogs.parseFunctionLog(logMessages[0]); - expect(errorLog).toHaveProperty('error'); - expect(errorLog.error).toStrictEqual( - expect.objectContaining({ - location: expect.any(String), - name: 'Error', - message: errorMsg, - stack: expect.anything(), - }) - ); - } - }, - TEST_CASE_TIMEOUT - ); + it('should log error only once', async () => { + const { ERROR_MSG: errorMsg } = commonEnvironmentVars; + + for (let i = 0; i < invocationCount; i++) { + // Get log messages of the invocation filtered by error level + const logMessages = invocationLogs[i].getFunctionLogs('ERROR'); + + // Check that the error is logged only once + expect(logMessages).toHaveLength(1); + + // Check that the error is logged correctly + const errorLog = TestInvocationLogs.parseFunctionLog(logMessages[0]); + expect(errorLog).toHaveProperty('error'); + expect(errorLog.error).toStrictEqual( + expect.objectContaining({ + location: expect.any(String), + name: 'Error', + message: errorMsg, + stack: expect.anything(), + }) + ); + } + }); }); describe('Arbitrary object logging', () => { - it( - 'should log additional arbitrary object only once', - async () => { - const { - ARBITRARY_OBJECT_KEY: objectKey, - ARBITRARY_OBJECT_DATA: objectData, - } = commonEnvironmentVars; - - for (let i = 0; i < invocationCount; i++) { - // Get log messages of the invocation - const logMessages = invocationLogs[i].getFunctionLogs(); - // Get the log messages that 
contains the arbitrary object - const filteredLogs = logMessages.filter((log) => - log.includes(objectData) - ); - // Check that the arbitrary object is logged only once - expect(filteredLogs).toHaveLength(1); - const logObject = TestInvocationLogs.parseFunctionLog( - filteredLogs[0] - ); - // Check that the arbitrary object is logged correctly - expect(logObject).toHaveProperty(objectKey); - const arbitrary = logObject[objectKey] as APIGatewayAuthorizerResult; - expect(arbitrary.principalId).toBe(objectData); - expect(arbitrary.policyDocument).toEqual( - expect.objectContaining({ - Version: 'Version 1', - Statement: [ - { - Effect: 'Allow', - Action: 'geo:*', - Resource: '*', - }, - ], - }) - ); - } - }, - TEST_CASE_TIMEOUT - ); + it('should log additional arbitrary object only once', async () => { + const { + ARBITRARY_OBJECT_KEY: objectKey, + ARBITRARY_OBJECT_DATA: objectData, + } = commonEnvironmentVars; + + for (let i = 0; i < invocationCount; i++) { + // Get log messages of the invocation + const logMessages = invocationLogs[i].getFunctionLogs(); + // Get the log messages that contains the arbitrary object + const filteredLogs = logMessages.filter((log) => + log.includes(objectData) + ); + // Check that the arbitrary object is logged only once + expect(filteredLogs).toHaveLength(1); + const logObject = TestInvocationLogs.parseFunctionLog(filteredLogs[0]); + // Check that the arbitrary object is logged correctly + expect(logObject).toHaveProperty(objectKey); + const arbitrary = logObject[objectKey] as APIGatewayAuthorizerResult; + expect(arbitrary.principalId).toBe(objectData); + expect(arbitrary.policyDocument).toEqual( + expect.objectContaining({ + Version: 'Version 1', + Statement: [ + { + Effect: 'Allow', + Action: 'geo:*', + Resource: '*', + }, + ], + }) + ); + } + }); }); describe('X-Ray Trace ID injection', () => { - it( - 'should inject & parse the X-Ray Trace ID of the current invocation into every log', - async () => { - for (let i = 0; i < 
invocationCount; i++) { - // Get log messages of the invocation - const logMessages = invocationLogs[i].getFunctionLogs(); - - // Check that the X-Ray Trace ID is logged on every log - const traceIds: string[] = []; - for (const message of logMessages) { - const log = TestInvocationLogs.parseFunctionLog(message); - expect(log).toHaveProperty('xray_trace_id'); - expect(log.xray_trace_id).toMatch(XRAY_TRACE_ID_REGEX); - traceIds.push(log.xray_trace_id as string); - } + it('should inject & parse the X-Ray Trace ID of the current invocation into every log', async () => { + for (let i = 0; i < invocationCount; i++) { + // Get log messages of the invocation + const logMessages = invocationLogs[i].getFunctionLogs(); + + // Check that the X-Ray Trace ID is logged on every log + const traceIds: string[] = []; + for (const message of logMessages) { + const log = TestInvocationLogs.parseFunctionLog(message); + expect(log).toHaveProperty('xray_trace_id'); + expect(log.xray_trace_id).toMatch(XRAY_TRACE_ID_REGEX); + traceIds.push(log.xray_trace_id as string); } - }, - TEST_CASE_TIMEOUT - ); + } + }); }); afterAll(async () => { if (!process.env.DISABLE_TEARDOWN) { await testStack.destroy(); } - }, TEARDOWN_TIMEOUT); + }); }); diff --git a/packages/logger/tests/e2e/childLogger.manual.test.ts b/packages/logger/tests/e2e/childLogger.manual.test.ts index a52584ab25..57dd63c926 100644 --- a/packages/logger/tests/e2e/childLogger.manual.test.ts +++ b/packages/logger/tests/e2e/childLogger.manual.test.ts @@ -8,10 +8,7 @@ import { afterAll, beforeAll, describe, expect, it } from 'vitest'; import { LoggerTestNodejsFunction } from '../helpers/resources.js'; import { RESOURCE_NAME_PREFIX, - SETUP_TIMEOUT, STACK_OUTPUT_LOG_GROUP, - TEARDOWN_TIMEOUT, - TEST_CASE_TIMEOUT, commonEnvironmentVars, } from './constants.js'; @@ -57,105 +54,89 @@ describe('Logger E2E tests, child logger', () => { }); console.log('logGroupName', logGroupName); - }, SETUP_TIMEOUT); + }); describe('Child logger', () => { 
- it( - 'should not log at same level of parent because of its own logLevel', - async () => { - const { PARENT_LOG_MSG: parentLogMsg, CHILD_LOG_MSG: childLogMsg } = - commonEnvironmentVars; - - for (let i = 0; i < invocationCount; i++) { - // Get log messages of the invocation and filter by level - const infoLogs = invocationLogs[i].getFunctionLogs('INFO'); - - const parentInfoLogs = infoLogs.filter((message) => - message.includes(parentLogMsg) - ); - const childInfoLogs = infoLogs.filter((message) => - message.includes(childLogMsg) - ); - - expect(parentInfoLogs).toHaveLength(infoLogs.length); - expect(childInfoLogs).toHaveLength(0); - } - }, - TEST_CASE_TIMEOUT - ); + it('should not log at same level of parent because of its own logLevel', async () => { + const { PARENT_LOG_MSG: parentLogMsg, CHILD_LOG_MSG: childLogMsg } = + commonEnvironmentVars; + + for (let i = 0; i < invocationCount; i++) { + // Get log messages of the invocation and filter by level + const infoLogs = invocationLogs[i].getFunctionLogs('INFO'); + + const parentInfoLogs = infoLogs.filter((message) => + message.includes(parentLogMsg) + ); + const childInfoLogs = infoLogs.filter((message) => + message.includes(childLogMsg) + ); + + expect(parentInfoLogs).toHaveLength(infoLogs.length); + expect(childInfoLogs).toHaveLength(0); + } + }); - it( - 'should log only level passed to a child', - async () => { - const { CHILD_LOG_MSG: childLogMsg } = commonEnvironmentVars; - for (let i = 0; i < invocationCount; i++) { - // Get log messages of the invocation - const logMessages = invocationLogs[i].getFunctionLogs(); - - // Filter child logs by level - const errorChildLogs = logMessages.filter( - (message) => - message.includes('ERROR') && message.includes(childLogMsg) - ); - - // Check that the child logger only logged once (the other) - // log was filtered out by the child logger because of its logLevel - expect(errorChildLogs).toHaveLength(1); - } - }, - TEST_CASE_TIMEOUT - ); + it('should log only level 
passed to a child', async () => { + const { CHILD_LOG_MSG: childLogMsg } = commonEnvironmentVars; + for (let i = 0; i < invocationCount; i++) { + // Get log messages of the invocation + const logMessages = invocationLogs[i].getFunctionLogs(); + + // Filter child logs by level + const errorChildLogs = logMessages.filter( + (message) => + message.includes('ERROR') && message.includes(childLogMsg) + ); + + // Check that the child logger only logged once (the other) + // log was filtered out by the child logger because of its logLevel + expect(errorChildLogs).toHaveLength(1); + } + }); - it( - 'should NOT inject context into the child logger', - async () => { - const { CHILD_LOG_MSG: childLogMsg } = commonEnvironmentVars; - - for (let i = 0; i < invocationCount; i++) { - // Get log messages of the invocation - const logMessages = invocationLogs[i].getFunctionLogs(); - - // Filter child logs by level - const childLogMessages = logMessages.filter((message) => - message.includes(childLogMsg) - ); - - // Check that the context is not present in any of the child logs - for (const message of childLogMessages) { - const log = TestInvocationLogs.parseFunctionLog(message); - expect(log).not.toHaveProperty('function_arn'); - expect(log).not.toHaveProperty('function_memory_size'); - expect(log).not.toHaveProperty('function_name'); - expect(log).not.toHaveProperty('function_request_id'); - } + it('should NOT inject context into the child logger', async () => { + const { CHILD_LOG_MSG: childLogMsg } = commonEnvironmentVars; + + for (let i = 0; i < invocationCount; i++) { + // Get log messages of the invocation + const logMessages = invocationLogs[i].getFunctionLogs(); + + // Filter child logs by level + const childLogMessages = logMessages.filter((message) => + message.includes(childLogMsg) + ); + + // Check that the context is not present in any of the child logs + for (const message of childLogMessages) { + const log = TestInvocationLogs.parseFunctionLog(message); + 
expect(log).not.toHaveProperty('function_arn'); + expect(log).not.toHaveProperty('function_memory_size'); + expect(log).not.toHaveProperty('function_name'); + expect(log).not.toHaveProperty('function_request_id'); } - }, - TEST_CASE_TIMEOUT - ); + } + }); - it( - 'both logger instances should have the same persistent key/value', - async () => { - const { PERSISTENT_KEY: persistentKey } = commonEnvironmentVars; + it('both logger instances should have the same persistent key/value', async () => { + const { PERSISTENT_KEY: persistentKey } = commonEnvironmentVars; - for (let i = 0; i < invocationCount; i++) { - // Get log messages of the invocation - const logMessages = invocationLogs[i].getFunctionLogs(); + for (let i = 0; i < invocationCount; i++) { + // Get log messages of the invocation + const logMessages = invocationLogs[i].getFunctionLogs(); - // Check that all logs have the persistent key/value - for (const message of logMessages) { - const log = TestInvocationLogs.parseFunctionLog(message); - expect(log).toHaveProperty(persistentKey); - } + // Check that all logs have the persistent key/value + for (const message of logMessages) { + const log = TestInvocationLogs.parseFunctionLog(message); + expect(log).toHaveProperty(persistentKey); } - }, - TEST_CASE_TIMEOUT - ); + } + }); }); afterAll(async () => { if (!process.env.DISABLE_TEARDOWN) { await testStack.destroy(); } - }, TEARDOWN_TIMEOUT); + }); }); diff --git a/packages/logger/tests/e2e/constants.ts b/packages/logger/tests/e2e/constants.ts index a719a35b83..6770b144ad 100644 --- a/packages/logger/tests/e2e/constants.ts +++ b/packages/logger/tests/e2e/constants.ts @@ -1,10 +1,6 @@ import { randomUUID } from 'node:crypto'; const RESOURCE_NAME_PREFIX = 'Logger'; -const ONE_MINUTE = 60 * 1000; -const TEST_CASE_TIMEOUT = ONE_MINUTE; -const SETUP_TIMEOUT = 7 * ONE_MINUTE; -const TEARDOWN_TIMEOUT = 5 * ONE_MINUTE; const STACK_OUTPUT_LOG_GROUP = 'LogGroupName'; const XRAY_TRACE_ID_REGEX = 
/^1-[0-9a-f]{8}-[0-9a-f]{24}$/; @@ -28,10 +24,6 @@ const commonEnvironmentVars = { export { RESOURCE_NAME_PREFIX, - ONE_MINUTE, - TEST_CASE_TIMEOUT, - SETUP_TIMEOUT, - TEARDOWN_TIMEOUT, STACK_OUTPUT_LOG_GROUP, XRAY_TRACE_ID_REGEX, commonEnvironmentVars, diff --git a/packages/logger/tests/e2e/logEventEnvVarSetting.middy.test.ts b/packages/logger/tests/e2e/logEventEnvVarSetting.middy.test.ts index 21c2dad621..3a3efe2609 100644 --- a/packages/logger/tests/e2e/logEventEnvVarSetting.middy.test.ts +++ b/packages/logger/tests/e2e/logEventEnvVarSetting.middy.test.ts @@ -6,13 +6,7 @@ import { } from '@aws-lambda-powertools/testing-utils'; import { afterAll, beforeAll, describe, expect, it } from 'vitest'; import { LoggerTestNodejsFunction } from '../helpers/resources.js'; -import { - RESOURCE_NAME_PREFIX, - SETUP_TIMEOUT, - STACK_OUTPUT_LOG_GROUP, - TEARDOWN_TIMEOUT, - TEST_CASE_TIMEOUT, -} from './constants.js'; +import { RESOURCE_NAME_PREFIX, STACK_OUTPUT_LOG_GROUP } from './constants.js'; describe('Logger E2E tests, log event via env var setting with middy', () => { const testStack = new TestStack({ @@ -63,38 +57,34 @@ describe('Logger E2E tests, log event via env var setting with middy', () => { }); console.log('logGroupName', logGroupName); - }, SETUP_TIMEOUT); + }); describe('Log event', () => { - it( - 'should log the event as the first log of each invocation only', - async () => { - for (let i = 0; i < invocationCount; i++) { - // Get log messages of the invocation - const logMessages = invocationLogs[i].getFunctionLogs(); + it('should log the event as the first log of each invocation only', async () => { + for (let i = 0; i < invocationCount; i++) { + // Get log messages of the invocation + const logMessages = invocationLogs[i].getFunctionLogs(); - for (const [index, message] of logMessages.entries()) { - const log = TestInvocationLogs.parseFunctionLog(message); - // Check that the event is logged on the first log - if (index === 0) { - 
expect(log).toHaveProperty('event'); - expect(log.event).toStrictEqual( - expect.objectContaining({ foo: 'bar' }) - ); - // Check that the event is not logged again on the rest of the logs - } else { - expect(log).not.toHaveProperty('event'); - } + for (const [index, message] of logMessages.entries()) { + const log = TestInvocationLogs.parseFunctionLog(message); + // Check that the event is logged on the first log + if (index === 0) { + expect(log).toHaveProperty('event'); + expect(log.event).toStrictEqual( + expect.objectContaining({ foo: 'bar' }) + ); + // Check that the event is not logged again on the rest of the logs + } else { + expect(log).not.toHaveProperty('event'); } } - }, - TEST_CASE_TIMEOUT - ); + } + }); }); afterAll(async () => { if (!process.env.DISABLE_TEARDOWN) { await testStack.destroy(); } - }, TEARDOWN_TIMEOUT); + }); }); diff --git a/packages/logger/tests/e2e/sampleRate.decorator.test.ts b/packages/logger/tests/e2e/sampleRate.decorator.test.ts index 011a72d02d..d6e848f06e 100644 --- a/packages/logger/tests/e2e/sampleRate.decorator.test.ts +++ b/packages/logger/tests/e2e/sampleRate.decorator.test.ts @@ -7,13 +7,7 @@ import { } from '@aws-lambda-powertools/testing-utils'; import { afterAll, beforeAll, describe, expect, it } from 'vitest'; import { LoggerTestNodejsFunction } from '../helpers/resources.js'; -import { - RESOURCE_NAME_PREFIX, - SETUP_TIMEOUT, - STACK_OUTPUT_LOG_GROUP, - TEARDOWN_TIMEOUT, - TEST_CASE_TIMEOUT, -} from './constants.js'; +import { RESOURCE_NAME_PREFIX, STACK_OUTPUT_LOG_GROUP } from './constants.js'; describe('Logger E2E tests, sample rate and injectLambdaContext()', () => { const testStack = new TestStack({ @@ -62,73 +56,65 @@ describe('Logger E2E tests, sample rate and injectLambdaContext()', () => { }); console.log('logGroupName', logGroupName); - }, SETUP_TIMEOUT); + }); describe('Enabling sample rate', () => { - it( - 'should log all levels based on given sample rate, not just ERROR', - async () => { - // Fetch log 
streams from all invocations - let countSampled = 0; - let countNotSampled = 0; + it('should log all levels based on given sample rate, not just ERROR', async () => { + // Fetch log streams from all invocations + let countSampled = 0; + let countNotSampled = 0; - for (let i = 0; i < invocationCount; i++) { - // Get log messages of the invocation - const logMessages = invocationLogs[i].getFunctionLogs(); + for (let i = 0; i < invocationCount; i++) { + // Get log messages of the invocation + const logMessages = invocationLogs[i].getFunctionLogs(); - if (logMessages.length === 1 && logMessages[0].includes('ERROR')) { - countNotSampled++; - } else if ( - (logMessages.length === 5 && - logMessages[0].includes( - 'Setting log level to DEBUG due to sampling rate' - )) || - logMessages.length === 4 - ) { - countSampled++; - } else { - console.error(`Log group ${logGroupName} contains missing log`); - throw new Error( - 'Sampled log should have either 1 error log or 5 logs of all levels' - ); - } + if (logMessages.length === 1 && logMessages[0].includes('ERROR')) { + countNotSampled++; + } else if ( + (logMessages.length === 5 && + logMessages[0].includes( + 'Setting log level to DEBUG due to sampling rate' + )) || + logMessages.length === 4 + ) { + countSampled++; + } else { + console.error(`Log group ${logGroupName} contains missing log`); + throw new Error( + 'Sampled log should have either 1 error log or 5 logs of all levels' + ); } + } - // Given that we set rate to 0.5. The chance that we get all invocationCount sampled - // (or not sampled) is less than 0.5^20 - expect(countSampled).toBeGreaterThan(0); - expect(countNotSampled).toBeGreaterThan(0); - }, - TEST_CASE_TIMEOUT - ); + // Given that we set rate to 0.5. 
The chance that we get all invocationCount sampled + // (or not sampled) is less than 0.5^20 + expect(countSampled).toBeGreaterThan(0); + expect(countNotSampled).toBeGreaterThan(0); + }); }); describe('Decorator injectLambdaContext()', () => { - it( - 'should inject Lambda context into every log emitted', - async () => { - for (let i = 0; i < invocationCount; i++) { - // Get log messages of the invocation - const logMessages = invocationLogs[i].getFunctionLogs('ERROR'); + it('should inject Lambda context into every log emitted', async () => { + for (let i = 0; i < invocationCount; i++) { + // Get log messages of the invocation + const logMessages = invocationLogs[i].getFunctionLogs('ERROR'); - // Check that the context is logged on every log - for (const message of logMessages) { - const log = TestInvocationLogs.parseFunctionLog(message); - expect(log).toHaveProperty('function_arn'); - expect(log).toHaveProperty('function_memory_size'); - expect(log).toHaveProperty('function_name'); - expect(log).toHaveProperty('function_request_id'); - expect(log).toHaveProperty('timestamp'); - } + // Check that the context is logged on every log + for (const message of logMessages) { + const log = TestInvocationLogs.parseFunctionLog(message); + expect(log).toHaveProperty('function_arn'); + expect(log).toHaveProperty('function_memory_size'); + expect(log).toHaveProperty('function_name'); + expect(log).toHaveProperty('function_request_id'); + expect(log).toHaveProperty('timestamp'); } - }, - TEST_CASE_TIMEOUT - ); + } + }); }); afterAll(async () => { if (!process.env.DISABLE_TEARDOWN) { await testStack.destroy(); } - }, TEARDOWN_TIMEOUT); + }); }); diff --git a/packages/logger/vitest.config.ts b/packages/logger/vitest.config.ts index 9f1196ef1f..baa5cf7463 100644 --- a/packages/logger/vitest.config.ts +++ b/packages/logger/vitest.config.ts @@ -4,5 +4,7 @@ export default defineProject({ test: { environment: 'node', setupFiles: ['../testing/src/setupEnv.ts'], + hookTimeout: 1_000 * 
60 * 10, // 10 minutes + testTimeout: 1_000 * 60 * 3, // 3 minutes }, }); diff --git a/packages/metrics/tests/e2e/basicFeatures.decorators.test.ts b/packages/metrics/tests/e2e/basicFeatures.decorators.test.ts index 58398e562f..39ce36248a 100644 --- a/packages/metrics/tests/e2e/basicFeatures.decorators.test.ts +++ b/packages/metrics/tests/e2e/basicFeatures.decorators.test.ts @@ -10,14 +10,7 @@ import { import { afterAll, beforeAll, describe, expect, it } from 'vitest'; import { getMetrics, sortDimensions } from '../helpers/metricsUtils.js'; import { MetricsTestNodejsFunction } from '../helpers/resources.js'; -import { - ONE_MINUTE, - RESOURCE_NAME_PREFIX, - SETUP_TIMEOUT, - TEARDOWN_TIMEOUT, - TEST_CASE_TIMEOUT, - commonEnvironmentVars, -} from './constants.js'; +import { RESOURCE_NAME_PREFIX, commonEnvironmentVars } from './constants.js'; describe('Metrics E2E tests, basic features decorator usage', () => { const testStack = new TestStack({ @@ -66,133 +59,123 @@ describe('Metrics E2E tests, basic features decorator usage', () => { times: invocations, invocationMode: 'SEQUENTIAL', }); - }, SETUP_TIMEOUT); + }); describe('ColdStart metrics', () => { - it( - 'captures the ColdStart Metric', - async () => { - const { - EXPECTED_NAMESPACE: expectedNamespace, - EXPECTED_DEFAULT_DIMENSIONS: expectedDefaultDimensions, - } = commonEnvironmentVars; - - const expectedDimensions = [ - { Name: 'service', Value: expectedServiceName }, - { Name: 'function_name', Value: fnNameBasicFeatures }, - { - Name: Object.keys(expectedDefaultDimensions)[0], - Value: expectedDefaultDimensions.MyDimension, - }, - ]; - // Check coldstart metric dimensions - const coldStartMetrics = await getMetrics( - cloudwatchClient, - expectedNamespace, - 'ColdStart', - 1 - ); - - expect(coldStartMetrics.Metrics?.length).toBe(1); - const coldStartMetric = coldStartMetrics.Metrics?.[0]; - expect(sortDimensions(coldStartMetric?.Dimensions)).toStrictEqual( - sortDimensions(expectedDimensions) - ); - - // Check 
coldstart metric value - const adjustedStartTime = new Date(startTime.getTime() - ONE_MINUTE); - const endTime = new Date(new Date().getTime() + ONE_MINUTE); - const coldStartMetricStat = await cloudwatchClient.send( - new GetMetricStatisticsCommand({ - Namespace: expectedNamespace, - StartTime: adjustedStartTime, - Dimensions: expectedDimensions, - EndTime: endTime, - Period: 60, - MetricName: 'ColdStart', - Statistics: ['Sum'], - }) - ); - - // Despite lambda has been called twice, coldstart metric sum should only be 1 - const singleDataPoint = coldStartMetricStat.Datapoints - ? coldStartMetricStat.Datapoints[0] - : {}; - expect(singleDataPoint?.Sum).toBe(1); - }, - TEST_CASE_TIMEOUT - ); + it('captures the ColdStart Metric', async () => { + const { + EXPECTED_NAMESPACE: expectedNamespace, + EXPECTED_DEFAULT_DIMENSIONS: expectedDefaultDimensions, + } = commonEnvironmentVars; + + const expectedDimensions = [ + { Name: 'service', Value: expectedServiceName }, + { Name: 'function_name', Value: fnNameBasicFeatures }, + { + Name: Object.keys(expectedDefaultDimensions)[0], + Value: expectedDefaultDimensions.MyDimension, + }, + ]; + // Check coldstart metric dimensions + const coldStartMetrics = await getMetrics( + cloudwatchClient, + expectedNamespace, + 'ColdStart', + 1 + ); + + expect(coldStartMetrics.Metrics?.length).toBe(1); + const coldStartMetric = coldStartMetrics.Metrics?.[0]; + expect(sortDimensions(coldStartMetric?.Dimensions)).toStrictEqual( + sortDimensions(expectedDimensions) + ); + + // Check coldstart metric value + const adjustedStartTime = new Date(startTime.getTime() - 60 * 1000); + const endTime = new Date(new Date().getTime() + 60 * 1000); + const coldStartMetricStat = await cloudwatchClient.send( + new GetMetricStatisticsCommand({ + Namespace: expectedNamespace, + StartTime: adjustedStartTime, + Dimensions: expectedDimensions, + EndTime: endTime, + Period: 60, + MetricName: 'ColdStart', + Statistics: ['Sum'], + }) + ); + + // Despite lambda has 
been called twice, coldstart metric sum should only be 1 + const singleDataPoint = coldStartMetricStat.Datapoints + ? coldStartMetricStat.Datapoints[0] + : {}; + expect(singleDataPoint?.Sum).toBe(1); + }); }); describe('Default and extra dimensions', () => { - it( - 'produces a Metric with the default and extra one dimensions', - async () => { - const { - EXPECTED_NAMESPACE: expectedNamespace, - EXPECTED_METRIC_NAME: expectedMetricName, - EXPECTED_METRIC_VALUE: expectedMetricValue, - EXPECTED_DEFAULT_DIMENSIONS: expectedDefaultDimensions, - EXPECTED_EXTRA_DIMENSION: expectedExtraDimension, - } = commonEnvironmentVars; - - // Check metric dimensions - const metrics = await getMetrics( - cloudwatchClient, - expectedNamespace, - expectedMetricName, - 1 - ); - - expect(metrics.Metrics?.length).toBe(1); - const metric = metrics.Metrics?.[0]; - const expectedDimensions = [ - { Name: 'service', Value: expectedServiceName }, - { - Name: Object.keys(expectedDefaultDimensions)[0], - Value: expectedDefaultDimensions.MyDimension, - }, - { - Name: Object.keys(expectedExtraDimension)[0], - Value: expectedExtraDimension.MyExtraDimension, - }, - ]; - expect(sortDimensions(metric?.Dimensions)).toStrictEqual( - sortDimensions(expectedDimensions) - ); - - // Check coldstart metric value - const adjustedStartTime = new Date( - startTime.getTime() - 3 * ONE_MINUTE - ); - const endTime = new Date(new Date().getTime() + ONE_MINUTE); - const metricStat = await cloudwatchClient.send( - new GetMetricStatisticsCommand({ - Namespace: expectedNamespace, - StartTime: adjustedStartTime, - Dimensions: expectedDimensions, - EndTime: endTime, - Period: 60, - MetricName: expectedMetricName, - Statistics: ['Sum'], - }) - ); - - // Since lambda has been called twice in this test and potentially more in others, metric sum should be at least of expectedMetricValue * invocationCount - const singleDataPoint = metricStat.Datapoints - ? 
metricStat.Datapoints[0] - : {}; - expect(singleDataPoint?.Sum).toBeGreaterThanOrEqual( - Number.parseInt(expectedMetricValue) * invocations - ); - }, - TEST_CASE_TIMEOUT - ); + it('produces a Metric with the default and extra one dimensions', async () => { + const { + EXPECTED_NAMESPACE: expectedNamespace, + EXPECTED_METRIC_NAME: expectedMetricName, + EXPECTED_METRIC_VALUE: expectedMetricValue, + EXPECTED_DEFAULT_DIMENSIONS: expectedDefaultDimensions, + EXPECTED_EXTRA_DIMENSION: expectedExtraDimension, + } = commonEnvironmentVars; + + // Check metric dimensions + const metrics = await getMetrics( + cloudwatchClient, + expectedNamespace, + expectedMetricName, + 1 + ); + + expect(metrics.Metrics?.length).toBe(1); + const metric = metrics.Metrics?.[0]; + const expectedDimensions = [ + { Name: 'service', Value: expectedServiceName }, + { + Name: Object.keys(expectedDefaultDimensions)[0], + Value: expectedDefaultDimensions.MyDimension, + }, + { + Name: Object.keys(expectedExtraDimension)[0], + Value: expectedExtraDimension.MyExtraDimension, + }, + ]; + expect(sortDimensions(metric?.Dimensions)).toStrictEqual( + sortDimensions(expectedDimensions) + ); + + // Check coldstart metric value + const adjustedStartTime = new Date(startTime.getTime() - 3 * 60 * 1000); + const endTime = new Date(new Date().getTime() + 60 * 1000); + const metricStat = await cloudwatchClient.send( + new GetMetricStatisticsCommand({ + Namespace: expectedNamespace, + StartTime: adjustedStartTime, + Dimensions: expectedDimensions, + EndTime: endTime, + Period: 60, + MetricName: expectedMetricName, + Statistics: ['Sum'], + }) + ); + + // Since lambda has been called twice in this test and potentially more in others, metric sum should be at least of expectedMetricValue * invocationCount + const singleDataPoint = metricStat.Datapoints + ? 
metricStat.Datapoints[0] + : {}; + expect(singleDataPoint?.Sum).toBeGreaterThanOrEqual( + Number.parseInt(expectedMetricValue) * invocations + ); + }); }); afterAll(async () => { if (!process.env.DISABLE_TEARDOWN) { await testStack.destroy(); } - }, TEARDOWN_TIMEOUT); + }); }); diff --git a/packages/metrics/tests/e2e/basicFeatures.manual.test.ts b/packages/metrics/tests/e2e/basicFeatures.manual.test.ts index 5272ee8578..694d444847 100644 --- a/packages/metrics/tests/e2e/basicFeatures.manual.test.ts +++ b/packages/metrics/tests/e2e/basicFeatures.manual.test.ts @@ -10,14 +10,7 @@ import { import { afterAll, beforeAll, describe, expect, it } from 'vitest'; import { getMetrics, sortDimensions } from '../helpers/metricsUtils.js'; import { MetricsTestNodejsFunction } from '../helpers/resources.js'; -import { - ONE_MINUTE, - RESOURCE_NAME_PREFIX, - SETUP_TIMEOUT, - TEARDOWN_TIMEOUT, - TEST_CASE_TIMEOUT, - commonEnvironmentVars, -} from './constants.js'; +import { RESOURCE_NAME_PREFIX, commonEnvironmentVars } from './constants.js'; describe('Metrics E2E tests, manual usage', () => { const testStack = new TestStack({ @@ -64,121 +57,111 @@ describe('Metrics E2E tests, manual usage', () => { times: invocations, invocationMode: 'SEQUENTIAL', }); - }, SETUP_TIMEOUT); + }); describe('ColdStart metrics', () => { - it( - 'captures the ColdStart Metric', - async () => { - const { EXPECTED_NAMESPACE: expectedNamespace } = commonEnvironmentVars; - - // Check coldstart metric dimensions - const coldStartMetrics = await getMetrics( - cloudwatchClient, - expectedNamespace, - 'ColdStart', - 1 - ); - expect(coldStartMetrics.Metrics?.length).toBe(1); - const coldStartMetric = coldStartMetrics.Metrics?.[0]; - expect(coldStartMetric?.Dimensions).toStrictEqual([ - { Name: 'service', Value: expectedServiceName }, - ]); - - // Check coldstart metric value - const adjustedStartTime = new Date(startTime.getTime() - 60 * 1000); - const endTime = new Date(new Date().getTime() + 60 * 1000); - const 
coldStartMetricStat = await cloudwatchClient.send( - new GetMetricStatisticsCommand({ - Namespace: expectedNamespace, - StartTime: adjustedStartTime, - Dimensions: [{ Name: 'service', Value: expectedServiceName }], - EndTime: endTime, - Period: 60, - MetricName: 'ColdStart', - Statistics: ['Sum'], - }) - ); - - // Despite lambda has been called twice, coldstart metric sum should only be 1 - const singleDataPoint = coldStartMetricStat.Datapoints - ? coldStartMetricStat.Datapoints[0] - : {}; - expect(singleDataPoint?.Sum).toBe(1); - }, - TEST_CASE_TIMEOUT - ); + it('captures the ColdStart Metric', async () => { + const { EXPECTED_NAMESPACE: expectedNamespace } = commonEnvironmentVars; + + // Check coldstart metric dimensions + const coldStartMetrics = await getMetrics( + cloudwatchClient, + expectedNamespace, + 'ColdStart', + 1 + ); + expect(coldStartMetrics.Metrics?.length).toBe(1); + const coldStartMetric = coldStartMetrics.Metrics?.[0]; + expect(coldStartMetric?.Dimensions).toStrictEqual([ + { Name: 'service', Value: expectedServiceName }, + ]); + + // Check coldstart metric value + const adjustedStartTime = new Date(startTime.getTime() - 60 * 1000); + const endTime = new Date(new Date().getTime() + 60 * 1000); + const coldStartMetricStat = await cloudwatchClient.send( + new GetMetricStatisticsCommand({ + Namespace: expectedNamespace, + StartTime: adjustedStartTime, + Dimensions: [{ Name: 'service', Value: expectedServiceName }], + EndTime: endTime, + Period: 60, + MetricName: 'ColdStart', + Statistics: ['Sum'], + }) + ); + + // Despite lambda has been called twice, coldstart metric sum should only be 1 + const singleDataPoint = coldStartMetricStat.Datapoints + ? 
coldStartMetricStat.Datapoints[0] + : {}; + expect(singleDataPoint?.Sum).toBe(1); + }); }); describe('Default and extra dimensions', () => { - it( - 'produces a Metric with the default and extra one dimensions', - async () => { - const { - EXPECTED_NAMESPACE: expectedNamespace, - EXPECTED_METRIC_NAME: expectedMetricName, - EXPECTED_METRIC_VALUE: expectedMetricValue, - EXPECTED_DEFAULT_DIMENSIONS: expectedDefaultDimensions, - EXPECTED_EXTRA_DIMENSION: expectedExtraDimension, - } = commonEnvironmentVars; - - // Check metric dimensions - const metrics = await getMetrics( - cloudwatchClient, - expectedNamespace, - expectedMetricName, - 1 - ); - - expect(metrics.Metrics?.length).toBe(1); - const metric = metrics.Metrics?.[0]; - const expectedDimensions = [ - { Name: 'service', Value: expectedServiceName }, - { - Name: Object.keys(expectedDefaultDimensions)[0], - Value: expectedDefaultDimensions.MyDimension, - }, - { - Name: Object.keys(expectedExtraDimension)[0], - Value: expectedExtraDimension.MyExtraDimension, - }, - ]; - expect(sortDimensions(metric?.Dimensions)).toStrictEqual( - sortDimensions(expectedDimensions) - ); - - // Check coldstart metric value - const adjustedStartTime = new Date( - startTime.getTime() - 3 * ONE_MINUTE - ); - const endTime = new Date(new Date().getTime() + ONE_MINUTE); - const metricStat = await cloudwatchClient.send( - new GetMetricStatisticsCommand({ - Namespace: expectedNamespace, - StartTime: adjustedStartTime, - Dimensions: expectedDimensions, - EndTime: endTime, - Period: 60, - MetricName: expectedMetricName, - Statistics: ['Sum'], - }) - ); - - // Since lambda has been called twice in this test and potentially more in others, metric sum should be at least of expectedMetricValue * invocationCount - const singleDataPoint = metricStat.Datapoints - ? 
metricStat.Datapoints[0] - : {}; - expect(singleDataPoint.Sum).toBeGreaterThanOrEqual( - Number.parseInt(expectedMetricValue) * invocations - ); - }, - TEST_CASE_TIMEOUT - ); + it('produces a Metric with the default and extra one dimensions', async () => { + const { + EXPECTED_NAMESPACE: expectedNamespace, + EXPECTED_METRIC_NAME: expectedMetricName, + EXPECTED_METRIC_VALUE: expectedMetricValue, + EXPECTED_DEFAULT_DIMENSIONS: expectedDefaultDimensions, + EXPECTED_EXTRA_DIMENSION: expectedExtraDimension, + } = commonEnvironmentVars; + + // Check metric dimensions + const metrics = await getMetrics( + cloudwatchClient, + expectedNamespace, + expectedMetricName, + 1 + ); + + expect(metrics.Metrics?.length).toBe(1); + const metric = metrics.Metrics?.[0]; + const expectedDimensions = [ + { Name: 'service', Value: expectedServiceName }, + { + Name: Object.keys(expectedDefaultDimensions)[0], + Value: expectedDefaultDimensions.MyDimension, + }, + { + Name: Object.keys(expectedExtraDimension)[0], + Value: expectedExtraDimension.MyExtraDimension, + }, + ]; + expect(sortDimensions(metric?.Dimensions)).toStrictEqual( + sortDimensions(expectedDimensions) + ); + + // Check coldstart metric value + const adjustedStartTime = new Date(startTime.getTime() - 3 * 60 * 1000); + const endTime = new Date(new Date().getTime() + 60 * 1000); + const metricStat = await cloudwatchClient.send( + new GetMetricStatisticsCommand({ + Namespace: expectedNamespace, + StartTime: adjustedStartTime, + Dimensions: expectedDimensions, + EndTime: endTime, + Period: 60, + MetricName: expectedMetricName, + Statistics: ['Sum'], + }) + ); + + // Since lambda has been called twice in this test and potentially more in others, metric sum should be at least of expectedMetricValue * invocationCount + const singleDataPoint = metricStat.Datapoints + ? 
metricStat.Datapoints[0] + : {}; + expect(singleDataPoint.Sum).toBeGreaterThanOrEqual( + Number.parseInt(expectedMetricValue) * invocations + ); + }); }); afterAll(async () => { if (!process.env.DISABLE_TEARDOWN) { await testStack.destroy(); } - }, TEARDOWN_TIMEOUT); + }); }); diff --git a/packages/metrics/tests/e2e/constants.ts b/packages/metrics/tests/e2e/constants.ts index dbffabf33a..7837b7e551 100644 --- a/packages/metrics/tests/e2e/constants.ts +++ b/packages/metrics/tests/e2e/constants.ts @@ -2,10 +2,6 @@ import { randomUUID } from 'node:crypto'; import { MetricUnit } from '../../src/index.js'; const RESOURCE_NAME_PREFIX = 'Metrics'; -const ONE_MINUTE = 60 * 1000; -const TEST_CASE_TIMEOUT = 3 * ONE_MINUTE; -const SETUP_TIMEOUT = 7 * ONE_MINUTE; -const TEARDOWN_TIMEOUT = 5 * ONE_MINUTE; const commonEnvironmentVars = { EXPECTED_METRIC_NAME: 'MyMetric', @@ -21,11 +17,4 @@ const commonEnvironmentVars = { POWERTOOLS_SERVICE_NAME: 'metrics-e2e-testing', }; -export { - RESOURCE_NAME_PREFIX, - ONE_MINUTE, - TEST_CASE_TIMEOUT, - SETUP_TIMEOUT, - TEARDOWN_TIMEOUT, - commonEnvironmentVars, -}; +export { RESOURCE_NAME_PREFIX, commonEnvironmentVars }; diff --git a/packages/metrics/vitest.config.ts b/packages/metrics/vitest.config.ts index 9f1196ef1f..baa5cf7463 100644 --- a/packages/metrics/vitest.config.ts +++ b/packages/metrics/vitest.config.ts @@ -4,5 +4,7 @@ export default defineProject({ test: { environment: 'node', setupFiles: ['../testing/src/setupEnv.ts'], + hookTimeout: 1_000 * 60 * 10, // 10 minutes + testTimeout: 1_000 * 60 * 3, // 3 minutes }, }); diff --git a/packages/parameters/tests/e2e/appConfigProvider.class.test.ts b/packages/parameters/tests/e2e/appConfigProvider.class.test.ts index 3bd7e3a9b7..5159600f79 100644 --- a/packages/parameters/tests/e2e/appConfigProvider.class.test.ts +++ b/packages/parameters/tests/e2e/appConfigProvider.class.test.ts @@ -8,12 +8,7 @@ import { TestNodejsFunction } from '@aws-lambda-powertools/testing-utils/resourc import { 
toBase64 } from '@smithy/util-base64'; import { afterAll, beforeAll, describe, expect, it } from 'vitest'; import { TestAppConfigWithProfiles } from '../helpers/resources.js'; -import { - RESOURCE_NAME_PREFIX, - SETUP_TIMEOUT, - TEARDOWN_TIMEOUT, - TEST_CASE_TIMEOUT, -} from './constants.js'; +import { RESOURCE_NAME_PREFIX } from './constants.js'; /** * This test suite deploys a CDK stack with a Lambda function and a number of AppConfig parameters. @@ -172,7 +167,7 @@ describe('Parameters E2E tests, AppConfig provider', () => { invocationLogs = await invokeFunctionOnce({ functionName, }); - }, SETUP_TIMEOUT); + }); describe('AppConfigProvider usage', () => { // Test 1 - get a single parameter as-is (no transformation - should return an Uint8Array) @@ -222,62 +217,50 @@ describe('Parameters E2E tests, AppConfig provider', () => { // Test 5 - get parameter twice with middleware, which counts the number // of requests, we check later if we only called AppConfig API once - it( - 'should retrieve single parameter cached', - () => { - const logs = invocationLogs.getFunctionLogs(); - const testLog = TestInvocationLogs.parseFunctionLog(logs[4]); + it('should retrieve single parameter cached', () => { + const logs = invocationLogs.getFunctionLogs(); + const testLog = TestInvocationLogs.parseFunctionLog(logs[4]); - expect(testLog).toStrictEqual({ - test: 'get-cached', - value: 1, - }); - }, - TEST_CASE_TIMEOUT - ); + expect(testLog).toStrictEqual({ + test: 'get-cached', + value: 1, + }); + }); // Test 6 - get parameter twice, but force fetch 2nd time, // we count number of SDK requests and check that we made two API calls - it( - 'should retrieve single parameter twice without caching', - async () => { - const logs = invocationLogs.getFunctionLogs(); - const testLog = TestInvocationLogs.parseFunctionLog(logs[5]); + it('should retrieve single parameter twice without caching', async () => { + const logs = invocationLogs.getFunctionLogs(); + const testLog = 
TestInvocationLogs.parseFunctionLog(logs[5]); - expect(testLog).toStrictEqual({ - test: 'get-forced', - value: 2, - }); - }, - TEST_CASE_TIMEOUT - ); + expect(testLog).toStrictEqual({ + test: 'get-forced', + value: 2, + }); + }); // Test 7 - get parameter twice, using maxAge to avoid primary cache // we count number of SDK requests and check that we made two API calls // and check that the values match - it( - 'should retrieve single parameter twice, with expiration between and matching values', - async () => { - const logs = invocationLogs.getFunctionLogs(); - const testLog = TestInvocationLogs.parseFunctionLog(logs[6]); - const result = freeFormPlainTextValue; + it('should retrieve single parameter twice, with expiration between and matching values', async () => { + const logs = invocationLogs.getFunctionLogs(); + const testLog = TestInvocationLogs.parseFunctionLog(logs[6]); + const result = freeFormPlainTextValue; - expect(testLog).toStrictEqual({ - test: 'get-expired', - value: { - counter: 2, - result1: result, - result2: result, - }, - }); - }, - TEST_CASE_TIMEOUT - ); + expect(testLog).toStrictEqual({ + test: 'get-expired', + value: { + counter: 2, + result1: result, + result2: result, + }, + }); + }); }); afterAll(async () => { if (!process.env.DISABLE_TEARDOWN) { await testStack.destroy(); } - }, TEARDOWN_TIMEOUT); + }); }); diff --git a/packages/parameters/tests/e2e/constants.ts b/packages/parameters/tests/e2e/constants.ts index b9b9e6ef30..42e1837795 100644 --- a/packages/parameters/tests/e2e/constants.ts +++ b/packages/parameters/tests/e2e/constants.ts @@ -1,5 +1 @@ export const RESOURCE_NAME_PREFIX = 'Parameters'; -export const ONE_MINUTE = 60 * 1000; -export const TEST_CASE_TIMEOUT = 3 * ONE_MINUTE; -export const SETUP_TIMEOUT = 7 * ONE_MINUTE; -export const TEARDOWN_TIMEOUT = 5 * ONE_MINUTE; diff --git a/packages/parameters/tests/e2e/dynamoDBProvider.class.test.ts b/packages/parameters/tests/e2e/dynamoDBProvider.class.test.ts index 
17c56600d2..333bbb7fce 100644 --- a/packages/parameters/tests/e2e/dynamoDBProvider.class.test.ts +++ b/packages/parameters/tests/e2e/dynamoDBProvider.class.test.ts @@ -8,12 +8,7 @@ import { TestNodejsFunction } from '@aws-lambda-powertools/testing-utils/resourc import { AttributeType } from 'aws-cdk-lib/aws-dynamodb'; import { afterAll, beforeAll, describe, expect, it } from 'vitest'; import { TestDynamodbTableWithItems } from '../helpers/resources.js'; -import { - RESOURCE_NAME_PREFIX, - SETUP_TIMEOUT, - TEARDOWN_TIMEOUT, - TEST_CASE_TIMEOUT, -} from './constants.js'; +import { RESOURCE_NAME_PREFIX } from './constants.js'; /** * This test suite deploys a CDK stack with a Lambda function and a number of DynamoDB tables. @@ -264,68 +259,52 @@ describe('Parameters E2E tests, dynamoDB provider', () => { invocationLogs = await invokeFunctionOnce({ functionName, }); - }, SETUP_TIMEOUT); + }); describe('DynamoDBProvider usage', () => { // Test 1 - get a single parameter with default options (keyAttr: 'id', valueAttr: 'value') - it( - 'should retrieve a single parameter', - async () => { - const logs = invocationLogs.getFunctionLogs(); - const testLog = TestInvocationLogs.parseFunctionLog(logs[0]); + it('should retrieve a single parameter', async () => { + const logs = invocationLogs.getFunctionLogs(); + const testLog = TestInvocationLogs.parseFunctionLog(logs[0]); - expect(testLog).toStrictEqual({ - test: 'get', - value: 'foo', - }); - }, - TEST_CASE_TIMEOUT - ); + expect(testLog).toStrictEqual({ + test: 'get', + value: 'foo', + }); + }); // Test 2 - get multiple parameters with default options (keyAttr: 'id', sortAttr: 'sk', valueAttr: 'value') - it( - 'should retrieve multiple parameters', - async () => { - const logs = invocationLogs.getFunctionLogs(); - const testLog = TestInvocationLogs.parseFunctionLog(logs[1]); + it('should retrieve multiple parameters', async () => { + const logs = invocationLogs.getFunctionLogs(); + const testLog = 
TestInvocationLogs.parseFunctionLog(logs[1]); - expect(testLog).toStrictEqual({ - test: 'get-multiple', - value: { config: 'bar', key: 'baz' }, - }); - }, - TEST_CASE_TIMEOUT - ); + expect(testLog).toStrictEqual({ + test: 'get-multiple', + value: { config: 'bar', key: 'baz' }, + }); + }); // Test 3 - get a single parameter with custom options (keyAttr: 'key', valueAttr: 'val') - it( - 'should retrieve a single parameter', - async () => { - const logs = invocationLogs.getFunctionLogs(); - const testLog = TestInvocationLogs.parseFunctionLog(logs[2]); + it('should retrieve a single parameter', async () => { + const logs = invocationLogs.getFunctionLogs(); + const testLog = TestInvocationLogs.parseFunctionLog(logs[2]); - expect(testLog).toStrictEqual({ - test: 'get-custom', - value: 'foo', - }); - }, - TEST_CASE_TIMEOUT - ); + expect(testLog).toStrictEqual({ + test: 'get-custom', + value: 'foo', + }); + }); // Test 4 - get multiple parameters with custom options (keyAttr: 'key', sortAttr: 'sort', valueAttr: 'val') - it( - 'should retrieve multiple parameters', - async () => { - const logs = invocationLogs.getFunctionLogs(); - const testLog = TestInvocationLogs.parseFunctionLog(logs[3]); + it('should retrieve multiple parameters', async () => { + const logs = invocationLogs.getFunctionLogs(); + const testLog = TestInvocationLogs.parseFunctionLog(logs[3]); - expect(testLog).toStrictEqual({ - test: 'get-multiple-custom', - value: { config: 'bar', key: 'baz' }, - }); - }, - TEST_CASE_TIMEOUT - ); + expect(testLog).toStrictEqual({ + test: 'get-multiple-custom', + value: { config: 'bar', key: 'baz' }, + }); + }); // Test 5 - get a single parameter with json transform it('should retrieve a single parameter with json transform', async () => { @@ -390,5 +369,5 @@ describe('Parameters E2E tests, dynamoDB provider', () => { if (!process.env.DISABLE_TEARDOWN) { await testStack.destroy(); } - }, TEARDOWN_TIMEOUT); + }); }); diff --git 
a/packages/parameters/tests/e2e/secretsProvider.class.test.ts b/packages/parameters/tests/e2e/secretsProvider.class.test.ts index 4acbd65eba..87467a9b06 100644 --- a/packages/parameters/tests/e2e/secretsProvider.class.test.ts +++ b/packages/parameters/tests/e2e/secretsProvider.class.test.ts @@ -8,12 +8,7 @@ import { TestNodejsFunction } from '@aws-lambda-powertools/testing-utils/resourc import { SecretValue } from 'aws-cdk-lib'; import { afterAll, beforeAll, describe, expect, it } from 'vitest'; import { TestSecret } from '../helpers/resources.js'; -import { - RESOURCE_NAME_PREFIX, - SETUP_TIMEOUT, - TEARDOWN_TIMEOUT, - TEST_CASE_TIMEOUT, -} from './constants.js'; +import { RESOURCE_NAME_PREFIX } from './constants.js'; /** * Collection of e2e tests for SecretsProvider utility. @@ -138,50 +133,38 @@ describe('Parameters E2E tests, Secrets Manager provider', () => { invocationLogs = await invokeFunctionOnce({ functionName, }); - }, SETUP_TIMEOUT); + }); describe('SecretsProvider usage', () => { - it( - 'should retrieve a secret as plain string', - async () => { - const logs = invocationLogs.getFunctionLogs(); - const testLog = TestInvocationLogs.parseFunctionLog(logs[0]); - - expect(testLog).toStrictEqual({ - test: 'get-plain', - value: 'foo', - }); - }, - TEST_CASE_TIMEOUT - ); + it('should retrieve a secret as plain string', async () => { + const logs = invocationLogs.getFunctionLogs(); + const testLog = TestInvocationLogs.parseFunctionLog(logs[0]); + + expect(testLog).toStrictEqual({ + test: 'get-plain', + value: 'foo', + }); + }); - it( - 'should retrieve a secret using transform json option', - async () => { - const logs = invocationLogs.getFunctionLogs(); - const testLog = TestInvocationLogs.parseFunctionLog(logs[1]); + it('should retrieve a secret using transform json option', async () => { + const logs = invocationLogs.getFunctionLogs(); + const testLog = TestInvocationLogs.parseFunctionLog(logs[1]); - expect(testLog).toStrictEqual({ - test: 
'get-transform-json', - value: { foo: 'bar' }, - }); - }, - TEST_CASE_TIMEOUT - ); + expect(testLog).toStrictEqual({ + test: 'get-transform-json', + value: { foo: 'bar' }, + }); + }); - it( - 'should retrieve a secret using transform binary option', - async () => { - const logs = invocationLogs.getFunctionLogs(); - const testLog = TestInvocationLogs.parseFunctionLog(logs[2]); + it('should retrieve a secret using transform binary option', async () => { + const logs = invocationLogs.getFunctionLogs(); + const testLog = TestInvocationLogs.parseFunctionLog(logs[2]); - expect(testLog).toStrictEqual({ - test: 'get-transform-binary', - value: 'foo', - }); - }, - TEST_CASE_TIMEOUT - ); + expect(testLog).toStrictEqual({ + test: 'get-transform-binary', + value: 'foo', + }); + }); }); it('should retrieve a secret twice with cached value', async () => { @@ -210,5 +193,5 @@ describe('Parameters E2E tests, Secrets Manager provider', () => { if (!process.env.DISABLE_TEARDOWN) { await testStack.destroy(); } - }, TEARDOWN_TIMEOUT); + }); }); diff --git a/packages/parameters/tests/e2e/ssmProvider.class.test.ts b/packages/parameters/tests/e2e/ssmProvider.class.test.ts index 71d7f75a1d..fa5bf8931b 100644 --- a/packages/parameters/tests/e2e/ssmProvider.class.test.ts +++ b/packages/parameters/tests/e2e/ssmProvider.class.test.ts @@ -10,12 +10,7 @@ import { TestSecureStringParameter, TestStringParameter, } from '../helpers/resources.js'; -import { - RESOURCE_NAME_PREFIX, - SETUP_TIMEOUT, - TEARDOWN_TIMEOUT, - TEST_CASE_TIMEOUT, -} from './constants.js'; +import { RESOURCE_NAME_PREFIX } from './constants.js'; /** * This test suite deploys a CDK stack with a Lambda function and a number of SSM parameters. 
@@ -189,200 +184,160 @@ describe('Parameters E2E tests, SSM provider', () => { invocationLogs = await invokeFunctionOnce({ functionName, }); - }, SETUP_TIMEOUT); + }); describe('SSMProvider usage', () => { // Test 1 - get a single parameter by name with default options - it( - 'should retrieve a single parameter', - async () => { - const logs = invocationLogs.getFunctionLogs(); - const testLog = TestInvocationLogs.parseFunctionLog(logs[0]); - - expect(testLog).toStrictEqual({ - test: 'get', - value: paramAValue, - }); - }, - TEST_CASE_TIMEOUT - ); + it('should retrieve a single parameter', async () => { + const logs = invocationLogs.getFunctionLogs(); + const testLog = TestInvocationLogs.parseFunctionLog(logs[0]); + + expect(testLog).toStrictEqual({ + test: 'get', + value: paramAValue, + }); + }); // Test 2 - get a single parameter by name with decrypt - it( - 'should retrieve a single parameter with decryption', - async () => { - const logs = invocationLogs.getFunctionLogs(); - const testLog = TestInvocationLogs.parseFunctionLog(logs[1]); - - expect(testLog).toStrictEqual({ - test: 'get-decrypt', - value: paramEncryptedAValue, - }); - }, - TEST_CASE_TIMEOUT - ); + it('should retrieve a single parameter with decryption', async () => { + const logs = invocationLogs.getFunctionLogs(); + const testLog = TestInvocationLogs.parseFunctionLog(logs[1]); + + expect(testLog).toStrictEqual({ + test: 'get-decrypt', + value: paramEncryptedAValue, + }); + }); // Test 3 - get multiple parameters by path with default options - it( - 'should retrieve multiple parameters', - async () => { - const logs = invocationLogs.getFunctionLogs(); - const testLog = TestInvocationLogs.parseFunctionLog(logs[2]); - const expectedParameterNameA = paramA.substring( - paramA.lastIndexOf('/') + 1 - ); - const expectedParameterNameB = paramB.substring( - paramB.lastIndexOf('/') + 1 - ); - - expect(testLog).toStrictEqual({ - test: 'get-multiple', - value: { - [expectedParameterNameA]: paramAValue, - 
[expectedParameterNameB]: paramBValue, - }, - }); - }, - TEST_CASE_TIMEOUT - ); + it('should retrieve multiple parameters', async () => { + const logs = invocationLogs.getFunctionLogs(); + const testLog = TestInvocationLogs.parseFunctionLog(logs[2]); + const expectedParameterNameA = paramA.substring( + paramA.lastIndexOf('/') + 1 + ); + const expectedParameterNameB = paramB.substring( + paramB.lastIndexOf('/') + 1 + ); + + expect(testLog).toStrictEqual({ + test: 'get-multiple', + value: { + [expectedParameterNameA]: paramAValue, + [expectedParameterNameB]: paramBValue, + }, + }); + }); // Test 4 - get multiple parameters by path recursively // (aka. get all parameters under a path recursively) i.e. // given /param, retrieve /param/get/a and /param/get/b (note path depth) - it( - 'should retrieve multiple parameters recursively', - async () => { - const logs = invocationLogs.getFunctionLogs(); - const testLog = TestInvocationLogs.parseFunctionLog(logs[3]); - const expectedParameterNameA = paramA.substring( - paramA.lastIndexOf('/') + 1 - ); - const expectedParameterNameB = paramB.substring( - paramB.lastIndexOf('/') + 1 - ); - - expect(testLog).toStrictEqual({ - test: 'get-multiple-recursive', - value: { - [expectedParameterNameA]: paramAValue, - [expectedParameterNameB]: paramBValue, - }, - }); - }, - TEST_CASE_TIMEOUT - ); + it('should retrieve multiple parameters recursively', async () => { + const logs = invocationLogs.getFunctionLogs(); + const testLog = TestInvocationLogs.parseFunctionLog(logs[3]); + const expectedParameterNameA = paramA.substring( + paramA.lastIndexOf('/') + 1 + ); + const expectedParameterNameB = paramB.substring( + paramB.lastIndexOf('/') + 1 + ); + + expect(testLog).toStrictEqual({ + test: 'get-multiple-recursive', + value: { + [expectedParameterNameA]: paramAValue, + [expectedParameterNameB]: paramBValue, + }, + }); + }); - it( - 'should retrieve multiple parameters with decryption', - async () => { - const logs = 
invocationLogs.getFunctionLogs(); - const testLog = TestInvocationLogs.parseFunctionLog(logs[4]); - const expectedParameterNameA = paramEncryptedA.substring( - paramEncryptedA.lastIndexOf('/') + 1 - ); - const expectedParameterNameB = paramEncryptedB.substring( - paramEncryptedB.lastIndexOf('/') + 1 - ); - - expect(testLog).toStrictEqual({ - test: 'get-multiple-decrypt', - value: { - [expectedParameterNameA]: paramEncryptedAValue, - [expectedParameterNameB]: paramEncryptedBValue, - }, - }); - }, - TEST_CASE_TIMEOUT - ); + it('should retrieve multiple parameters with decryption', async () => { + const logs = invocationLogs.getFunctionLogs(); + const testLog = TestInvocationLogs.parseFunctionLog(logs[4]); + const expectedParameterNameA = paramEncryptedA.substring( + paramEncryptedA.lastIndexOf('/') + 1 + ); + const expectedParameterNameB = paramEncryptedB.substring( + paramEncryptedB.lastIndexOf('/') + 1 + ); + + expect(testLog).toStrictEqual({ + test: 'get-multiple-decrypt', + value: { + [expectedParameterNameA]: paramEncryptedAValue, + [expectedParameterNameB]: paramEncryptedBValue, + }, + }); + }); // Test 6 - get multiple parameters by name with default options - it( - 'should retrieve multiple parameters by name', - async () => { - const logs = invocationLogs.getFunctionLogs(); - const testLog = TestInvocationLogs.parseFunctionLog(logs[5]); - - expect(testLog).toStrictEqual({ - test: 'get-multiple-by-name', - value: { - [paramA]: paramAValue, - [paramB]: paramBValue, - }, - }); - }, - TEST_CASE_TIMEOUT - ); + it('should retrieve multiple parameters by name', async () => { + const logs = invocationLogs.getFunctionLogs(); + const testLog = TestInvocationLogs.parseFunctionLog(logs[5]); + + expect(testLog).toStrictEqual({ + test: 'get-multiple-by-name', + value: { + [paramA]: paramAValue, + [paramB]: paramBValue, + }, + }); + }); // Test 7 - get multiple parameters by name, some of them encrypted and some not - it( - 'should retrieve multiple parameters by name with 
mixed decryption', - async () => { - const logs = invocationLogs.getFunctionLogs(); - const testLog = TestInvocationLogs.parseFunctionLog(logs[6]); - - expect(testLog).toStrictEqual({ - test: 'get-multiple-by-name-mixed-decrypt', - value: { - [paramEncryptedA]: paramEncryptedAValue, - [paramEncryptedB]: paramEncryptedBValue, - [paramA]: paramAValue, - }, - }); - }, - TEST_CASE_TIMEOUT - ); + it('should retrieve multiple parameters by name with mixed decryption', async () => { + const logs = invocationLogs.getFunctionLogs(); + const testLog = TestInvocationLogs.parseFunctionLog(logs[6]); + + expect(testLog).toStrictEqual({ + test: 'get-multiple-by-name-mixed-decrypt', + value: { + [paramEncryptedA]: paramEncryptedAValue, + [paramEncryptedB]: paramEncryptedBValue, + [paramA]: paramAValue, + }, + }); + }); // Test 8 - get parameter twice with middleware, which counts the number // of requests, we check later if we only called SSM API once - it( - 'should retrieve single parameter cached', - async () => { - const logs = invocationLogs.getFunctionLogs(); - const testLog = TestInvocationLogs.parseFunctionLog(logs[7]); - - expect(testLog).toStrictEqual({ - test: 'get-cached', - value: 1, - }); - }, - TEST_CASE_TIMEOUT - ); + it('should retrieve single parameter cached', async () => { + const logs = invocationLogs.getFunctionLogs(); + const testLog = TestInvocationLogs.parseFunctionLog(logs[7]); + + expect(testLog).toStrictEqual({ + test: 'get-cached', + value: 1, + }); + }); // Test 9 - get parameter twice, but force fetch 2nd time, // we count number of SDK requests and check that we made two API calls - it( - 'should retrieve single parameter twice without caching', - async () => { - const logs = invocationLogs.getFunctionLogs(); - const testLog = TestInvocationLogs.parseFunctionLog(logs[8]); - - expect(testLog).toStrictEqual({ - test: 'get-forced', - value: 2, - }); - }, - TEST_CASE_TIMEOUT - ); + it('should retrieve single parameter twice without caching', async () => 
{ + const logs = invocationLogs.getFunctionLogs(); + const testLog = TestInvocationLogs.parseFunctionLog(logs[8]); + + expect(testLog).toStrictEqual({ + test: 'get-forced', + value: 2, + }); + }); // Test 10 - store and overwrite single parameter - it( - 'should store and overwrite single parameter', - async () => { - const logs = invocationLogs.getFunctionLogs(); - const testLog = TestInvocationLogs.parseFunctionLog(logs[9]); - - expect(testLog).toStrictEqual({ - test: 'set', - value: 'overwritten', - }); - }, - TEST_CASE_TIMEOUT - ); + it('should store and overwrite single parameter', async () => { + const logs = invocationLogs.getFunctionLogs(); + const testLog = TestInvocationLogs.parseFunctionLog(logs[9]); + + expect(testLog).toStrictEqual({ + test: 'set', + value: 'overwritten', + }); + }); }); afterAll(async () => { if (!process.env.DISABLE_TEARDOWN) { await testStack.destroy(); } - }, TEARDOWN_TIMEOUT); + }); }); diff --git a/packages/parameters/vitest.config.ts b/packages/parameters/vitest.config.ts index 9f1196ef1f..baa5cf7463 100644 --- a/packages/parameters/vitest.config.ts +++ b/packages/parameters/vitest.config.ts @@ -4,5 +4,7 @@ export default defineProject({ test: { environment: 'node', setupFiles: ['../testing/src/setupEnv.ts'], + hookTimeout: 1_000 * 60 * 10, // 10 minutes + testTimeout: 1_000 * 60 * 3, // 3 minutes }, }); diff --git a/packages/testing/src/TestInvocationLogs.ts b/packages/testing/src/TestInvocationLogs.ts index 2de21918ae..45c189aa5f 100644 --- a/packages/testing/src/TestInvocationLogs.ts +++ b/packages/testing/src/TestInvocationLogs.ts @@ -4,6 +4,7 @@ import type { FunctionLog } from './types.js'; const CloudWatchLogKeywords = { END: 'END RequestId', INIT_START: 'INIT_START', + INIT_REPORT: 'INIT_REPORT', REPORT: 'REPORT RequestId', START: 'START RequestId', XRAY: 'XRAY TraceId', @@ -99,13 +100,15 @@ class TestInvocationLogs { } /** - * Return the index of the log that contains `INIT_START` - * @param logs - * @returns {number} 
index of the log that contains `INIT_START` + * Return the index of the log that contains `INIT_START` or `INIT_REPORT` + * + * @param logs - Array of logs */ public static getInitLogIndex(logs: string[]): number { - return logs.findIndex((log) => - log.startsWith(CloudWatchLogKeywords.INIT_START) + return logs.findIndex( + (log) => + log.startsWith(CloudWatchLogKeywords.INIT_START) || + log.startsWith(CloudWatchLogKeywords.INIT_REPORT) ); } diff --git a/packages/testing/src/resources/TestNodejsFunction.ts b/packages/testing/src/resources/TestNodejsFunction.ts index 01a383153f..e20ee86183 100644 --- a/packages/testing/src/resources/TestNodejsFunction.ts +++ b/packages/testing/src/resources/TestNodejsFunction.ts @@ -1,6 +1,6 @@ import { randomUUID } from 'node:crypto'; -import { CfnOutput, type CfnResource, Duration } from 'aws-cdk-lib'; -import { Tracing } from 'aws-cdk-lib/aws-lambda'; +import { CfnOutput, Duration } from 'aws-cdk-lib'; +import { Alias, Tracing } from 'aws-cdk-lib/aws-lambda'; import { NodejsFunction, OutputFormat } from 'aws-cdk-lib/aws-lambda-nodejs'; import { LogGroup, RetentionDays } from 'aws-cdk-lib/aws-logs'; import type { TestStack } from '../TestStack.js'; @@ -56,8 +56,18 @@ class TestNodejsFunction extends NodejsFunction { logGroup, }); + let outputValue = this.functionName; + if (extraProps.createAlias) { + const dev = new Alias(this, 'dev', { + aliasName: 'dev', + version: this.currentVersion, + provisionedConcurrentExecutions: 1, + }); + outputValue = dev.functionArn; + } + new CfnOutput(this, extraProps.nameSuffix, { - value: this.functionName, + value: outputValue, }); } } diff --git a/packages/testing/src/setupEnv.ts b/packages/testing/src/setupEnv.ts index 0a0c83b84a..9caf9e724f 100644 --- a/packages/testing/src/setupEnv.ts +++ b/packages/testing/src/setupEnv.ts @@ -377,3 +377,4 @@ if ( process.env._HANDLER = 'index.handler'; process.env.POWERTOOLS_SERVICE_NAME = 'hello-world'; process.env.AWS_XRAY_LOGGING_LEVEL = 'silent'; 
+process.env.AWS_LAMBDA_INITIALIZATION_TYPE = 'on-demand'; diff --git a/packages/testing/src/types.ts b/packages/testing/src/types.ts index 454c2d87b4..432f01e42f 100644 --- a/packages/testing/src/types.ts +++ b/packages/testing/src/types.ts @@ -19,6 +19,12 @@ interface ExtraTestProps { * @default 'CJS' */ outputFormat?: 'CJS' | 'ESM'; + /** + * Whether to create an alias for the function. + * + * @default false + */ + createAlias?: boolean; } type TestDynamodbTableProps = Omit< diff --git a/packages/tracer/tests/e2e/constants.ts b/packages/tracer/tests/e2e/constants.ts index 77ea5c9ed2..e4db5268cf 100644 --- a/packages/tracer/tests/e2e/constants.ts +++ b/packages/tracer/tests/e2e/constants.ts @@ -1,10 +1,5 @@ // Prefix for all resources created by the E2E tests const RESOURCE_NAME_PREFIX = 'Tracer'; -// Constants relating time to be used in the tests -const ONE_MINUTE = 60 * 1_000; -const TEST_CASE_TIMEOUT = 5 * ONE_MINUTE; -const SETUP_TIMEOUT = 7 * ONE_MINUTE; -const TEARDOWN_TIMEOUT = 5 * ONE_MINUTE; // Expected values for custom annotations, metadata, and response const EXPECTED_ANNOTATION_KEY = 'myAnnotation'; @@ -17,10 +12,6 @@ const EXPECTED_SUBSEGMENT_NAME = '### mySubsegment'; export { RESOURCE_NAME_PREFIX, - ONE_MINUTE, - TEST_CASE_TIMEOUT, - SETUP_TIMEOUT, - TEARDOWN_TIMEOUT, EXPECTED_ANNOTATION_KEY, EXPECTED_ANNOTATION_VALUE, EXPECTED_METADATA_KEY, diff --git a/packages/tracer/tests/e2e/decorator.test.ts b/packages/tracer/tests/e2e/decorator.test.ts index 8a87282431..260dfae659 100644 --- a/packages/tracer/tests/e2e/decorator.test.ts +++ b/packages/tracer/tests/e2e/decorator.test.ts @@ -8,8 +8,6 @@ import { afterAll, beforeAll, describe, expect, it } from 'vitest'; import { invokeAllTestCases } from '../helpers/invokeAllTests.js'; import { RESOURCE_NAME_PREFIX, - SETUP_TIMEOUT, - TEARDOWN_TIMEOUT, EXPECTED_ANNOTATION_KEY as expectedCustomAnnotationKey, EXPECTED_ANNOTATION_VALUE as expectedCustomAnnotationValue, EXPECTED_ERROR_MESSAGE as 
expectedCustomErrorMessage, @@ -82,13 +80,13 @@ describe('Tracer E2E tests, decorator instrumentation', () => { */ expectedSegmentsCount: 4, }); - }, SETUP_TIMEOUT); + }); afterAll(async () => { if (!process.env.DISABLE_TEARDOWN) { await testStack.destroy(); } - }, TEARDOWN_TIMEOUT); + }); it('should generate all trace data correctly', async () => { // Assess diff --git a/packages/tracer/tests/e2e/manual.test.ts b/packages/tracer/tests/e2e/manual.test.ts index 80fa766be1..52dbab375e 100644 --- a/packages/tracer/tests/e2e/manual.test.ts +++ b/packages/tracer/tests/e2e/manual.test.ts @@ -8,8 +8,6 @@ import { afterAll, beforeAll, describe, expect, it } from 'vitest'; import { invokeAllTestCases } from '../helpers/invokeAllTests.js'; import { RESOURCE_NAME_PREFIX, - SETUP_TIMEOUT, - TEARDOWN_TIMEOUT, EXPECTED_ANNOTATION_KEY as expectedCustomAnnotationKey, EXPECTED_ANNOTATION_VALUE as expectedCustomAnnotationValue, EXPECTED_ERROR_MESSAGE as expectedCustomErrorMessage, @@ -79,13 +77,13 @@ describe('Tracer E2E tests, manual instantiation', () => { */ expectedSegmentsCount: 2, }); - }, SETUP_TIMEOUT); + }); afterAll(async () => { if (!process.env.DISABLE_TEARDOWN) { await testStack.destroy(); } - }, TEARDOWN_TIMEOUT); + }); it('should generate all trace data correctly', async () => { // Assess diff --git a/packages/tracer/tests/e2e/middy.test.ts b/packages/tracer/tests/e2e/middy.test.ts index 0a33d4377e..361c350166 100644 --- a/packages/tracer/tests/e2e/middy.test.ts +++ b/packages/tracer/tests/e2e/middy.test.ts @@ -8,8 +8,6 @@ import { afterAll, beforeAll, describe, expect, it } from 'vitest'; import { invokeAllTestCases } from '../helpers/invokeAllTests.js'; import { RESOURCE_NAME_PREFIX, - SETUP_TIMEOUT, - TEARDOWN_TIMEOUT, EXPECTED_ANNOTATION_KEY as expectedCustomAnnotationKey, EXPECTED_ANNOTATION_VALUE as expectedCustomAnnotationValue, EXPECTED_ERROR_MESSAGE as expectedCustomErrorMessage, @@ -81,13 +79,13 @@ describe('Tracer E2E tests, middy instrumentation', () => { 
*/ expectedSegmentsCount: 4, }); - }, SETUP_TIMEOUT); + }); afterAll(async () => { if (!process.env.DISABLE_TEARDOWN) { await testStack.destroy(); } - }, TEARDOWN_TIMEOUT); + }); it('should generate all trace data correctly', () => { // Assess diff --git a/packages/tracer/vitest.config.ts b/packages/tracer/vitest.config.ts index 9f1196ef1f..baa5cf7463 100644 --- a/packages/tracer/vitest.config.ts +++ b/packages/tracer/vitest.config.ts @@ -4,5 +4,7 @@ export default defineProject({ test: { environment: 'node', setupFiles: ['../testing/src/setupEnv.ts'], + hookTimeout: 1_000 * 60 * 10, // 10 minutes + testTimeout: 1_000 * 60 * 3, // 3 minutes }, }); diff --git a/vitest.config.ts b/vitest.config.ts index 5d81a103f8..78c6de7f3e 100644 --- a/vitest.config.ts +++ b/vitest.config.ts @@ -20,5 +20,7 @@ export default defineConfig({ ], }, setupFiles: ['./packages/testing/src/setupEnv.ts'], + hookTimeout: 1_000 * 60 * 10, // 10 minutes + testTimeout: 1_000 * 60 * 3, // 3 minutes }, });