From cb026369692618212dde4d165dc1e847553e4df0 Mon Sep 17 00:00:00 2001 From: Andrea Amorosi Date: Sun, 24 Nov 2024 20:07:16 +0100 Subject: [PATCH 1/6] test(metrics): migrate tests to vitest --- packages/metrics/jest.config.cjs | 31 -- packages/metrics/package.json | 17 +- packages/metrics/src/Metrics.ts | 4 +- .../e2e/basicFeatures.decorators.test.ts | 28 +- .../tests/e2e/basicFeatures.manual.test.ts | 28 +- .../metrics/tests/helpers/metricsUtils.ts | 28 +- packages/metrics/tests/tsconfig.json | 8 +- .../tests/unit/coldStartMetric.test.ts | 98 ++++++ .../tests/unit/creatingMetrics.test.ts | 266 +++++++++++++++ .../tests/unit/customTimestamp.test.ts | 128 +++++++ .../metrics/tests/unit/dimensions.test.ts | 312 ++++++++++++++++++ .../tests/unit/initializeMetrics.test.ts | 156 +++++++++ .../metrics/tests/unit/logMetrics.test.ts | 266 +++++++++++++++ packages/metrics/tests/unit/metadata.test.ts | 83 +++++ packages/metrics/vitest.config.ts | 8 + packages/testing/src/setupEnv.ts | 223 +++++++++++++ vitest.config.ts | 2 +- 17 files changed, 1566 insertions(+), 120 deletions(-) delete mode 100644 packages/metrics/jest.config.cjs create mode 100644 packages/metrics/tests/unit/coldStartMetric.test.ts create mode 100644 packages/metrics/tests/unit/creatingMetrics.test.ts create mode 100644 packages/metrics/tests/unit/customTimestamp.test.ts create mode 100644 packages/metrics/tests/unit/dimensions.test.ts create mode 100644 packages/metrics/tests/unit/initializeMetrics.test.ts create mode 100644 packages/metrics/tests/unit/logMetrics.test.ts create mode 100644 packages/metrics/tests/unit/metadata.test.ts create mode 100644 packages/metrics/vitest.config.ts diff --git a/packages/metrics/jest.config.cjs b/packages/metrics/jest.config.cjs deleted file mode 100644 index 073bb4ee90..0000000000 --- a/packages/metrics/jest.config.cjs +++ /dev/null @@ -1,31 +0,0 @@ -module.exports = { - displayName: { - name: 'Powertools for AWS Lambda (TypeScript) utility: METRICS', - color: 'green', - }, - runner: 'groups', - preset: 'ts-jest', - moduleNameMapper: { - '^(\\.{1,2}/.*)\\.js$': '$1', - }, - transform: { - '^.+\\.ts?$': 'ts-jest', - }, - moduleFileExtensions: ['js', 'ts'], - collectCoverageFrom: ['**/src/**/*.ts', '!**/node_modules/**'], - testMatch: ['**/?(*.)+(spec|test).ts'], - roots: ['/src', '/tests'], - testPathIgnorePatterns: ['/node_modules/'], - testEnvironment: 'node', - coveragePathIgnorePatterns: ['/node_modules/', '/types/'], - coverageThreshold: { - global: { - statements: 100, - branches: 100, - functions: 100, - lines: 100, - }, - }, - coverageReporters: ['json-summary', 'text', 'lcov'], - setupFiles: ['/tests/helpers/populateEnvironmentVariables.ts'], -}; diff --git a/packages/metrics/package.json b/packages/metrics/package.json index 006f37a8b0..3a3f36c863 100644 --- a/packages/metrics/package.json +++ b/packages/metrics/package.json @@ -10,12 +10,15 @@ "access": "public" }, "scripts": { - "test": "npm run test:unit", - "test:unit": "jest --group=unit --detectOpenHandles --coverage --verbose", - "test:e2e:nodejs18x": "RUNTIME=nodejs18x jest --group=e2e", - "test:e2e:nodejs20x": "RUNTIME=nodejs20x jest --group=e2e", - "test:e2e:nodejs22x": "RUNTIME=nodejs22x jest --group=e2e", - "test:e2e": "jest --group=e2e", + "test": "vitest --run tests/unit", + "test:unit": "vitest --run tests/unit", + "test:unit:coverage": "vitest --run tests/unit --coverage.enabled --coverage.thresholds.100 --coverage.include='src/**'", + "test:unit:types": "echo 'Not Implemented'", + "test:unit:watch": "vitest 
tests/unit", + "test:e2e:nodejs18x": "RUNTIME=nodejs18x vitest --run tests/e2e", + "test:e2e:nodejs20x": "RUNTIME=nodejs20x vitest --run tests/e2e", + "test:e2e:nodejs22x": "RUNTIME=nodejs22x vitest --run tests/e2e", + "test:e2e": "vitest --run tests/e2e", "watch": "jest --group=unit --watch ", "build:cjs": "tsc --build tsconfig.json && echo '{ \"type\": \"commonjs\" }' > lib/cjs/package.json", "build:esm": "tsc --build tsconfig.esm.json && echo '{ \"type\": \"module\" }' > lib/esm/package.json", @@ -96,4 +99,4 @@ "serverless", "nodejs" ] -} +} \ No newline at end of file diff --git a/packages/metrics/src/Metrics.ts b/packages/metrics/src/Metrics.ts index cd3b77f22d..755ae791e6 100644 --- a/packages/metrics/src/Metrics.ts +++ b/packages/metrics/src/Metrics.ts @@ -827,9 +827,9 @@ class Metrics extends Utility implements MetricsInterface { /** * @deprecated Use {@link Metrics.setThrowOnEmptyMetrics | `setThrowOnEmptyMetrics()`} instead. */ - public throwOnEmptyMetrics(): void { + /* v8 ignore start */ public throwOnEmptyMetrics(): void { this.shouldThrowOnEmptyMetrics = true; - } + } /* v8 ignore stop */ /** * Gets the current number of dimensions count. diff --git a/packages/metrics/tests/e2e/basicFeatures.decorators.test.ts b/packages/metrics/tests/e2e/basicFeatures.decorators.test.ts index 08b70d5154..58398e562f 100644 --- a/packages/metrics/tests/e2e/basicFeatures.decorators.test.ts +++ b/packages/metrics/tests/e2e/basicFeatures.decorators.test.ts @@ -1,8 +1,3 @@ -/** - * Test metrics standard functions - * - * @group e2e/metrics/decorator - */ import { join } from 'node:path'; import { TestStack, @@ -12,6 +7,7 @@ import { CloudWatchClient, GetMetricStatisticsCommand, } from '@aws-sdk/client-cloudwatch'; +import { afterAll, beforeAll, describe, expect, it } from 'vitest'; import { getMetrics, sortDimensions } from '../helpers/metricsUtils.js'; import { MetricsTestNodejsFunction } from '../helpers/resources.js'; import { @@ -74,7 +70,7 @@ describe('Metrics E2E tests, basic features decorator usage', () => { describe('ColdStart metrics', () => { it( - 'should capture ColdStart Metric', + 'captures the ColdStart Metric', async () => { const { EXPECTED_NAMESPACE: expectedNamespace, @@ -106,15 +102,6 @@ describe('Metrics E2E tests, basic features decorator usage', () => { // Check coldstart metric value const adjustedStartTime = new Date(startTime.getTime() - ONE_MINUTE); const endTime = new Date(new Date().getTime() + ONE_MINUTE); - console.log( - `Manual command: aws cloudwatch get-metric-statistics --namespace ${expectedNamespace} --metric-name ColdStart --start-time ${Math.floor( - adjustedStartTime.getTime() / 1000 - )} --end-time ${Math.floor( - endTime.getTime() / 1000 - )} --statistics 'Sum' --period 60 --dimensions '${JSON.stringify( - expectedDimensions - )}'` - ); const coldStartMetricStat = await cloudwatchClient.send( new GetMetricStatisticsCommand({ Namespace: expectedNamespace, @@ -139,7 +126,7 @@ describe('Metrics E2E tests, basic features decorator usage', () => { describe('Default and extra dimensions', () => { it( - 'should produce a Metric with the default and extra one dimensions', + 'produces a Metric with the default and extra one dimensions', async () => { const { EXPECTED_NAMESPACE: expectedNamespace, @@ -179,15 +166,6 @@ describe('Metrics E2E tests, basic features decorator usage', () => { startTime.getTime() - 3 * ONE_MINUTE ); const endTime = new Date(new Date().getTime() + ONE_MINUTE); - console.log( - `Manual command: aws cloudwatch get-metric-statistics 
--namespace ${expectedNamespace} --metric-name ${expectedMetricName} --start-time ${Math.floor( - adjustedStartTime.getTime() / 1000 - )} --end-time ${Math.floor( - endTime.getTime() / 1000 - )} --statistics 'Sum' --period 60 --dimensions '${JSON.stringify( - expectedDimensions - )}'` - ); const metricStat = await cloudwatchClient.send( new GetMetricStatisticsCommand({ Namespace: expectedNamespace, diff --git a/packages/metrics/tests/e2e/basicFeatures.manual.test.ts b/packages/metrics/tests/e2e/basicFeatures.manual.test.ts index b37fc374b7..5272ee8578 100644 --- a/packages/metrics/tests/e2e/basicFeatures.manual.test.ts +++ b/packages/metrics/tests/e2e/basicFeatures.manual.test.ts @@ -1,8 +1,3 @@ -/** - * Test metrics standard functions - * - * @group e2e/metrics/standardFunctions - */ import { join } from 'node:path'; import { TestStack, @@ -12,6 +7,7 @@ import { CloudWatchClient, GetMetricStatisticsCommand, } from '@aws-sdk/client-cloudwatch'; +import { afterAll, beforeAll, describe, expect, it } from 'vitest'; import { getMetrics, sortDimensions } from '../helpers/metricsUtils.js'; import { MetricsTestNodejsFunction } from '../helpers/resources.js'; import { @@ -72,7 +68,7 @@ describe('Metrics E2E tests, manual usage', () => { describe('ColdStart metrics', () => { it( - 'should capture ColdStart Metric', + 'captures the ColdStart Metric', async () => { const { EXPECTED_NAMESPACE: expectedNamespace } = commonEnvironmentVars; @@ -92,15 +88,6 @@ describe('Metrics E2E tests, manual usage', () => { // Check coldstart metric value const adjustedStartTime = new Date(startTime.getTime() - 60 * 1000); const endTime = new Date(new Date().getTime() + 60 * 1000); - console.log( - `Manual command: aws cloudwatch get-metric-statistics --namespace ${expectedNamespace} --metric-name ColdStart --start-time ${Math.floor( - adjustedStartTime.getTime() / 1000 - )} --end-time ${Math.floor( - endTime.getTime() / 1000 - )} --statistics 'Sum' --period 60 --dimensions '${JSON.stringify([ - { Name: 'service', Value: expectedServiceName }, - ])}'` - ); const coldStartMetricStat = await cloudwatchClient.send( new GetMetricStatisticsCommand({ Namespace: expectedNamespace, @@ -125,7 +112,7 @@ describe('Metrics E2E tests, manual usage', () => { describe('Default and extra dimensions', () => { it( - 'should produce a Metric with the default and extra one dimensions', + 'produces a Metric with the default and extra one dimensions', async () => { const { EXPECTED_NAMESPACE: expectedNamespace, @@ -165,15 +152,6 @@ describe('Metrics E2E tests, manual usage', () => { startTime.getTime() - 3 * ONE_MINUTE ); const endTime = new Date(new Date().getTime() + ONE_MINUTE); - console.log( - `Manual command: aws cloudwatch get-metric-statistics --namespace ${expectedNamespace} --metric-name ${expectedMetricName} --start-time ${Math.floor( - adjustedStartTime.getTime() / 1000 - )} --end-time ${Math.floor( - endTime.getTime() / 1000 - )} --statistics 'Sum' --period 60 --dimensions '${JSON.stringify( - expectedDimensions - )}'` - ); const metricStat = await cloudwatchClient.send( new GetMetricStatisticsCommand({ Namespace: expectedNamespace, diff --git a/packages/metrics/tests/helpers/metricsUtils.ts b/packages/metrics/tests/helpers/metricsUtils.ts index d68afc2793..35bf3407d7 100644 --- a/packages/metrics/tests/helpers/metricsUtils.ts +++ b/packages/metrics/tests/helpers/metricsUtils.ts @@ -1,14 +1,10 @@ -import type { LambdaInterface } from '@aws-lambda-powertools/commons/types'; import { type CloudWatchClient, type Dimension, 
ListMetricsCommand, type ListMetricsCommandOutput, } from '@aws-sdk/client-cloudwatch'; -import type { Context, Handler } from 'aws-lambda'; import promiseRetry from 'promise-retry'; -import { MetricUnit, type Metrics } from '../../src/index.js'; -import type { ExtraOptions } from '../../src/types/index.js'; const getMetrics = async ( cloudWatchClient: CloudWatchClient, @@ -43,29 +39,7 @@ const getMetrics = async ( }, retryOptions); }; -const setupDecoratorLambdaHandler = ( - metrics: Metrics, - options: ExtraOptions = {} -): Handler => { - class LambdaFunction implements LambdaInterface { - @metrics.logMetrics(options) - public async handler( - _event: TEvent, - _context: Context - ): Promise { - metrics.addMetric('decorator-lambda-test-metric', MetricUnit.Count, 1); - - return 'Lambda invoked!'; - } - } - - const handlerClass = new LambdaFunction(); - const handler = handlerClass.handler.bind(handlerClass); - - return handler; -}; - const sortDimensions = (dimensions?: Dimension[]): Dimension[] | undefined => dimensions?.sort((a, b) => (a.Name || '').localeCompare(b?.Name || '')); -export { getMetrics, setupDecoratorLambdaHandler, sortDimensions }; +export { getMetrics, sortDimensions }; diff --git a/packages/metrics/tests/tsconfig.json b/packages/metrics/tests/tsconfig.json index 45ba862a85..76e267d72e 100644 --- a/packages/metrics/tests/tsconfig.json +++ b/packages/metrics/tests/tsconfig.json @@ -1,8 +1,12 @@ { "extends": "../tsconfig.json", "compilerOptions": { - "rootDir": "../", + "rootDir": "../../", "noEmit": true }, - "include": ["../src/**/*", "./**/*"] + "include": [ + "../../testing/src/setupEnv.ts", + "../src/**/*", + "./**/*" + ] } diff --git a/packages/metrics/tests/unit/coldStartMetric.test.ts b/packages/metrics/tests/unit/coldStartMetric.test.ts new file mode 100644 index 0000000000..b94f999c03 --- /dev/null +++ b/packages/metrics/tests/unit/coldStartMetric.test.ts @@ -0,0 +1,98 @@ +import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { COLD_START_METRIC, DEFAULT_NAMESPACE } from '../../src/constants.js'; +import { MetricUnit, Metrics } from '../../src/index.js'; + +describe('ColdStart metric', () => { + const ENVIRONMENT_VARIABLES = process.env; + + beforeEach(() => { + process.env = { ...ENVIRONMENT_VARIABLES, POWERTOOLS_DEV: 'true' }; + vi.resetAllMocks(); + }); + + it('emits a cold start metric', () => { + // Prepare + const metrics = new Metrics({ + namespace: DEFAULT_NAMESPACE, + }); + + // Act + metrics.captureColdStartMetric(); + + // Assess + expect(console.log).toHaveEmittedEMFWith( + expect.objectContaining({ + service: 'hello-world', + [COLD_START_METRIC]: 1, + }) + ); + expect(console.log).toHaveEmittedMetricWith( + expect.objectContaining({ + Dimensions: [['service']], + Metrics: [{ Name: COLD_START_METRIC, Unit: MetricUnit.Count }], + Namespace: DEFAULT_NAMESPACE, + }) + ); + }); + + it('includes default dimensions in the cold start metric', () => { + // Prepare + const defaultDimensions = { MyDimension: 'MyValue' }; + const metrics = new Metrics({ + namespace: DEFAULT_NAMESPACE, + defaultDimensions, + }); + + // Act + metrics.captureColdStartMetric(); + + // Assess + expect(console.log).toHaveEmittedEMFWith( + expect.objectContaining({ + service: 'hello-world', + [COLD_START_METRIC]: 1, + MyDimension: 'MyValue', + }) + ); + expect(console.log).toHaveEmittedMetricWith( + expect.objectContaining({ + Dimensions: [['service', 'MyDimension']], + }) + ); + }); + + it('includes the function name in the cold start metric', () => { + // Prepare + 
const functionName = 'my-function'; + const metrics = new Metrics({ + namespace: DEFAULT_NAMESPACE, + }); + metrics.setFunctionName(functionName); + + // Act + metrics.captureColdStartMetric(); + + // Assess + expect(console.log).toHaveEmittedEMFWith( + expect.objectContaining({ + service: 'hello-world', + [COLD_START_METRIC]: 1, + function_name: 'my-function', + }) + ); + }); + + it('emits the metric only once', () => { + // Prepare + const metrics = new Metrics({ + namespace: DEFAULT_NAMESPACE, + }); + + // Act + metrics.captureColdStartMetric(); + metrics.captureColdStartMetric(); + + // Assess + expect(console.log).toHaveBeenCalledTimes(1); + }); +}); diff --git a/packages/metrics/tests/unit/creatingMetrics.test.ts b/packages/metrics/tests/unit/creatingMetrics.test.ts new file mode 100644 index 0000000000..d241fa0a98 --- /dev/null +++ b/packages/metrics/tests/unit/creatingMetrics.test.ts @@ -0,0 +1,266 @@ +import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { + DEFAULT_NAMESPACE, + MAX_METRICS_SIZE, + MetricResolution, +} from '../../src/constants.js'; +import { MetricUnit, Metrics } from '../../src/index.js'; + +describe('Creating metrics', () => { + const ENVIRONMENT_VARIABLES = process.env; + + beforeEach(() => { + process.env = { ...ENVIRONMENT_VARIABLES, POWERTOOLS_DEV: 'true' }; + vi.resetAllMocks(); + }); + + it('creates a compliant CloudWatch EMF metric', () => { + // Prepare + const metrics = new Metrics({ + singleMetric: true, + serviceName: 'hello-world', + defaultDimensions: { environment: 'test' }, + namespace: DEFAULT_NAMESPACE, + }); + const timestamp = Date.now() + 1000; + + // Act + metrics.setTimestamp(timestamp); + metrics.addMetadata('cost-center', '1234'); + metrics.addDimension('commit', '1234'); + metrics.addMetric('test', MetricUnit.Count, 1); + + // Assess + expect(console.log).toHaveEmittedEMFWith({ + _aws: { + Timestamp: timestamp, + CloudWatchMetrics: [ + { + Namespace: DEFAULT_NAMESPACE, + Dimensions: [['service', 'environment', 'commit']], + Metrics: [{ Name: 'test', Unit: 'Count' }], + }, + ], + }, + service: 'hello-world', + environment: 'test', + 'cost-center': '1234', + commit: '1234', + test: 1, + }); + }); + + it('stores metrics until flushed', () => { + // Prepare + const metrics = new Metrics({ singleMetric: false }); + + // Act + metrics.addMetric('test', MetricUnit.Count, 1); + metrics.addMetric('test', MetricUnit.Count, 2); + metrics.addMetric('another', MetricUnit.Bytes, 3); + metrics.publishStoredMetrics(); + + // Assess + expect(console.log).toHaveBeenCalledTimes(1); + expect(console.log).toHaveEmittedNthEMFWith( + 1, + expect.objectContaining({ + test: [1, 2], + another: 3, + service: 'hello-world', + }) + ); + expect(console.log).toHaveEmittedNthMetricWith( + 1, + expect.objectContaining({ + Metrics: [ + { + Name: 'test', + Unit: 'Count', + }, + { + Name: 'another', + Unit: 'Bytes', + }, + ], + }) + ); + }); + + it('clears stored metrics when calling clearMetrics', () => { + // Prepare + const metrics = new Metrics({ singleMetric: false }); + + // Act + metrics.addMetric('test', MetricUnit.Count, 1); + metrics.addMetric('test', MetricUnit.Count, 2); + metrics.clearMetrics(); + metrics.addMetric('another', MetricUnit.Count, 3); + metrics.publishStoredMetrics(); + + // Assess + expect(console.log).toHaveBeenCalledTimes(1); + expect(console.log).toHaveEmittedNthEMFWith( + 1, + expect.objectContaining({ + another: 3, + service: 'hello-world', + }) + ); + expect(console.log).not.toHaveEmittedNthEMFWith( + 1, + 
expect.objectContaining({ + test: [1, 2], + }) + ); + }); + + it('clears stored metrics after publishing', () => { + // Prepare + const metrics = new Metrics({ singleMetric: false }); + + // Act + metrics.addMetric('test', MetricUnit.Count, 1); + metrics.addMetric('test', MetricUnit.Count, 2); + metrics.publishStoredMetrics(); + metrics.addMetric('another', MetricUnit.Count, 3); + metrics.publishStoredMetrics(); + + // Assess + expect(console.log).toHaveBeenCalledTimes(2); + expect(console.log).not.toHaveEmittedNthEMFWith( + 2, + expect.objectContaining({ + test: [1, 2], + service: 'hello-world', + }) + ); + }); + + it('throws when adding the same metric name with different units', () => { + // Prepare + const metrics = new Metrics({ singleMetric: false }); + + // Act + metrics.addMetric('test', MetricUnit.Count, 1); + expect(() => metrics.addMetric('test', MetricUnit.Seconds, 2)).toThrowError( + 'Metric "test" has already been added with unit "Count", but we received unit "Seconds". Did you mean to use metric unit "Count"?' + ); + }); + + it('includes the storage resolution when provided', () => { + // Prepare + const metrics = new Metrics({ singleMetric: true }); + + // Act + metrics.addMetric('test', MetricUnit.Count, 1, MetricResolution.High); + + // Assess + expect(console.log).toHaveBeenCalledTimes(1); + expect(console.log).toHaveEmittedNthMetricWith( + 1, + expect.objectContaining({ + Metrics: [ + { + Name: 'test', + Unit: 'Count', + StorageResolution: 1, + }, + ], + }) + ); + }); + + it('logs a warning when flushing metrics on an empty buffer', () => { + // Prepare + const metrics = new Metrics({ + singleMetric: false, + namespace: DEFAULT_NAMESPACE, + }); + + // Act + metrics.publishStoredMetrics(); + + // Assess + expect(console.warn).toHaveBeenCalledTimes(1); + expect(console.warn).toHaveBeenNthCalledWith( + 1, + 'No application metrics to publish. The cold-start metric may be published if enabled. 
If application metrics should never be empty, consider using `throwOnEmptyMetrics`' + ); + }); + + it('throws when flushing metrics on an empty buffer with throwOnEmptyMetrics enabled', () => { + // Prepare + const metrics = new Metrics({ + singleMetric: false, + namespace: DEFAULT_NAMESPACE, + }); + + // Act & Assess + metrics.setThrowOnEmptyMetrics(true); + expect(() => metrics.publishStoredMetrics()).toThrowError( + 'The number of metrics recorded must be higher than zero' + ); + }); + + it('flushes the buffer automatically when the buffer is full', () => { + // Prepare + const metrics = new Metrics({ + singleMetric: false, + }); + + // Act + for (let i = 0; i < MAX_METRICS_SIZE; i++) { + metrics.addMetric(`test-${i}`, MetricUnit.Count, i); + } + metrics.addMetric('another', MetricUnit.Count, 1); + metrics.publishStoredMetrics(); + + // Assess + expect(console.log).toHaveBeenCalledTimes(2); + expect(console.log).toHaveEmittedNthEMFWith( + 1, + expect.objectContaining({ + service: 'hello-world', + 'test-0': 0, + 'test-99': 99, + }) + ); + expect(console.log).toHaveEmittedNthEMFWith( + 2, + expect.objectContaining({ + another: 1, + service: 'hello-world', + }) + ); + expect(console.log).toHaveEmittedNthEMFWith( + 2, + expect.not.objectContaining({ + 'test-1': 1, + }) + ); + }); + + it('flushes the buffer automatically when the max values per metric is reached', () => { + // Prepare + const metrics = new Metrics({ + singleMetric: false, + }); + + // Act + for (let i = 0; i < MAX_METRICS_SIZE; i++) { + metrics.addMetric('test', MetricUnit.Count, i); + } + metrics.publishStoredMetrics(); + + // Assess + expect(console.log).toHaveBeenCalledTimes(1); + expect(console.log).toHaveEmittedNthEMFWith( + 1, + expect.objectContaining({ + service: 'hello-world', + test: Array.from({ length: MAX_METRICS_SIZE }, (_, i) => i), + }) + ); + }); +}); diff --git a/packages/metrics/tests/unit/customTimestamp.test.ts b/packages/metrics/tests/unit/customTimestamp.test.ts new file mode 100644 index 0000000000..84e432e3a1 --- /dev/null +++ b/packages/metrics/tests/unit/customTimestamp.test.ts @@ -0,0 +1,128 @@ +import { afterAll, beforeEach, describe, expect, it, vi } from 'vitest'; +import { + DEFAULT_NAMESPACE, + EMF_MAX_TIMESTAMP_FUTURE_AGE, + EMF_MAX_TIMESTAMP_PAST_AGE, +} from '../../src/constants.js'; +import { MetricUnit, Metrics } from '../../src/index.js'; + +describe('Setting custom timestamp', () => { + const ENVIRONMENT_VARIABLES = process.env; + + beforeEach(() => { + process.env = { ...ENVIRONMENT_VARIABLES, POWERTOOLS_DEV: 'true' }; + vi.resetAllMocks(); + vi.useFakeTimers().setSystemTime(new Date()); + }); + + afterAll(() => { + vi.useRealTimers(); + }); + + it.each([ + { value: Date.now() - 2000, name: 'epoch' }, + { value: new Date(Date.now()), name: 'Date object' }, + ])('uses the provided timestamp when one is set ($name)', ({ value }) => { + // Prepare + const metrics = new Metrics({ singleMetric: true }); + metrics.setTimestamp(value); + + // Act + metrics.addMetric('test', MetricUnit.Count, 1); + + // Assess + expect(console.log).toHaveBeenCalledTimes(1); + expect(console.log).toHaveEmittedNthEMFWith( + 1, + expect.objectContaining({ + _aws: expect.objectContaining({ + Timestamp: value instanceof Date ? 
value.getTime() : value, + }), + }) + ); + }); + + it('logs a warning when the provided timestamp is too far in the past', () => { + // Prepare + const metrics = new Metrics({ singleMetric: true }); + + // Act + metrics.setTimestamp(Date.now() - EMF_MAX_TIMESTAMP_PAST_AGE - 1000); + + // Assess + expect(console.warn).toHaveBeenCalledTimes(1); + expect(console.warn).toHaveBeenCalledWith( + "This metric doesn't meet the requirements and will be skipped by Amazon CloudWatch. " + + 'Ensure the timestamp is within 14 days in the past or up to 2 hours in the future and is also a valid number or Date object.' + ); + }); + + it('logs a warning when the provided timestamp is too far in the future', () => { + // Prepare + const metrics = new Metrics({ singleMetric: true }); + + // Act + metrics.setTimestamp(Date.now() + EMF_MAX_TIMESTAMP_FUTURE_AGE + 1000); + + // Assess + expect(console.warn).toHaveBeenCalledTimes(1); + expect(console.warn).toHaveBeenCalledWith( + "This metric doesn't meet the requirements and will be skipped by Amazon CloudWatch. " + + 'Ensure the timestamp is within 14 days in the past or up to 2 hours in the future and is also a valid number or Date object.' + ); + }); + + it('logs a warning when the provided timestamp is not a number or Date object', () => { + // Prepare + const metrics = new Metrics({ + singleMetric: true, + namespace: DEFAULT_NAMESPACE, + }); + + // Act + metrics.setTimestamp(Number.NaN); + metrics.addMetric('test', MetricUnit.Count, 1); + + // Assess + expect(console.warn).toHaveBeenCalledTimes(1); + expect(console.warn).toHaveBeenCalledWith( + "This metric doesn't meet the requirements and will be skipped by Amazon CloudWatch. " + + 'Ensure the timestamp is within 14 days in the past or up to 2 hours in the future and is also a valid number or Date object.' + ); + expect(console.log).toHaveEmittedNthEMFWith( + 1, + expect.objectContaining({ + _aws: expect.objectContaining({ + Timestamp: 0, + }), + }) + ); + }); + + it('logs a warning when the provided timestamp is not an integer', () => { + // Prepare + const metrics = new Metrics({ + singleMetric: true, + namespace: DEFAULT_NAMESPACE, + }); + + // Act + metrics.setTimestamp(Date.now() + 0.5); + metrics.addMetric('test', MetricUnit.Count, 1); + + // Assess + expect(console.warn).toHaveBeenCalledTimes(1); + expect(console.warn).toHaveBeenCalledWith( + "This metric doesn't meet the requirements and will be skipped by Amazon CloudWatch. " + + 'Ensure the timestamp is within 14 days in the past or up to 2 hours in the future and is also a valid number or Date object.' 
+ ); + expect(console.log).toHaveEmittedNthEMFWith( + 1, + expect.objectContaining({ + _aws: expect.objectContaining({ + Timestamp: 0, + }), + }) + ); + }); +}); diff --git a/packages/metrics/tests/unit/dimensions.test.ts b/packages/metrics/tests/unit/dimensions.test.ts new file mode 100644 index 0000000000..b4d99d87b6 --- /dev/null +++ b/packages/metrics/tests/unit/dimensions.test.ts @@ -0,0 +1,312 @@ +import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { DEFAULT_NAMESPACE, MAX_DIMENSION_COUNT } from '../../src/constants.js'; +import { MetricUnit, Metrics } from '../../src/index.js'; + +describe('Working with dimensions', () => { + const ENVIRONMENT_VARIABLES = process.env; + + beforeEach(() => { + process.env = { ...ENVIRONMENT_VARIABLES, POWERTOOLS_DEV: 'true' }; + vi.resetAllMocks(); + }); + + it('adds default dimensions to the metric via constructor', () => { + // Prepare + const metrics = new Metrics({ + singleMetric: true, + defaultDimensions: { + environment: 'test', + }, + }); + + // Act + metrics.addMetric('test', MetricUnit.Count, 1); + + // Assess + expect(console.log).toHaveEmittedEMFWith( + expect.objectContaining({ service: 'hello-world', environment: 'test' }) + ); + expect(console.log).toHaveEmittedMetricWith( + expect.objectContaining({ + Dimensions: [['service', 'environment']], + }) + ); + }); + + it('adds default dimensions to the metric via method', () => { + // Prepare + const metrics = new Metrics({ + singleMetric: true, + defaultDimensions: { + environment: 'prod', + }, + }); + + // Act + metrics.setDefaultDimensions({ environment: 'test', commit: '1234' }); + metrics.addMetric('test', MetricUnit.Count, 1); + + // Assess + expect(console.log).toHaveEmittedEMFWith( + expect.objectContaining({ + service: 'hello-world', + environment: 'test', + commit: '1234', + }) + ); + expect(console.log).toHaveEmittedMetricWith( + expect.objectContaining({ + Dimensions: [['service', 'environment', 'commit']], + }) + ); + }); + + it('adds one dimension to the metric', () => { + // Prepare + const metrics = new Metrics({ + singleMetric: true, + }); + + // Act + metrics.addDimension('environment', 'test'); + metrics.addMetric('test', MetricUnit.Count, 1); + + // Assess + expect(console.log).toHaveEmittedEMFWith( + expect.objectContaining({ service: 'hello-world', environment: 'test' }) + ); + expect(console.log).toHaveEmittedMetricWith( + expect.objectContaining({ + Dimensions: [['service', 'environment']], + }) + ); + }); + + it('adds multiple dimensions to the metric', () => { + // Prepare + const metrics = new Metrics({ + singleMetric: true, + }); + + // Act + metrics.addDimensions({ environment: 'test', commit: '1234' }); + metrics.addMetric('test', MetricUnit.Count, 1); + + // Assess + expect(console.log).toHaveEmittedEMFWith( + expect.objectContaining({ + service: 'hello-world', + environment: 'test', + commit: '1234', + }) + ); + expect(console.log).toHaveEmittedMetricWith( + expect.objectContaining({ + Dimensions: [['service', 'environment', 'commit']], + }) + ); + }); + + it('overrides an existing dimension with the same name', () => { + // Prepare + const metrics = new Metrics({ + singleMetric: true, + defaultDimensions: { + environment: 'test', + }, + }); + + // Act + metrics.addDimension('environment', 'prod'); + metrics.addDimension('commit', '1234'); + metrics.addDimension('commit', '5678'); + metrics.addMetric('test', MetricUnit.Count, 1); + + // Assess + expect(console.log).toHaveEmittedEMFWith( + expect.objectContaining({ + service: 'hello-world', 
+ environment: 'prod', + commit: '5678', + }) + ); + expect(console.log).toHaveEmittedMetricWith( + expect.objectContaining({ + Dimensions: [['service', 'environment', 'commit']], + }) + ); + }); + + it('clears default dimensions', () => { + // Prepare + const metrics = new Metrics({ + singleMetric: true, + defaultDimensions: { + environment: 'test', + }, + }); + + // Act + metrics.addDimension('commit', '1234'); + metrics.clearDefaultDimensions(); + metrics.addMetric('test', MetricUnit.Count, 1); + + // Assess + expect(console.log).toHaveEmittedEMFWith( + expect.not.objectContaining({ + environment: 'test', + service: 'hello-world', + }) + ); + expect(console.log).toHaveEmittedEMFWith( + expect.objectContaining({ + commit: '1234', + }) + ); + expect(console.log).toHaveEmittedMetricWith( + expect.objectContaining({ + Dimensions: [['commit']], + }) + ); + }); + + it('clears all non-default dimensions', () => { + // Prepare + const metrics = new Metrics({ + singleMetric: true, + defaultDimensions: { + environment: 'test', + }, + }); + + // Act + metrics.addDimension('commit', '1234'); + metrics.clearDimensions(); + metrics.addMetric('test', MetricUnit.Count, 1); + + // Assess + expect(console.log).toHaveEmittedEMFWith( + expect.not.objectContaining({ + commit: '1234', + }) + ); + expect(console.log).toHaveEmittedEMFWith( + expect.objectContaining({ + environment: 'test', + service: 'hello-world', + }) + ); + expect(console.log).toHaveEmittedMetricWith( + expect.objectContaining({ + Dimensions: [['service', 'environment']], + }) + ); + }); + + it('clears standard dimensions after publishing the metric', () => { + // Prepare + const metrics = new Metrics({ + singleMetric: true, + defaultDimensions: { + environment: 'test', + }, + }); + + // Act + metrics.addDimension('commit', '1234'); + metrics.addMetric('test', MetricUnit.Count, 1); + metrics.addMetric('test', MetricUnit.Count, 1); + + // Assess + expect(console.log).toHaveEmittedNthEMFWith( + 1, + expect.objectContaining({ commit: '1234', environment: 'test' }) + ); + expect(console.log).toHaveEmittedNthMetricWith( + 1, + expect.objectContaining({ + Dimensions: [['service', 'environment', 'commit']], + }) + ); + expect(console.log).toHaveEmittedNthEMFWith( + 2, + expect.not.objectContaining({ commit: '1234' }) + ); + expect(console.log).toHaveEmittedNthEMFWith( + 2, + expect.objectContaining({ environment: 'test' }) + ); + expect(console.log).toHaveEmittedNthMetricWith( + 2, + expect.objectContaining({ + Dimensions: [['service', 'environment']], + }) + ); + }); + + it('throws when the number of dimensions exceeds the limit', () => { + // Prepare + const metrics = new Metrics({ + singleMetric: true, + defaultDimensions: { + environment: 'test', + }, + }); + + // Act & Assess + let i = 1; + // We start with 2 dimensions because the default dimension & service name are already added + for (i = 2; i < MAX_DIMENSION_COUNT; i++) { + metrics.addDimension(`dimension-${i}`, 'test'); + } + expect(() => metrics.addDimension('extra', 'test')).toThrowError( + `The number of metric dimensions must be lower than ${MAX_DIMENSION_COUNT}` + ); + }); + + it('throws when the number of dimensions exceeds the limit after adding default dimensions', () => { + // Prepare + const metrics = new Metrics({ + singleMetric: true, + }); + + // Act + // We start with 1 dimension because service name is already added + for (let i = 1; i < MAX_DIMENSION_COUNT - 1; i++) { + metrics.setDefaultDimensions({ [`dimension-${i}`]: 'test' }); + } + expect(() => 
metrics.setDefaultDimensions({ extra: 'test' })).toThrowError( + 'Max dimension count hit' + ); + }); + + it.each([ + { value: undefined, name: 'undefined' }, + { value: null, name: 'null' }, + { + value: '', + name: 'empty string', + }, + ])('skips invalid dimension values ($name)', ({ value }) => { + // Prepare + const metrics = new Metrics({ + singleMetric: true, + namespace: DEFAULT_NAMESPACE, + }); + + // Act & Assess + metrics.addDimension('test', value as string); + metrics.addMetric('test', MetricUnit.Count, 1); + + expect(console.warn).toHaveBeenCalledTimes(1); + expect(console.warn).toHaveBeenCalledWith( + `The dimension test doesn't meet the requirements and won't be added. Ensure the dimension name and value are non empty strings` + ); + expect(console.log).toHaveEmittedEMFWith( + expect.not.objectContaining({ test: value }) + ); + expect(console.log).toHaveEmittedMetricWith( + expect.not.objectContaining({ Dimensions: [['test']] }) + ); + }); +}); diff --git a/packages/metrics/tests/unit/initializeMetrics.test.ts b/packages/metrics/tests/unit/initializeMetrics.test.ts new file mode 100644 index 0000000000..04742f9f15 --- /dev/null +++ b/packages/metrics/tests/unit/initializeMetrics.test.ts @@ -0,0 +1,156 @@ +import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { DEFAULT_NAMESPACE } from '../../src/constants.js'; +import { MetricUnit, Metrics } from '../../src/index.js'; +import type { ConfigServiceInterface } from '../../src/types/index.js'; + +describe('Initialize Metrics', () => { + const ENVIRONMENT_VARIABLES = process.env; + + beforeEach(() => { + process.env = { ...ENVIRONMENT_VARIABLES, POWERTOOLS_DEV: 'true' }; + vi.resetAllMocks(); + }); + + it('uses the default service name when none is provided', () => { + // Prepare + process.env.POWERTOOLS_SERVICE_NAME = undefined; + const metrics = new Metrics({ singleMetric: true }); + + // Act + metrics.addMetric('test', MetricUnit.Count, 1); + + // Assess + expect(console.log).toHaveBeenCalledTimes(1); + expect(console.log).toHaveEmittedEMFWith( + expect.objectContaining({ service: 'service_undefined' }) + ); + }); + + it('uses the service name provided in the constructor', () => { + // Prepare + process.env.POWERTOOLS_SERVICE_NAME = undefined; + const metrics = new Metrics({ + singleMetric: true, + serviceName: 'hello-world-from-constructor', + }); + + // Act + metrics.addMetric('test', MetricUnit.Count, 1); + + // Assess + expect(console.log).toHaveBeenCalledTimes(1); + expect(console.log).toHaveEmittedEMFWith( + expect.objectContaining({ service: 'hello-world-from-constructor' }) + ); + }); + + it('uses the service name provided in the environment variables', () => { + // Prepare + process.env.POWERTOOLS_SERVICE_NAME = 'hello-world-from-env'; + const metrics = new Metrics({ singleMetric: true }); + + // Act + metrics.addMetric('test', MetricUnit.Count, 1); + + // Assess + expect(console.log).toHaveBeenCalledTimes(1); + expect(console.log).toHaveEmittedEMFWith( + expect.objectContaining({ service: 'hello-world-from-env' }) + ); + }); + + it('uses the default namespace when none is provided', () => { + // Prepare + const metrics = new Metrics({ singleMetric: true }); + + // Act + metrics.addMetric('test', MetricUnit.Count, 1); + + // Assess + expect(console.log).toHaveBeenCalledTimes(1); + expect(console.log).toHaveEmittedMetricWith( + expect.objectContaining({ Namespace: DEFAULT_NAMESPACE }) + ); + expect(console.warn).toHaveBeenCalledTimes(1); + expect(console.warn).toHaveBeenCalledWith( + 'Namespace should 
be defined, default used' + ); + }); + + it('uses the namespace provided in the constructor', () => { + // Prepare + const metrics = new Metrics({ + singleMetric: true, + namespace: 'hello-world-namespace', + }); + + // Act + metrics.addMetric('test', MetricUnit.Count, 1); + + // Assess + expect(console.log).toHaveBeenCalledTimes(1); + expect(console.log).toHaveEmittedMetricWith( + expect.objectContaining({ Namespace: 'hello-world-namespace' }) + ); + }); + + it('uses the namespace provided in the environment variables', () => { + // Prepare + process.env.POWERTOOLS_METRICS_NAMESPACE = 'hello-world-namespace-from-env'; + const metrics = new Metrics({ singleMetric: true }); + + // Act + metrics.addMetric('test', MetricUnit.Count, 1); + + // Assess + expect(console.log).toHaveBeenCalledTimes(1); + expect(console.log).toHaveEmittedMetricWith( + expect.objectContaining({ Namespace: 'hello-world-namespace-from-env' }) + ); + }); + + it('uses the custom config service provided', () => { + // Prepare + const configService = { + getNamespace(): string { + return 'custom-namespace'; + }, + getServiceName(): string { + return 'custom-service'; + }, + isDevMode(): boolean { + return false; + }, + isValueTrue(value: string): boolean { + return value === 'true'; + }, + }; + const metrics = new Metrics({ + singleMetric: true, + customConfigService: configService as unknown as ConfigServiceInterface, + }); + + // Act + metrics.addMetric('test', MetricUnit.Count, 1); + + // Assess + expect(console.log).toHaveEmittedNthEMFWith( + 1, + expect.objectContaining({ service: 'custom-service' }) + ); + expect(console.log).toHaveEmittedNthMetricWith( + 1, + expect.objectContaining({ Namespace: 'custom-namespace' }) + ); + }); + + it("doesn't use the global console object by default", () => { + // Prepare + process.env.POWERTOOLS_DEV = undefined; + const metrics = new Metrics(); + + // Assess + // biome-ignore lint/complexity/useLiteralKeys: we need to access the internal console object + expect(metrics['console']).not.toEqual(console); + }); +}); diff --git a/packages/metrics/tests/unit/logMetrics.test.ts b/packages/metrics/tests/unit/logMetrics.test.ts new file mode 100644 index 0000000000..149bdc47de --- /dev/null +++ b/packages/metrics/tests/unit/logMetrics.test.ts @@ -0,0 +1,266 @@ +import { cleanupMiddlewares } from '@aws-lambda-powertools/commons'; +import middy from '@middy/core'; +import type { Context } from 'aws-lambda'; +import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { COLD_START_METRIC, DEFAULT_NAMESPACE } from '../../src/constants.js'; +import { MetricUnit, Metrics } from '../../src/index.js'; +import { logMetrics } from '../../src/middleware/middy.js'; + +describe('LogMetrics decorator & Middy.js middleware', () => { + const ENVIRONMENT_VARIABLES = process.env; + + beforeEach(() => { + process.env = { ...ENVIRONMENT_VARIABLES, POWERTOOLS_DEV: 'true' }; + vi.resetAllMocks(); + }); + + it('captures the cold start metric on the first invocation', async () => { + // Prepare + const metrics = new Metrics({ + singleMetric: false, + namespace: DEFAULT_NAMESPACE, + }); + vi.spyOn(metrics, 'publishStoredMetrics'); + class Test { + readonly #metricName: string; + + public constructor(name: string) { + this.#metricName = name; + } + + @metrics.logMetrics({ captureColdStartMetric: true }) + async handler(_event: unknown, _context: Context) { + this.addGreetingMetric(); + } + + addGreetingMetric() { + metrics.addMetric(this.#metricName, MetricUnit.Count, 1); + } + } + const lambda = new 
Test('greetings'); + const handler = lambda.handler.bind(lambda); + + // Act + await handler({}, {} as Context); + await handler({}, {} as Context); + + // Assess + expect(metrics.publishStoredMetrics).toHaveBeenCalledTimes(2); + expect(console.log).toHaveBeenCalledTimes(3); + expect(console.log).toHaveEmittedNthEMFWith( + 1, + expect.objectContaining({ + [COLD_START_METRIC]: 1, + service: 'hello-world', + }) + ); + expect(console.log).toHaveEmittedNthMetricWith( + 2, + expect.objectContaining({ + Metrics: [ + { + Name: 'greetings', + Unit: 'Count', + }, + ], + }) + ); + expect(console.log).toHaveEmittedNthMetricWith( + 3, + expect.objectContaining({ + Metrics: [ + { + Name: 'greetings', + Unit: 'Count', + }, + ], + }) + ); + }); + + it('captures the cold start metric on the first invocation when using the Middy.js middleware', async () => { + // Prepare + const metrics = new Metrics({ + singleMetric: false, + namespace: DEFAULT_NAMESPACE, + }); + vi.spyOn(metrics, 'publishStoredMetrics'); + const handler = middy(async () => { + metrics.addMetric('greetings', MetricUnit.Count, 1); + }).use(logMetrics(metrics, { captureColdStartMetric: true })); + + // Act + await handler({}, {} as Context); + await handler({}, {} as Context); + + // Assess + expect(metrics.publishStoredMetrics).toHaveBeenCalledTimes(2); + expect(console.log).toHaveBeenCalledTimes(3); + expect(console.log).toHaveEmittedNthEMFWith( + 1, + expect.objectContaining({ + [COLD_START_METRIC]: 1, + service: 'hello-world', + }) + ); + }); + + it('includes default dimensions passed in the decorator', async () => { + // Prepare + const metrics = new Metrics({ + singleMetric: false, + namespace: DEFAULT_NAMESPACE, + }); + class Test { + @metrics.logMetrics({ defaultDimensions: { environment: 'test' } }) + async handler(_event: unknown, _context: Context) { + metrics.addMetric('test', MetricUnit.Count, 1); + } + } + const lambda = new Test(); + const handler = lambda.handler.bind(lambda); + + // Act + await handler({}, {} as Context); + + // Assess + expect(console.log).toHaveBeenCalledTimes(1); + expect(console.log).toHaveEmittedNthEMFWith( + 1, + expect.objectContaining({ + test: 1, + service: 'hello-world', + environment: 'test', + }) + ); + }); + + it('includes default dimensions passed in the decorator when using the Middy.js middleware', async () => { + // Prepare + const metrics = new Metrics({ + singleMetric: false, + namespace: DEFAULT_NAMESPACE, + }); + vi.spyOn(metrics, 'publishStoredMetrics'); + const handler = middy(async () => { + metrics.addMetric('greetings', MetricUnit.Count, 1); + }).use( + logMetrics(metrics, { + defaultDimensions: { + environment: 'test', + }, + }) + ); + + // Act + await handler({}, {} as Context); + + // Assess + expect(console.log).toHaveBeenCalledTimes(1); + expect(console.log).toHaveEmittedNthEMFWith( + 1, + expect.objectContaining({ + greetings: 1, + service: 'hello-world', + environment: 'test', + }) + ); + }); + + it("doesn't swallow errors when the decorated function throws", async () => { + // Prepare + const metrics = new Metrics({ + singleMetric: false, + namespace: DEFAULT_NAMESPACE, + }); + class Test { + @metrics.logMetrics() + async handler(_event: unknown, _context: Context) { + throw new Error('Something went wrong'); + } + } + const lambda = new Test(); + const handler = lambda.handler.bind(lambda); + + // Act & Assess + await expect(handler({}, {} as Context)).rejects.toThrowError( + 'Something went wrong' + ); + }); + + it('throws when no metrics are added and throwOnEmptyMetrics 
is true', async () => { + // Prepare + const metrics = new Metrics({ + singleMetric: false, + namespace: DEFAULT_NAMESPACE, + }); + class Test { + @metrics.logMetrics({ throwOnEmptyMetrics: true }) + async handler(_event: unknown, _context: Context) { + return 'Hello, world!'; + } + } + const lambda = new Test(); + const handler = lambda.handler.bind(lambda); + + // Act & Assess + await expect(handler({}, {} as Context)).rejects.toThrowError( + 'The number of metrics recorded must be higher than zero' + ); + }); + + it('throws when no metrics are added and throwOnEmptyMetrics is true when using the Middy.js middleware', async () => { + // Prepare + const metrics = new Metrics({ + singleMetric: false, + namespace: DEFAULT_NAMESPACE, + }); + const handler = middy(async () => {}).use( + logMetrics([metrics], { throwOnEmptyMetrics: true }) + ); + + // Act & Assess + expect(() => handler({}, {} as Context)).rejects.toThrowError( + 'The number of metrics recorded must be higher than zero' + ); + }); + + it('flushes the metrics even when a previous middleware returns early', async () => { + // Prepare + const metrics = new Metrics({ + singleMetric: false, + namespace: DEFAULT_NAMESPACE, + }); + vi.spyOn(metrics, 'publishStoredMetrics'); + const myCustomMiddleware = (): middy.MiddlewareObj => { + const before = async ( + request: middy.Request + ): Promise => { + // Return early on the second invocation + if (request.event.idx === 1) { + // Cleanup Powertools resources + await cleanupMiddlewares(request); + + // Then return early + return 'foo'; + } + }; + + return { + before, + }; + }; + const handler = middy(() => { + metrics.addMetric('successfulBooking', MetricUnit.Count, 1); + }) + .use(logMetrics(metrics)) + .use(myCustomMiddleware()); + + // Act + await handler({ idx: 0 }, {} as Context); + await handler({ idx: 1 }, {} as Context); + + // Assess + expect(metrics.publishStoredMetrics).toHaveBeenCalledTimes(2); + }); +}); diff --git a/packages/metrics/tests/unit/metadata.test.ts b/packages/metrics/tests/unit/metadata.test.ts new file mode 100644 index 0000000000..db826aebb8 --- /dev/null +++ b/packages/metrics/tests/unit/metadata.test.ts @@ -0,0 +1,83 @@ +import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { MetricUnit, Metrics } from '../../src/index.js'; + +describe('Working with metadata', () => { + const ENVIRONMENT_VARIABLES = process.env; + + beforeEach(() => { + process.env = { ...ENVIRONMENT_VARIABLES, POWERTOOLS_DEV: 'true' }; + vi.resetAllMocks(); + }); + + it('adds metadata to the metric', () => { + // Prepare + const metrics = new Metrics({ + singleMetric: true, + }); + + // Act + metrics.addMetadata('cost-center', '1234'); + metrics.addMetric('test', MetricUnit.Count, 1); + + // Assess + expect(console.log).toHaveEmittedEMFWith( + expect.objectContaining({ service: 'hello-world', 'cost-center': '1234' }) + ); + }); + + it('overwrites metadata with the same key', () => { + // Prepare + const metrics = new Metrics({ + singleMetric: true, + }); + + // Act + metrics.addMetadata('cost-center', '1234'); + metrics.addMetadata('cost-center', '5678'); + metrics.addMetric('test', MetricUnit.Count, 1); + + // Assess + expect(console.log).toHaveEmittedEMFWith( + expect.objectContaining({ service: 'hello-world', 'cost-center': '5678' }) + ); + }); + + it('clears the metadata', () => { + // Prepare + const metrics = new Metrics({ + singleMetric: true, + }); + + // Act + metrics.addMetadata('cost-center', '1234'); + metrics.clearMetadata(); + metrics.addMetric('test', 
MetricUnit.Count, 1); + + // Assess + expect(console.log).toHaveEmittedEMFWith( + expect.not.objectContaining({ 'cost-center': '1234' }) + ); + }); + + it('clears the metadata after adding a metric', () => { + // Prepare + const metrics = new Metrics({ + singleMetric: true, + }); + + // Act + metrics.addMetadata('cost-center', '1234'); + metrics.addMetric('test', MetricUnit.Count, 1); + metrics.addMetric('test', MetricUnit.Count, 1); + + // Assess + expect(console.log).toHaveEmittedNthEMFWith( + 1, + expect.objectContaining({ 'cost-center': '1234' }) + ); + expect(console.log).toHaveEmittedNthEMFWith( + 2, + expect.not.objectContaining({ 'cost-center': '1234' }) + ); + }); +}); diff --git a/packages/metrics/vitest.config.ts b/packages/metrics/vitest.config.ts new file mode 100644 index 0000000000..9f1196ef1f --- /dev/null +++ b/packages/metrics/vitest.config.ts @@ -0,0 +1,8 @@ +import { defineProject } from 'vitest/config'; + +export default defineProject({ + test: { + environment: 'node', + setupFiles: ['../testing/src/setupEnv.ts'], + }, +}); diff --git a/packages/testing/src/setupEnv.ts b/packages/testing/src/setupEnv.ts index abc8a63691..2d20623802 100644 --- a/packages/testing/src/setupEnv.ts +++ b/packages/testing/src/setupEnv.ts @@ -37,6 +37,143 @@ expect.extend({ expected, }; }, + toHaveEmittedEMFWith(received, expected) { + const calls = received.mock.calls; + const messages = new Array(calls.length); + if (calls.length === 0) { + return { + message: () => + 'Expected function to have emitted EMF with provided object', + pass: false, + actual: 'No EMF emitted', + expected, + }; + } + for (const [idx, call] of calls.entries()) { + const [rawMessage] = call; + try { + messages[idx] = JSON.parse(rawMessage); + } catch (error) { + messages[idx] = rawMessage; + } + if (this.equals(messages[idx], expected)) { + return { + message: () => '', + pass: true, + }; + } + } + + return { + message: () => + 'Expected function to have emitted EMF with provided object', + pass: false, + actual: messages, + expected, + }; + }, + toHaveEmittedNthEMFWith(received, nth, expected) { + const call = received.mock.calls[nth - 1]; + if (!call) { + return { + message: () => + `Expected function to have emitted EMF with provided object during ${nth} call`, + pass: false, + actual: 'No EMF found at index', + expected, + }; + } + const [rawMessage] = call; + + const message = JSON.parse(rawMessage); + if (this.equals(message, expected)) { + return { + message: () => '', + pass: true, + }; + } + + return { + message: () => + 'Expected function to have emitted EMF with provided object', + pass: false, + actual: message, + expected, + }; + }, + toHaveEmittedMetricWith(received, expected) { + const calls = received.mock.calls; + const emfs = []; + if (calls.length === 0) { + return { + message: () => + 'Expected function to have emitted metric with provided object', + pass: false, + actual: 'No metric emitted', + expected, + }; + } + for (const [idx, call] of calls.entries()) { + const [rawMessage] = call; + try { + emfs[idx] = JSON.parse(rawMessage); + } catch (error) { + emfs[idx] = rawMessage; + } + const metrics = emfs[idx]._aws.CloudWatchMetrics; + if (metrics) { + for (const metric of metrics) { + if (this.equals(metric, expected)) { + return { + message: () => '', + pass: true, + }; + } + } + } + } + + return { + message: () => + 'Expected function to have emitted metric with provided object', + pass: false, + actual: emfs, + expected, + }; + }, + toHaveEmittedNthMetricWith(received, nth, expected) { + 
const call = received.mock.calls[nth - 1]; + if (!call) { + return { + message: () => + `Expected function to have emitted metric with provided object during ${nth} call`, + pass: false, + actual: 'No metric found at index', + expected, + }; + } + const [rawMessage] = call; + const message = JSON.parse(rawMessage); + const metrics = message._aws.CloudWatchMetrics; + if (metrics) { + for (const metric of metrics) { + if (this.equals(metric, expected)) { + return { + message: () => '', + pass: true, + }; + } + } + } + + return { + message: () => + 'Expected function to have emitted metric with provided object', + pass: false, + actual: message, + expected, + }; + }, toHaveLoggedNth(received, nth, expected) { const call = received.mock.calls[nth - 1]; if (!call) { @@ -107,6 +244,92 @@ declare module 'vitest' { * @param expected - The expected log message */ toHaveLoggedNth(nth: number, expected: Record): void; + /** + * Asserts that the function has emitted the expected EMF blob + * + * @example + * ```ts + * vi.spyOn(console, 'log').mockReturnValue(); + * + * expect(console.log).toHaveEmittedEMFWith( + * expect.objectContaining({ + * service: 'Hello, world!', + * }) + * ); + * ``` + * + * @param expected - The expected EMF message + */ + toHaveEmittedEMFWith(expected: Record): void; + /** + * Asserts that the function has emitted the expected EMF blob + * during the specific nth call. + * + * @example + * ```ts + * vi.spyOn(console, 'log').mockReturnValue(); + * + * expect(console.log).toHaveEmittedNthEMFWith( + * 1, + * expect.objectContaining({ + * service: 'Hello, world!', + * }) + * ); + * ``` + * + * @param nth - The index of the call to check + * @param expected - The expected EMF message + */ + toHaveEmittedNthEMFWith( + nth: number, + expected: Record + ): void; + /** + * Asserts that the function has emitted the expected metric + * + * A metric is the object within the `_aws.CloudWatchMetrics` key + * of the emitted EMF blob. + * + * @example + * ```ts + * vi.spyOn(console, 'log').mockReturnValue(); + * + * expect(console.log).toHaveEmittedMetricWith( + * expect.objectContaining({ + * service: 'Hello, world!', + * }) + * ); + * ``` + * + * @param expected - The expected metric + */ + toHaveEmittedMetricWith(expected: Record): void; + /** + * Asserts that the function has emitted the expected metric + * during the specific nth call. + * + * A metric is the object within the `_aws.CloudWatchMetrics` key + * of the emitted EMF blob. 
+ * + * @example + * ```ts + * vi.spyOn(console, 'log').mockReturnValue(); + * + * expect(console.log).toHaveEmittedNthMetricWith( + * 1, + * expect.objectContaining({ + * service: 'Hello, world!', + * }) + * ); + * ``` + * + * @param nth - The index of the call to check + * @param expected - The expected metric + */ + toHaveEmittedNthMetricWith( + nth: number, + expected: Record + ): void; } interface AsymmetricMatchersContaining extends CustomMatcher {} } diff --git a/vitest.config.ts b/vitest.config.ts index 06d877a213..e8056e9a46 100644 --- a/vitest.config.ts +++ b/vitest.config.ts @@ -19,7 +19,7 @@ export default defineConfig({ 'packages/idempotency/src/types/**', 'packages/jmespath/src/types.ts', 'packages/logger/src/types/**', - 'packages/metrics/**', + 'packages/metrics/src/types/**', 'packages/parameters/src/types/**', 'packages/parser/src/types/**', 'layers/**', From 0f3bf9c1cd6d87722597728ad8d5f0be177d5dc5 Mon Sep 17 00:00:00 2001 From: Andrea Amorosi Date: Mon, 25 Nov 2024 12:12:44 +0100 Subject: [PATCH 2/6] chore: rebase --- packages/metrics/tests/unit/Metrics.test.ts | 2673 ----------------- .../tests/unit/coldStartMetric.test.ts | 6 +- .../tests/unit/creatingMetrics.test.ts | 6 +- .../tests/unit/customTimestamp.test.ts | 6 +- .../metrics/tests/unit/dimensions.test.ts | 6 +- .../tests/unit/initializeMetrics.test.ts | 62 +- .../metrics/tests/unit/logMetrics.test.ts | 6 +- packages/metrics/tests/unit/metadata.test.ts | 6 +- .../tests/unit/middleware/middy.test.ts | 452 --- 9 files changed, 91 insertions(+), 3132 deletions(-) delete mode 100644 packages/metrics/tests/unit/Metrics.test.ts delete mode 100644 packages/metrics/tests/unit/middleware/middy.test.ts diff --git a/packages/metrics/tests/unit/Metrics.test.ts b/packages/metrics/tests/unit/Metrics.test.ts deleted file mode 100644 index 298a40561e..0000000000 --- a/packages/metrics/tests/unit/Metrics.test.ts +++ /dev/null @@ -1,2673 +0,0 @@ -/** - * Test Metrics class - * - * @group unit/metrics/class - */ -import type { - GenericLogger, - LambdaInterface, -} from '@aws-lambda-powertools/commons/types'; -import context from '@aws-lambda-powertools/testing-utils/context'; -import type { Context, Handler } from 'aws-lambda'; -import { EnvironmentVariablesService } from '../../src/config/EnvironmentVariablesService.js'; -import { - COLD_START_METRIC, - DEFAULT_NAMESPACE, - EMF_MAX_TIMESTAMP_FUTURE_AGE, - EMF_MAX_TIMESTAMP_PAST_AGE, - MAX_DIMENSION_COUNT, - MAX_METRICS_SIZE, - MAX_METRIC_VALUES_SIZE, -} from '../../src/constants.js'; -import { MetricResolution, MetricUnit, Metrics } from '../../src/index.js'; -import type { - ConfigServiceInterface, - Dimensions, - EmfOutput, - MetricsOptions, -} from '../../src/types/index.js'; -import { setupDecoratorLambdaHandler } from '../helpers/metricsUtils.js'; - -jest.mock('node:console', () => ({ - ...jest.requireActual('node:console'), - Console: jest.fn().mockImplementation(() => ({ - log: jest.fn(), - warn: jest.fn(), - debug: jest.fn(), - })), -})); -jest.spyOn(console, 'warn').mockImplementation(() => ({})); -const OriginalDate = Date; -const mockDate = new Date(1466424490000); -/** - * If the constructor is called without arguments, it returns a predefined mock date. - * Otherwise, it delegates to the original Date constructor with the provided arguments. 
- */ -const dateSpy = jest - .spyOn(global, 'Date') - .mockImplementation((...args: ConstructorParameters) => { - if ((args as unknown[]).length === 0) { - return mockDate; - } - return new OriginalDate(...args); - }); -jest.spyOn(console, 'log').mockImplementation(); -jest.spyOn(console, 'warn').mockImplementation(); - -interface LooseObject { - [key: string]: string; -} - -describe('Class: Metrics', () => { - const ENVIRONMENT_VARIABLES = process.env; - const TEST_NAMESPACE = 'test'; - const event = { - foo: 'bar', - bar: 'baz', - }; - - beforeEach(() => { - jest.clearAllMocks(); - jest.resetModules(); - dateSpy.mockClear(); - process.env = { ...ENVIRONMENT_VARIABLES }; - }); - - describe('Method: constructor', () => { - test('when no constructor parameters are set, creates instance with the options set in the environment variables', () => { - // Prepare - const metricsOptions = undefined; - - // Act - const metrics: Metrics = new Metrics(metricsOptions); - - // Assess - expect(metrics).toEqual( - expect.objectContaining({ - coldStart: true, - customConfigService: undefined, - defaultDimensions: { - service: 'service_undefined', - }, - defaultServiceName: 'service_undefined', - dimensions: {}, - envVarsService: expect.any(EnvironmentVariablesService), - isSingleMetric: false, - metadata: {}, - namespace: 'hello-world', - shouldThrowOnEmptyMetrics: false, - storedMetrics: {}, - }) - ); - }); - - test('when no constructor parameters and no environment variables are set, creates instance with the default properties', () => { - // Prepare - const metricsOptions = undefined; - process.env = {}; - - // Act - const metrics: Metrics = new Metrics(metricsOptions); - - // Assess - expect(metrics).toEqual( - expect.objectContaining({ - coldStart: true, - customConfigService: undefined, - defaultDimensions: { - service: 'service_undefined', - }, - defaultServiceName: 'service_undefined', - dimensions: {}, - envVarsService: expect.any(EnvironmentVariablesService), - isSingleMetric: false, - metadata: {}, - namespace: '', - shouldThrowOnEmptyMetrics: false, - storedMetrics: {}, - }) - ); - }); - - test('when constructor parameters are set, creates instance with the options set in the constructor parameters', () => { - // Prepare - const metricsOptions: MetricsOptions = { - customConfigService: new EnvironmentVariablesService(), - namespace: TEST_NAMESPACE, - serviceName: 'test-service', - singleMetric: true, - defaultDimensions: { - service: 'order', - }, - }; - - // Act - const metrics: Metrics = new Metrics(metricsOptions); - - // Assess - expect(metrics).toEqual( - expect.objectContaining({ - coldStart: true, - customConfigService: expect.any(EnvironmentVariablesService), - defaultDimensions: metricsOptions.defaultDimensions, - defaultServiceName: 'service_undefined', - dimensions: {}, - envVarsService: expect.any(EnvironmentVariablesService), - isSingleMetric: true, - metadata: {}, - namespace: metricsOptions.namespace, - shouldThrowOnEmptyMetrics: false, - storedMetrics: {}, - }) - ); - }); - - test('when custom namespace is passed, creates instance with the correct properties', () => { - // Prepare - const metricsOptions: MetricsOptions = { - namespace: TEST_NAMESPACE, - }; - - // Act - const metrics: Metrics = new Metrics(metricsOptions); - - // Assess - expect(metrics).toEqual( - expect.objectContaining({ - coldStart: true, - customConfigService: undefined, - defaultDimensions: { - service: 'service_undefined', - }, - defaultServiceName: 'service_undefined', - dimensions: {}, - envVarsService: 
expect.any(EnvironmentVariablesService), - isSingleMetric: false, - metadata: {}, - namespace: metricsOptions.namespace, - shouldThrowOnEmptyMetrics: false, - storedMetrics: {}, - }) - ); - }); - - test('when custom defaultDimensions is passed, creates instance with the correct properties', () => { - // Prepare - const metricsOptions: MetricsOptions = { - defaultDimensions: { - service: 'order', - }, - }; - - // Act - const metrics: Metrics = new Metrics(metricsOptions); - - // Assess - expect(metrics).toEqual( - expect.objectContaining({ - coldStart: true, - customConfigService: undefined, - defaultDimensions: metricsOptions.defaultDimensions, - defaultServiceName: 'service_undefined', - dimensions: {}, - envVarsService: expect.any(EnvironmentVariablesService), - isSingleMetric: false, - metadata: {}, - namespace: 'hello-world', - shouldThrowOnEmptyMetrics: false, - storedMetrics: {}, - }) - ); - }); - - test('when singleMetric is passed, creates instance with the correct properties', () => { - // Prepare - const metricsOptions: MetricsOptions = { - singleMetric: true, - }; - - // Act - const metrics: Metrics = new Metrics(metricsOptions); - - // Assess - expect(metrics).toEqual( - expect.objectContaining({ - coldStart: true, - customConfigService: undefined, - defaultDimensions: { - service: 'service_undefined', - }, - defaultServiceName: 'service_undefined', - dimensions: {}, - envVarsService: expect.any(EnvironmentVariablesService), - isSingleMetric: true, - metadata: {}, - namespace: 'hello-world', - shouldThrowOnEmptyMetrics: false, - storedMetrics: {}, - }) - ); - }); - - test('when custom customConfigService is passed, creates instance with the correct properties', () => { - // Prepare - const configService: ConfigServiceInterface = { - get(name: string): string { - return `a-string-from-${name}`; - }, - getNamespace(): string { - return TEST_NAMESPACE; - }, - getServiceName(): string { - return 'test-service'; - }, - getXrayTraceId(): string | undefined { - return 'test-trace-id'; - }, - getXrayTraceSampled(): boolean { - return true; - }, - isDevMode(): boolean { - return false; - }, - isValueTrue(value: string): boolean { - return value === 'true'; - }, - }; - const metricsOptions: MetricsOptions = { - customConfigService: configService, - }; - - // Act - const metrics: Metrics = new Metrics(metricsOptions); - - // Assess - expect(metrics).toEqual( - expect.objectContaining({ - coldStart: true, - customConfigService: configService, - defaultDimensions: { - service: 'test-service', - }, - defaultServiceName: 'service_undefined', - dimensions: {}, - envVarsService: expect.any(EnvironmentVariablesService), - isSingleMetric: false, - metadata: {}, - namespace: TEST_NAMESPACE, - shouldThrowOnEmptyMetrics: false, - storedMetrics: {}, - }) - ); - }); - - test('when custom serviceName is passed, creates instance with the correct properties', () => { - // Prepare - const metricsOptions: MetricsOptions = { - serviceName: 'test-service', - }; - - // Act - const metrics: Metrics = new Metrics(metricsOptions); - - // Assess - expect(metrics).toEqual( - expect.objectContaining({ - coldStart: true, - customConfigService: undefined, - defaultDimensions: { - service: 'test-service', - }, - defaultServiceName: 'service_undefined', - dimensions: {}, - envVarsService: expect.any(EnvironmentVariablesService), - isSingleMetric: false, - metadata: {}, - namespace: 'hello-world', - shouldThrowOnEmptyMetrics: false, - storedMetrics: {}, - }) - ); - }); - }); - - describe('Method: addDimension', () => { 
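// Editor's note (sketch, not part of the patch): under vitest, the jest-based
// global setup at the top of this file (Date constructor spy, console spies,
// clearAllMocks/resetModules, process.env snapshot) could look roughly like the
// following. The frozen date value and the use of fake timers are assumptions,
// not taken from the original file.
import { afterAll, afterEach, beforeEach, vi } from 'vitest';

const mockDate = new Date('2024-01-01T00:00:00.000Z'); // placeholder value
const ENVIRONMENT_VARIABLES = process.env;

beforeEach(() => {
  // Freeze time instead of hand-rolling a spy on the Date constructor.
  vi.useFakeTimers();
  vi.setSystemTime(mockDate);
  // Silence console output produced by the utility during tests.
  vi.spyOn(console, 'log').mockImplementation(() => {});
  vi.spyOn(console, 'warn').mockImplementation(() => {});
  // Give every test a clean copy of the environment.
  process.env = { ...ENVIRONMENT_VARIABLES };
});

afterEach(() => {
  vi.clearAllMocks();
  vi.resetModules();
});

afterAll(() => {
  vi.useRealTimers();
  vi.restoreAllMocks();
});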
- test('when called, it should store dimensions', () => { - // Prepare - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - const dimensionName = 'test-dimension'; - const dimensionValue = 'test-value'; - - // Act - metrics.addDimension(dimensionName, dimensionValue); - - // Assess - expect(metrics).toEqual( - expect.objectContaining({ - dimensions: { - [dimensionName]: dimensionValue, - }, - }) - ); - }); - - test('it should update existing dimension value if same dimension is added again', () => { - // Prepare - const logger = { - warn: jest.fn(), - } as unknown as GenericLogger; - const metrics: Metrics = new Metrics({ - namespace: TEST_NAMESPACE, - logger, - }); - const dimensionName = 'test-dimension'; - - // Act - metrics.addDimension(dimensionName, 'test-value-1'); - metrics.addDimension(dimensionName, 'test-value-2'); - - // Assess - expect(metrics).toEqual( - expect.objectContaining({ - dimensions: { - [dimensionName]: 'test-value-2', - }, - }) - ); - expect(logger.warn).toHaveBeenCalledWith( - `Dimension "test-dimension" has already been added. The previous value will be overwritten.` - ); - }); - - test('it should throw error if the number of dimensions exceeds the maximum allowed', () => { - // Prepare - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - const dimensionName = 'test-dimension'; - const dimensionValue = 'test-value'; - - // Act & Assess - // Starts from 1 because the service dimension is already added by default - expect(() => { - for (let i = 1; i < MAX_DIMENSION_COUNT; i++) { - metrics.addDimension( - `${dimensionName}-${i}`, - `${dimensionValue}-${i}` - ); - } - }).not.toThrowError(); - // biome-ignore lint/complexity/useLiteralKeys: This needs to be accessed with literal key for testing - expect(Object.keys(metrics['defaultDimensions']).length).toBe(1); - // biome-ignore lint/complexity/useLiteralKeys: This needs to be accessed with literal key for testing - expect(Object.keys(metrics['dimensions']).length).toBe( - MAX_DIMENSION_COUNT - 1 - ); - expect(() => - metrics.addDimension('another-dimension', 'another-dimension-value') - ).toThrowError( - `The number of metric dimensions must be lower than ${MAX_DIMENSION_COUNT}` - ); - }); - - test('it should take consideration of defaultDimensions while throwing error if number of dimensions exceeds the maximum allowed', () => { - // Prepare - const defaultDimensions: LooseObject = { - environment: 'dev', - foo: 'bar', - }; - const metrics: Metrics = new Metrics({ - namespace: TEST_NAMESPACE, - defaultDimensions, - }); - const dimensionName = 'test-dimension'; - const dimensionValue = 'test-value'; - - // Act & Assess - // Starts from 3 because three default dimensions are already set (service, environment, foo) - expect(() => { - for (let i = 3; i < MAX_DIMENSION_COUNT; i++) { - metrics.addDimension( - `${dimensionName}-${i}`, - `${dimensionValue}-${i}` - ); - } - }).not.toThrowError(); - // biome-ignore lint/complexity/useLiteralKeys: This needs to be accessed with literal key for testing - expect(Object.keys(metrics['defaultDimensions']).length).toBe(3); - // biome-ignore lint/complexity/useLiteralKeys: This needs to be accessed with literal key for testing - expect(Object.keys(metrics['dimensions']).length).toBe( - MAX_DIMENSION_COUNT - 3 - ); - expect(() => - metrics.addDimension('another-dimension', 'another-dimension-value') - ).toThrowError( - `The number of metric dimensions must be lower than ${MAX_DIMENSION_COUNT}` - ); - }); - - describe('invalid values should not 
be added as dimensions', () => { - const testCases = [ - { value: undefined as unknown as string, description: 'undefined' }, - { value: null as unknown as string, description: 'null' }, - { value: '', description: 'empty string' }, - ]; - - for (const { value, description } of testCases) { - it(`it should not add dimension with ${description} value and log a warning`, () => { - // Prepare - const customLogger = { - warn: jest.fn(), - debug: jest.fn(), - error: jest.fn(), - info: jest.fn(), - }; - const metrics: Metrics = new Metrics({ - namespace: TEST_NAMESPACE, - logger: customLogger, - }); - const consoleWarnSpy = jest.spyOn(customLogger, 'warn'); - const testDimensionName = 'test-dimension'; - - // Act - metrics.addDimension(testDimensionName, value); - - // Assess - expect(consoleWarnSpy).toHaveBeenCalledWith( - `The dimension ${testDimensionName} doesn't meet the requirements and won't be added. Ensure the dimension name and value are non empty strings` - ); - expect(metrics).toEqual( - expect.objectContaining({ - dimensions: {}, - }) - ); - }); - } - }); - }); - - describe('Method: addDimensions', () => { - test('it should add multiple dimensions', () => { - // Prepare - const dimensionsToBeAdded: LooseObject = { - 'test-dimension-1': 'test-value-1', - 'test-dimension-2': 'test-value-2', - }; - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - - // Act - metrics.addDimensions(dimensionsToBeAdded); - - // Assess - expect(metrics).toEqual( - expect.objectContaining({ - dimensions: dimensionsToBeAdded, - }) - ); - }); - - test('it should update existing dimension value if same dimension is added again', () => { - // Prepare - const dimensionsToBeAdded: LooseObject = { - 'test-dimension-1': 'test-value-1', - 'test-dimension-2': 'test-value-2', - }; - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - - // Act - metrics.addDimensions(dimensionsToBeAdded); - metrics.addDimensions({ 'test-dimension-1': 'test-value-3' }); - - // Assess - expect(metrics).toEqual( - expect.objectContaining({ - dimensions: { - 'test-dimension-1': 'test-value-3', - 'test-dimension-2': 'test-value-2', - }, - }) - ); - }); - - test('it should successfully add up to maximum allowed dimensions without throwing error', () => { - // Prepare - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - const dimensionName = 'test-dimension'; - const dimensionValue = 'test-value'; - const dimensionsToBeAdded: LooseObject = {}; - for (let i = 0; i < MAX_DIMENSION_COUNT - 1; i++) { - dimensionsToBeAdded[`${dimensionName}-${i}`] = `${dimensionValue}-${i}`; - } - - // Act & Assess - expect(() => - metrics.addDimensions(dimensionsToBeAdded) - ).not.toThrowError(); - // biome-ignore lint/complexity/useLiteralKeys: This needs to be accessed with literal key for testing - expect(Object.keys(metrics['dimensions']).length).toBe( - MAX_DIMENSION_COUNT - 1 // Starts from 1 because the service dimension is already added by default - ); - }); - - test('it should throw error if number of dimensions exceeds the maximum allowed', () => { - // Prepare - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - const dimensionName = 'test-dimension'; - const dimensionValue = 'test-value'; - const dimensionsToBeAdded: LooseObject = {}; - for (let i = 0; i < MAX_DIMENSION_COUNT - 1; i++) { - dimensionsToBeAdded[`${dimensionName}-${i}`] = `${dimensionValue}-${i}`; - } - - // Act & Assess - metrics.addDimensions(dimensionsToBeAdded); - // biome-ignore 
lint/complexity/useLiteralKeys: This needs to be accessed with literal key for testing - expect(Object.keys(metrics['dimensions']).length).toBe( - MAX_DIMENSION_COUNT - 1 // Starts from 1 because the service dimension is already added by default - ); - expect(() => - metrics.addDimensions({ - 'another-dimension': 'another-dimension-value', - }) - ).toThrowError( - `The number of metric dimensions must be lower than ${MAX_DIMENSION_COUNT}` - ); - }); - - describe('invalid values should not be added as dimensions', () => { - const testCases = [ - { value: undefined as unknown as string, description: 'undefined' }, - { value: null as unknown as string, description: 'null' }, - { value: '', description: 'empty string' }, - ]; - - for (const { value, description } of testCases) { - it(`it should not add dimension with ${description} value and log a warning`, () => { - // Prepare - const customLogger = { - warn: jest.fn(), - debug: jest.fn(), - error: jest.fn(), - info: jest.fn(), - }; - const metrics: Metrics = new Metrics({ - namespace: TEST_NAMESPACE, - logger: customLogger, - }); - const consoleWarnSpy = jest.spyOn(customLogger, 'warn'); - const dimensionsToBeAdded: LooseObject = { - 'test-dimension-1': 'test-value-1', - 'test-dimension-2': 'test-value-2', - }; - const testDimensionName = 'test-dimension'; - - // Act - metrics.addDimensions(dimensionsToBeAdded); - metrics.addDimensions({ [testDimensionName]: value }); - - // Assess - expect(consoleWarnSpy).toHaveBeenCalledWith( - `The dimension ${testDimensionName} doesn't meet the requirements and won't be added. Ensure the dimension name and value are non empty strings` - ); - expect(metrics).toEqual( - expect.objectContaining({ - dimensions: { - 'test-dimension-1': 'test-value-1', - 'test-dimension-2': 'test-value-2', - }, - }) - ); - }); - } - }); - }); - - describe('Method: addMetadata', () => { - test('it should add metadata', () => { - // Prepare - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - - // Act - metrics.addMetadata('foo', 'bar'); - - // Assess - expect(metrics).toEqual( - expect.objectContaining({ - metadata: { foo: 'bar' }, - }) - ); - }); - - test('it should update existing metadata value if same metadata is added again', () => { - // Prepare - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - - // Act - metrics.addMetadata('foo', 'bar'); - metrics.addMetadata('foo', 'baz'); - - // Assess - expect(metrics).toEqual( - expect.objectContaining({ - metadata: { foo: 'baz' }, - }) - ); - }); - }); - - describe('Method: addMetric', () => { - test('it should store metrics when called', () => { - // Prepare - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - const metricName = 'test-metric'; - - // Act - metrics.addMetric(metricName, MetricUnit.Count, 1, MetricResolution.High); - - // Assess - expect(metrics).toEqual( - expect.objectContaining({ - storedMetrics: { - [metricName]: { - name: metricName, - resolution: MetricResolution.High, - unit: MetricUnit.Count, - value: 1, - }, - }, - }) - ); - }); - - test('it should store multiple metrics when called with multiple metric name', () => { - // Prepare - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - - // Act - metrics.addMetric( - 'test-metric-1', - MetricUnit.Count, - 1, - MetricResolution.High - ); - metrics.addMetric( - 'test-metric-2', - MetricUnit.Count, - 3, - MetricResolution.High - ); - metrics.addMetric( - 'test-metric-3', - MetricUnit.Count, - 6, - MetricResolution.High - ); - - // 
Assess - expect(metrics).toEqual( - expect.objectContaining({ - storedMetrics: { - 'test-metric-1': { - name: 'test-metric-1', - resolution: MetricResolution.High, - unit: MetricUnit.Count, - value: 1, - }, - 'test-metric-2': { - name: 'test-metric-2', - resolution: MetricResolution.High, - unit: MetricUnit.Count, - value: 3, - }, - 'test-metric-3': { - name: 'test-metric-3', - resolution: MetricResolution.High, - unit: MetricUnit.Count, - value: 6, - }, - }, - }) - ); - }); - - test('it should store metrics with standard resolution when called without resolution', () => { - // Prepare - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - - // Act - metrics.addMetric('test-metric-1', MetricUnit.Count, 1); - metrics.addMetric('test-metric-2', MetricUnit.Seconds, 3); - - // Assess - expect(metrics).toEqual( - expect.objectContaining({ - storedMetrics: { - 'test-metric-1': { - name: 'test-metric-1', - resolution: MetricResolution.Standard, - unit: MetricUnit.Count, - value: 1, - }, - 'test-metric-2': { - name: 'test-metric-2', - resolution: MetricResolution.Standard, - unit: MetricUnit.Seconds, - value: 3, - }, - }, - }) - ); - }); - - test('it should group the metric values together in an array when trying to add same metric with different values', () => { - // Prepare - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - const metricName = 'test-metric'; - - // Act - metrics.addMetric(metricName, MetricUnit.Count, 1); - metrics.addMetric(metricName, MetricUnit.Count, 5); - metrics.addMetric(metricName, MetricUnit.Count, 1); - metrics.addMetric(metricName, MetricUnit.Count, 4); - - // Assess - expect(metrics).toEqual( - expect.objectContaining({ - storedMetrics: { - [metricName]: { - name: metricName, - resolution: MetricResolution.Standard, - unit: MetricUnit.Count, - value: [1, 5, 1, 4], - }, - }, - }) - ); - }); - - test('it should throw an error when trying to add same metric with different unit', () => { - // Prepare - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - const metricName = 'test-metric'; - - // Act & Assess - expect(() => { - metrics.addMetric(metricName, MetricUnit.Count, 1); - metrics.addMetric(metricName, MetricUnit.Kilobits, 5); - }).toThrowError( - `Metric "${metricName}" has already been added with unit "${MetricUnit.Count}", but we received unit "${MetricUnit.Kilobits}". 
Did you mean to use metric unit "${MetricUnit.Count}"?` - ); - }); - - test('it should publish metrics if stored metrics count has already reached max metric size threshold & then store remaining metric', () => { - // Prepare - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - const publishStoredMetricsSpy = jest.spyOn( - metrics, - 'publishStoredMetrics' - ); - const metricName = 'test-metric'; - - // Act & Assess - expect(() => { - for (let i = 0; i < MAX_METRICS_SIZE; i++) { - metrics.addMetric(`${metricName}-${i}`, MetricUnit.Count, i); - } - }).not.toThrowError(); - // biome-ignore lint/complexity/useLiteralKeys: This needs to be accessed with literal key for testing - expect(Object.keys(metrics['storedMetrics']).length).toEqual( - MAX_METRICS_SIZE - ); - metrics.addMetric( - 'another-metric', - MetricUnit.Count, - MAX_METRICS_SIZE + 1 - ); - expect(publishStoredMetricsSpy).toHaveBeenCalledTimes(1); - expect(metrics).toEqual( - expect.objectContaining({ - storedMetrics: { - 'another-metric': { - name: 'another-metric', - resolution: MetricResolution.Standard, - unit: MetricUnit.Count, - value: MAX_METRICS_SIZE + 1, - }, - }, - }) - ); - }); - - test('it should publish metrics when the array of values reaches the maximum size', () => { - // Prepare - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - // biome-ignore lint/complexity/useLiteralKeys: This needs to be accessed with literal key for testing - const consoleSpy = jest.spyOn(metrics['console'], 'log'); - const metricName = 'test-metric'; - - // Act - for (let i = 0; i <= MAX_METRIC_VALUES_SIZE; i++) { - metrics.addMetric(`${metricName}`, MetricUnit.Count, i); - } - metrics.publishStoredMetrics(); - - // Assess - // 2 calls to console.log: 1 for the first batch of metrics, 1 for the second batch (explicit call) - expect(consoleSpy).toHaveBeenCalledTimes(2); - const firstMetricsJson = JSON.parse( - consoleSpy.mock.calls[0][0] - ) as EmfOutput; - const secondMetricsJson = JSON.parse( - consoleSpy.mock.calls[1][0] - ) as EmfOutput; - - // The first batch of values should be an array of size MAX_METRIC_VALUES_SIZE - expect(firstMetricsJson[metricName]).toHaveLength(MAX_METRIC_VALUES_SIZE); - // The second should be a single value (the last value added, which is 100 given we start from 0) - expect(secondMetricsJson[metricName]).toEqual(100); - }); - - test('it should not publish metrics if stored metrics count has not reached max metric size threshold', () => { - // Prepare - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - const publishStoredMetricsSpy = jest.spyOn( - metrics, - 'publishStoredMetrics' - ); - const metricName = 'test-metric'; - - // Act & Assess - expect(() => { - for (let i = 0; i < MAX_METRICS_SIZE - 1; i++) { - metrics.addMetric(`${metricName}-${i}`, MetricUnit.Count, i); - } - }).not.toThrowError(); - // biome-ignore lint/complexity/useLiteralKeys: This needs to be accessed with literal key for testing - expect(Object.keys(metrics['storedMetrics']).length).toEqual( - MAX_METRICS_SIZE - 1 - ); - metrics.addMetric('another-metric', MetricUnit.Count, MAX_METRICS_SIZE); - expect(publishStoredMetricsSpy).toHaveBeenCalledTimes(0); - // biome-ignore lint/complexity/useLiteralKeys: This needs to be accessed with literal key for testing - expect(Object.keys(metrics['storedMetrics']).length).toEqual( - MAX_METRICS_SIZE - ); - }); - - test('it should publish metrics on every call if singleMetric is set to true', () => { - // Prepare - const metrics: Metrics = 
new Metrics({ - namespace: TEST_NAMESPACE, - singleMetric: true, - }); - const publishStoredMetricsSpy = jest.spyOn( - metrics, - 'publishStoredMetrics' - ); - - // Act - metrics.addMetric('test-metric-1', MetricUnit.Count, 1); - metrics.addMetric('test-metric-2', MetricUnit.Bits, 100); - - // Assess - expect(publishStoredMetricsSpy).toHaveBeenCalledTimes(2); - }); - - test('it should not publish metrics on every call if singleMetric is set to false', () => { - // Prepare - const metrics: Metrics = new Metrics({ - namespace: TEST_NAMESPACE, - singleMetric: false, - }); - const publishStoredMetricsSpy = jest.spyOn( - metrics, - 'publishStoredMetrics' - ); - - // Act - metrics.addMetric('test-metric-1', MetricUnit.Count, 1); - metrics.addMetric('test-metric-2', MetricUnit.Bits, 100); - - // Assess - expect(publishStoredMetricsSpy).toHaveBeenCalledTimes(0); - }); - - test('it should not publish metrics on every call if singleMetric is not provided', () => { - // Prepare - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - const publishStoredMetricsSpy = jest.spyOn( - metrics, - 'publishStoredMetrics' - ); - - // Act - metrics.addMetric('test-metric-1', MetricUnit.Count, 1); - metrics.addMetric('test-metric-2', MetricUnit.Bits, 100); - - // Assess - expect(publishStoredMetricsSpy).toHaveBeenCalledTimes(0); - }); - }); - - describe('Methods: captureColdStartMetric', () => { - test('it should call addMetric with correct parameters', () => { - // Prepare - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - const singleMetricMock: Metrics = new Metrics({ - namespace: TEST_NAMESPACE, - singleMetric: true, - }); - const singleMetricSpy = jest - .spyOn(metrics, 'singleMetric') - .mockImplementation(() => singleMetricMock); - const addMetricSpy = jest.spyOn(singleMetricMock, 'addMetric'); - - // Act - metrics.captureColdStartMetric(); - - // Assess - expect(singleMetricSpy).toBeCalledTimes(1); - expect(addMetricSpy).toBeCalledTimes(1); - expect(addMetricSpy).toBeCalledWith( - COLD_START_METRIC, - MetricUnit.Count, - 1 - ); - }); - - test('it should call setDefaultDimensions with correct parameters', () => { - // Prepare - const defaultDimensions: Dimensions = { - foo: 'bar', - service: 'order', - }; - const metrics: Metrics = new Metrics({ - namespace: TEST_NAMESPACE, - defaultDimensions, - }); - const singleMetricMock: Metrics = new Metrics({ - namespace: TEST_NAMESPACE, - singleMetric: true, - }); - const singleMetricSpy = jest - .spyOn(metrics, 'singleMetric') - .mockImplementation(() => singleMetricMock); - const setDefaultDimensionsSpy = jest.spyOn( - singleMetricMock, - 'setDefaultDimensions' - ); - - // Act - metrics.captureColdStartMetric(); - - // Assess - expect(singleMetricSpy).toBeCalledTimes(1); - expect(setDefaultDimensionsSpy).toBeCalledTimes(1); - expect(setDefaultDimensionsSpy).toBeCalledWith({ - service: defaultDimensions.service, - }); - }); - - test('it should call setDefaultDimensions with correct parameters when defaultDimensions are not set', () => { - // Prepare - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - const singleMetricMock: Metrics = new Metrics({ - namespace: TEST_NAMESPACE, - singleMetric: true, - }); - const singleMetricSpy = jest - .spyOn(metrics, 'singleMetric') - .mockImplementation(() => singleMetricMock); - const setDefaultDimensionsSpy = jest.spyOn( - singleMetricMock, - 'setDefaultDimensions' - ); - - // Act - metrics.captureColdStartMetric(); - - // Assess - 
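// Editor's note (sketch, not part of the patch): the jest.spyOn(...).mockImplementation
// pattern used by these cold-start tests maps directly onto vi.spyOn; the import
// path and namespace below are assumptions.
import { expect, it, vi } from 'vitest';
import { Metrics } from '../../src/index.js'; // path assumed

it('routes the cold start metric through a single-metric instance', () => {
  const metrics = new Metrics({ namespace: 'test' });
  const singleMetricMock = new Metrics({ namespace: 'test', singleMetric: true });
  // mockReturnValue is sufficient here; mockImplementation(() => singleMetricMock) works too.
  const singleMetricSpy = vi
    .spyOn(metrics, 'singleMetric')
    .mockReturnValue(singleMetricMock);
  const addMetricSpy = vi.spyOn(singleMetricMock, 'addMetric');

  metrics.captureColdStartMetric();

  expect(singleMetricSpy).toHaveBeenCalledTimes(1);
  expect(addMetricSpy).toHaveBeenCalledTimes(1);
});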
expect(singleMetricSpy).toBeCalledTimes(1); - expect(setDefaultDimensionsSpy).toBeCalledTimes(1); - expect(setDefaultDimensionsSpy).toBeCalledWith({ - service: 'service_undefined', - }); - }); - - test('it should call addDimension, if functionName is set', () => { - // Prepare - const functionName = 'coldStart'; - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - metrics.setFunctionName(functionName); - const singleMetricMock: Metrics = new Metrics({ - namespace: TEST_NAMESPACE, - singleMetric: true, - }); - const singleMetricSpy = jest - .spyOn(metrics, 'singleMetric') - .mockImplementation(() => singleMetricMock); - const addDimensionSpy = jest.spyOn(singleMetricMock, 'addDimension'); - - // Act - metrics.captureColdStartMetric(); - - // Assess - expect(singleMetricSpy).toBeCalledTimes(1); - expect(addDimensionSpy).toBeCalledTimes(1); - expect(addDimensionSpy).toBeCalledWith('function_name', functionName); - }); - - test('it should not call addDimension, if functionName is not set', () => { - // Prepare - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - const singleMetricMock: Metrics = new Metrics({ - namespace: TEST_NAMESPACE, - singleMetric: true, - }); - const singleMetricSpy = jest - .spyOn(metrics, 'singleMetric') - .mockImplementation(() => singleMetricMock); - const addDimensionSpy = jest.spyOn(singleMetricMock, 'addDimension'); - - // Act - metrics.captureColdStartMetric(); - - // Assess - expect(singleMetricSpy).toBeCalledTimes(1); - expect(addDimensionSpy).toBeCalledTimes(0); - }); - - test('it should not call any function, if there is no cold start', () => { - // Prepare - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - jest.spyOn(metrics, 'isColdStart').mockImplementation(() => false); - - const singleMetricMock: Metrics = new Metrics({ - namespace: TEST_NAMESPACE, - singleMetric: true, - }); - const singleMetricSpy = jest - .spyOn(metrics, 'singleMetric') - .mockImplementation(() => singleMetricMock); - const addMetricSpy = jest.spyOn(singleMetricMock, 'addMetric'); - const setDefaultDimensionsSpy = jest.spyOn( - singleMetricMock, - 'setDefaultDimensions' - ); - const addDimensionSpy = jest.spyOn(singleMetricMock, 'addDimension'); - - // Act - metrics.captureColdStartMetric(); - - // Assess - expect(singleMetricSpy).toBeCalledTimes(0); - expect(setDefaultDimensionsSpy).toBeCalledTimes(0); - expect(addDimensionSpy).toBeCalledTimes(0); - expect(addMetricSpy).toBeCalledTimes(0); - }); - }); - - describe('Method: clearDefaultDimensions', () => { - test('it should clear all default dimensions', () => { - // Prepare - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - metrics.setDefaultDimensions({ foo: 'bar' }); - - // Act - metrics.clearDefaultDimensions(); - - // Assess - expect(metrics).toEqual( - expect.objectContaining({ - defaultDimensions: {}, - }) - ); - }); - - test('it should only clear default dimensions', () => { - // Prepare - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - metrics.setDefaultDimensions({ foo: 'bar' }); - metrics.addDimension('environment', 'dev'); - - // Act - metrics.clearDefaultDimensions(); - - // Assess - expect(metrics).toEqual( - expect.objectContaining({ - defaultDimensions: {}, - dimensions: { - environment: 'dev', - }, - }) - ); - }); - }); - - describe('Method: clearDimensions', () => { - test('it should clear all dimensions', () => { - // Prepare - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - 
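// Editor's note (sketch, not part of the patch): assertions built on
// expect.objectContaining, like the clear* tests here, carry over to vitest
// unchanged; only the imports move. Import path assumed.
import { expect, it } from 'vitest';
import { Metrics } from '../../src/index.js'; // path assumed

it('clears previously added dimensions', () => {
  const metrics = new Metrics({ namespace: 'test' });
  metrics.addDimension('foo', 'bar');

  metrics.clearDimensions();

  expect(metrics).toEqual(expect.objectContaining({ dimensions: {} }));
});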
metrics.addDimension('foo', 'bar'); - - // Act - metrics.clearDimensions(); - - // Assess - expect(metrics).toEqual( - expect.objectContaining({ - dimensions: {}, - }) - ); - }); - - test('it should only clear dimensions', () => { - // Prepare - const metrics: Metrics = new Metrics({ - defaultDimensions: { environment: 'dev' }, - }); - metrics.addDimension('foo', 'bar'); - - // Act - metrics.clearDimensions(); - - // Assess - expect(metrics).toEqual( - expect.objectContaining({ - dimensions: {}, - defaultDimensions: { - environment: 'dev', - service: 'service_undefined', - }, - }) - ); - }); - }); - - describe('Method: clearMetadata', () => { - test('it should clear all metadata', () => { - // Prepare - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - metrics.addMetadata('foo', 'bar'); - metrics.addMetadata('test', 'baz'); - - // Act - metrics.clearMetadata(); - - // Assess - expect(metrics).toEqual( - expect.objectContaining({ - metadata: {}, - }) - ); - }); - }); - - describe('Method: clearMetrics', () => { - test('it should clear stored metrics', () => { - // Prepare - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - const metricName = 'test-metric'; - - // Act - metrics.addMetric(metricName, MetricUnit.Count, 1); - metrics.clearMetrics(); - - // Assess - expect(metrics).toEqual( - expect.objectContaining({ - storedMetrics: {}, - }) - ); - }); - }); - - describe('Method: logMetrics', () => { - let metrics: Metrics; - let publishStoredMetricsSpy: jest.SpyInstance; - let addMetricSpy: jest.SpyInstance; - let captureColdStartMetricSpy: jest.SpyInstance; - let setThrowOnEmptyMetricsSpy: jest.SpyInstance; - let setDefaultDimensionsSpy: jest.SpyInstance; - const decoratorLambdaExpectedReturnValue = 'Lambda invoked!'; - const decoratorLambdaMetric = 'decorator-lambda-test-metric'; - - beforeEach(() => { - metrics = new Metrics({ namespace: TEST_NAMESPACE }); - publishStoredMetricsSpy = jest.spyOn(metrics, 'publishStoredMetrics'); - addMetricSpy = jest.spyOn(metrics, 'addMetric'); - captureColdStartMetricSpy = jest.spyOn(metrics, 'captureColdStartMetric'); - setThrowOnEmptyMetricsSpy = jest.spyOn(metrics, 'setThrowOnEmptyMetrics'); - setDefaultDimensionsSpy = jest.spyOn(metrics, 'setDefaultDimensions'); - }); - - test('it should execute lambda function & publish stored metrics', async () => { - // Prepare - const handler: Handler = setupDecoratorLambdaHandler(metrics); - - // Act - const actualResult = await handler(event, context, () => - console.log('callback') - ); - - // Assess - expect(actualResult).toEqual(decoratorLambdaExpectedReturnValue); - expect(addMetricSpy).toHaveBeenNthCalledWith( - 1, - decoratorLambdaMetric, - MetricUnit.Count, - 1 - ); - expect(publishStoredMetricsSpy).toBeCalledTimes(1); - expect(captureColdStartMetricSpy).not.toBeCalled(); - expect(setThrowOnEmptyMetricsSpy).not.toBeCalled(); - expect(setDefaultDimensionsSpy).not.toBeCalled(); - }); - - test('it should capture cold start metrics, if passed in the options as true', async () => { - // Prepare - const handler: Handler = setupDecoratorLambdaHandler(metrics, { - captureColdStartMetric: true, - }); - - // Act - const actualResult = await handler(event, context, () => - console.log('callback') - ); - - // Assess - expect(actualResult).toEqual(decoratorLambdaExpectedReturnValue); - expect(addMetricSpy).toHaveBeenNthCalledWith( - 1, - decoratorLambdaMetric, - MetricUnit.Count, - 1 - ); - expect(captureColdStartMetricSpy).toBeCalledTimes(1); - 
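// Editor's note (sketch, not part of the patch): the jest.SpyInstance annotations
// in this logMetrics suite have a vitest counterpart in MockInstance; exact
// generic parameters vary by vitest version, and the import path is assumed.
import { beforeEach, vi, type MockInstance } from 'vitest';
import { Metrics } from '../../src/index.js'; // path assumed

let metrics: Metrics;
let publishStoredMetricsSpy: MockInstance;
let addMetricSpy: MockInstance;

beforeEach(() => {
  metrics = new Metrics({ namespace: 'test' });
  publishStoredMetricsSpy = vi.spyOn(metrics, 'publishStoredMetrics');
  addMetricSpy = vi.spyOn(metrics, 'addMetric');
});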
expect(publishStoredMetricsSpy).toBeCalledTimes(1); - expect(setThrowOnEmptyMetricsSpy).not.toBeCalled(); - expect(setDefaultDimensionsSpy).not.toBeCalled(); - }); - - test('it should call throwOnEmptyMetrics, if passed in the options as true', async () => { - // Prepare - const handler: Handler = setupDecoratorLambdaHandler(metrics, { - throwOnEmptyMetrics: true, - }); - - // Act - const actualResult = await handler(event, context, () => - console.log('callback') - ); - - // Assess - expect(actualResult).toEqual(decoratorLambdaExpectedReturnValue); - expect(addMetricSpy).toHaveBeenNthCalledWith( - 1, - decoratorLambdaMetric, - MetricUnit.Count, - 1 - ); - expect(setThrowOnEmptyMetricsSpy).toBeCalledTimes(1); - expect(publishStoredMetricsSpy).toBeCalledTimes(1); - expect(captureColdStartMetricSpy).not.toBeCalled(); - expect(setDefaultDimensionsSpy).not.toBeCalled(); - }); - - test('it should set default dimensions if passed in the options as true', async () => { - // Prepare - const defaultDimensions = { - foo: 'bar', - service: 'order', - }; - const handler: Handler = setupDecoratorLambdaHandler(metrics, { - defaultDimensions, - }); - - // Act - const actualResult = await handler(event, context, () => - console.log('callback') - ); - - // Assess - expect(actualResult).toEqual(decoratorLambdaExpectedReturnValue); - expect(addMetricSpy).toHaveBeenNthCalledWith( - 1, - decoratorLambdaMetric, - MetricUnit.Count, - 1 - ); - expect(setDefaultDimensionsSpy).toHaveBeenNthCalledWith( - 1, - defaultDimensions - ); - expect(publishStoredMetricsSpy).toBeCalledTimes(1); - expect(setThrowOnEmptyMetricsSpy).not.toBeCalled(); - expect(captureColdStartMetricSpy).not.toBeCalled(); - }); - - test('it should throw error if lambda handler throws any error', async () => { - // Prepare - const errorMessage = 'Unexpected error occurred!'; - class LambdaFunction implements LambdaInterface { - @metrics.logMetrics() - public async handler( - _event: TEvent, - _context: Context - ): Promise { - throw new Error(errorMessage); - } - } - const handlerClass = new LambdaFunction(); - const handler = handlerClass.handler.bind(handlerClass); - - // Act & Assess - await expect(handler(event, context)).rejects.toThrowError(errorMessage); - }); - }); - - describe('Methods: publishStoredMetrics', () => { - test('it should log warning if no metrics are added & throwOnEmptyMetrics is false', () => { - // Prepare - const customLogger = { - warn: jest.fn(), - debug: jest.fn(), - error: jest.fn(), - info: jest.fn(), - }; - const metrics: Metrics = new Metrics({ - namespace: TEST_NAMESPACE, - logger: customLogger, - }); - const consoleWarnSpy = jest.spyOn(customLogger, 'warn'); - - // Act - metrics.publishStoredMetrics(); - - // Assess - expect(consoleWarnSpy).toHaveBeenCalledTimes(1); - expect(consoleWarnSpy).toHaveBeenCalledWith( - 'No application metrics to publish. The cold-start metric may be published if enabled. 
If application metrics should never be empty, consider using `throwOnEmptyMetrics`' - ); - }); - - test('it should call serializeMetrics && log the stringified return value of serializeMetrics', () => { - // Prepare - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - metrics.addMetric('test-metric', MetricUnit.Count, 10); - const consoleLogSpy = jest - // biome-ignore lint/complexity/useLiteralKeys: This needs to be accessed with literal key for testing - .spyOn(metrics['console'], 'log') - .mockImplementation(); - const mockData: EmfOutput = { - _aws: { - Timestamp: mockDate.getTime(), - CloudWatchMetrics: [ - { - Namespace: 'test', - Dimensions: [['service']], - Metrics: [ - { - Name: 'test-metric', - Unit: MetricUnit.Count, - }, - ], - }, - ], - }, - service: 'service_undefined', - 'test-metric': 10, - }; - const serializeMetricsSpy = jest - .spyOn(metrics, 'serializeMetrics') - .mockImplementation(() => mockData); - - // Act - metrics.publishStoredMetrics(); - - // Assess - expect(serializeMetricsSpy).toBeCalledTimes(1); - expect(consoleLogSpy).toBeCalledTimes(1); - expect(consoleLogSpy).toBeCalledWith(JSON.stringify(mockData)); - }); - - test('it should not log anything if metrics are disabled', () => { - // Prepare - process.env.POWERTOOLS_METRICS_DISABLED = 'true'; - const customLogger = { - log: jest.fn(), - warn: jest.fn(), - debug: jest.fn(), - error: jest.fn(), - info: jest.fn(), - }; - const metrics: Metrics = new Metrics({ - namespace: TEST_NAMESPACE, - logger: customLogger, - }); - const consoleLogSpy = jest.spyOn(customLogger, 'log'); - - // Act - metrics.publishStoredMetrics(); - - // Assess - expect(consoleLogSpy).toHaveBeenCalledTimes(0); - }); - - test('it should call clearMetrics function', () => { - // Prepare - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - metrics.addMetric('test-metric', MetricUnit.Count, 10); - const clearMetricsSpy = jest.spyOn(metrics, 'clearMetrics'); - - // Act - metrics.publishStoredMetrics(); - - // Assess - expect(clearMetricsSpy).toBeCalledTimes(1); - }); - - test('it should call clearDimensions function', () => { - // Prepare - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - metrics.addMetric('test-metric', MetricUnit.Count, 10); - const clearDimensionsSpy = jest.spyOn(metrics, 'clearDimensions'); - - // Act - metrics.publishStoredMetrics(); - - // Assess - expect(clearDimensionsSpy).toBeCalledTimes(1); - }); - - test('it should call clearMetadata function', () => { - // Prepare - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - metrics.addMetric('test-metric', MetricUnit.Count, 10); - const clearMetadataSpy = jest.spyOn(metrics, 'clearMetadata'); - - // Act - metrics.publishStoredMetrics(); - - // Assess - expect(clearMetadataSpy).toBeCalledTimes(1); - }); - }); - - describe('Method: serializeMetrics', () => { - const defaultServiceName = 'service_undefined'; - - test('it should print warning, if no namespace provided in constructor or environment variable', () => { - // Prepare - process.env.POWERTOOLS_METRICS_NAMESPACE = ''; - const customLogger = { - warn: jest.fn(), - debug: jest.fn(), - error: jest.fn(), - info: jest.fn(), - }; - const metrics: Metrics = new Metrics({ logger: customLogger }); - const consoleWarnSpy = jest.spyOn(customLogger, 'warn'); - - // Act - metrics.serializeMetrics(); - - // Assess - expect(consoleWarnSpy).toBeCalledWith( - 'Namespace should be defined, default used' - ); - }); - - test('it should return right object 
compliant with Cloudwatch EMF', () => { - // Prepare - const metrics: Metrics = new Metrics({ - namespace: TEST_NAMESPACE, - serviceName: 'test-service', - defaultDimensions: { - environment: 'dev', - }, - }); - - // Act - metrics.addMetric('successfulBooking', MetricUnit.Count, 1); - metrics.addMetric('successfulBooking', MetricUnit.Count, 3); - metrics.addMetric( - 'failedBooking', - MetricUnit.Count, - 1, - MetricResolution.High - ); - const loggedData = metrics.serializeMetrics(); - - // Assess - expect(loggedData).toEqual({ - _aws: { - Timestamp: mockDate.getTime(), - CloudWatchMetrics: [ - { - Namespace: TEST_NAMESPACE, - Dimensions: [['service', 'environment']], - Metrics: [ - { - Name: 'successfulBooking', - Unit: MetricUnit.Count, - }, - { - Name: 'failedBooking', - Unit: MetricUnit.Count, - StorageResolution: 1, - }, - ], - }, - ], - }, - environment: 'dev', - service: 'test-service', - successfulBooking: [1, 3], - failedBooking: 1, - }); - }); - - test('it should log service dimension correctly when passed', () => { - // Prepare - const serviceName = 'test-service'; - const testMetric = 'test-metric'; - const metrics: Metrics = new Metrics({ - serviceName: serviceName, - namespace: TEST_NAMESPACE, - }); - - // Act - metrics.addMetric(testMetric, MetricUnit.Count, 10); - const loggedData = metrics.serializeMetrics(); - - // Assess - expect(loggedData.service).toEqual(serviceName); - expect(loggedData).toEqual({ - _aws: { - CloudWatchMetrics: [ - { - Dimensions: [['service']], - Metrics: [ - { - Name: testMetric, - Unit: MetricUnit.Count, - }, - ], - Namespace: TEST_NAMESPACE, - }, - ], - Timestamp: mockDate.getTime(), - }, - service: serviceName, - [testMetric]: 10, - }); - }); - - test('it should log service dimension correctly using environment variable when not specified in constructor', () => { - // Prepare - const serviceName = 'hello-world-service'; - process.env.POWERTOOLS_SERVICE_NAME = serviceName; - const testMetric = 'test-metric'; - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - - // Act - metrics.addMetric(testMetric, MetricUnit.Count, 10); - const loggedData = metrics.serializeMetrics(); - - // Assess - expect(loggedData.service).toEqual(serviceName); - expect(loggedData).toEqual({ - _aws: { - CloudWatchMetrics: [ - { - Dimensions: [['service']], - Metrics: [ - { - Name: testMetric, - Unit: MetricUnit.Count, - }, - ], - Namespace: TEST_NAMESPACE, - }, - ], - Timestamp: mockDate.getTime(), - }, - service: serviceName, - [testMetric]: 10, - }); - }); - - test('it should log default dimensions correctly', () => { - // Prepare - const additionalDimensions = { - foo: 'bar', - env: 'dev', - }; - const testMetric = 'test-metric'; - const metrics: Metrics = new Metrics({ - defaultDimensions: additionalDimensions, - namespace: TEST_NAMESPACE, - }); - - // Act - metrics.addMetric(testMetric, MetricUnit.Count, 10); - const loggedData = metrics.serializeMetrics(); - - // Assess - expect(loggedData._aws.CloudWatchMetrics[0].Dimensions[0].length).toEqual( - 3 - ); - expect(loggedData.service).toEqual(defaultServiceName); - expect(loggedData.foo).toEqual(additionalDimensions.foo); - expect(loggedData.env).toEqual(additionalDimensions.env); - expect(loggedData).toEqual({ - _aws: { - CloudWatchMetrics: [ - { - Dimensions: [['service', 'foo', 'env']], - Metrics: [ - { - Name: testMetric, - Unit: MetricUnit.Count, - }, - ], - Namespace: TEST_NAMESPACE, - }, - ], - Timestamp: mockDate.getTime(), - }, - service: 'service_undefined', - [testMetric]: 10, - env: 
'dev', - foo: 'bar', - }); - }); - - test('it should log dimensions once when default dimensions are set and addDimension is called', () => { - // Prepare - const additionalDimensions = { - foo: 'bar', - env: 'dev', - }; - const testMetric = 'test-metric'; - const metrics: Metrics = new Metrics({ - defaultDimensions: additionalDimensions, - namespace: TEST_NAMESPACE, - }); - - // Act - metrics.addMetric(testMetric, MetricUnit.Count, 10); - metrics.addDimension('foo', 'baz'); - const loggedData = metrics.serializeMetrics(); - - // Assess - expect(loggedData._aws.CloudWatchMetrics[0].Dimensions[0].length).toEqual( - 3 - ); - expect(loggedData.service).toEqual(defaultServiceName); - expect(loggedData.foo).toEqual('baz'); - expect(loggedData.env).toEqual(additionalDimensions.env); - expect(loggedData).toEqual({ - _aws: { - CloudWatchMetrics: [ - { - Dimensions: [['service', 'foo', 'env']], - Metrics: [ - { - Name: testMetric, - Unit: MetricUnit.Count, - }, - ], - Namespace: TEST_NAMESPACE, - }, - ], - Timestamp: mockDate.getTime(), - }, - service: 'service_undefined', - [testMetric]: 10, - env: 'dev', - foo: 'baz', - }); - }); - - test('it should log additional dimensions correctly', () => { - // Prepare - const testMetric = 'test-metric'; - const additionalDimension = { name: 'metric2', value: 'metric2Value' }; - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - - // Act - metrics.addMetric( - 'test-metric', - MetricUnit.Count, - 10, - MetricResolution.High - ); - metrics.addDimension(additionalDimension.name, additionalDimension.value); - const loggedData = metrics.serializeMetrics(); - - // Assess - expect(loggedData._aws.CloudWatchMetrics[0].Dimensions[0].length).toEqual( - 2 - ); - expect(loggedData.service).toEqual(defaultServiceName); - expect(loggedData[additionalDimension.name]).toEqual( - additionalDimension.value - ); - expect(loggedData).toEqual({ - _aws: { - CloudWatchMetrics: [ - { - Dimensions: [['service', 'metric2']], - Metrics: [ - { - Name: testMetric, - StorageResolution: 1, - Unit: MetricUnit.Count, - }, - ], - Namespace: TEST_NAMESPACE, - }, - ], - Timestamp: mockDate.getTime(), - }, - service: 'service_undefined', - [testMetric]: 10, - metric2: 'metric2Value', - }); - }); - - test('it should log additional bulk dimensions correctly', () => { - // Prepare - const testMetric = 'test-metric'; - const additionalDimensions: LooseObject = { - metric2: 'metric2Value', - metric3: 'metric3Value', - }; - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - - // Act - metrics.addMetric( - testMetric, - MetricUnit.Count, - 10, - MetricResolution.High - ); - metrics.addDimensions(additionalDimensions); - const loggedData = metrics.serializeMetrics(); - - // Assess - expect(loggedData._aws.CloudWatchMetrics[0].Dimensions[0].length).toEqual( - 3 - ); - expect(loggedData.service).toEqual(defaultServiceName); - for (const key of Object.keys(additionalDimensions)) { - expect(loggedData[key]).toEqual(additionalDimensions[key]); - } - expect(loggedData).toEqual({ - _aws: { - CloudWatchMetrics: [ - { - Dimensions: [['service', 'metric2', 'metric3']], - Metrics: [ - { - Name: testMetric, - StorageResolution: 1, - Unit: MetricUnit.Count, - }, - ], - Namespace: TEST_NAMESPACE, - }, - ], - Timestamp: mockDate.getTime(), - }, - service: 'service_undefined', - [testMetric]: 10, - metric2: 'metric2Value', - metric3: 'metric3Value', - }); - }); - - test('it should log metadata correctly', () => { - // Prepare - const testMetric = 'test-metric'; - const metrics: 
Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - - // Act - metrics.addMetric(testMetric, MetricUnit.Count, 10); - metrics.addMetadata('foo', 'bar'); - const loggedData = metrics.serializeMetrics(); - - // Assess - expect(loggedData.foo).toEqual('bar'); - expect(loggedData).toEqual({ - _aws: { - CloudWatchMetrics: [ - { - Dimensions: [['service']], - Metrics: [ - { - Name: testMetric, - Unit: MetricUnit.Count, - }, - ], - Namespace: TEST_NAMESPACE, - }, - ], - Timestamp: mockDate.getTime(), - }, - service: 'service_undefined', - [testMetric]: 10, - foo: 'bar', - }); - }); - - test('it should throw error on empty metrics when throwOnEmptyMetrics is true', () => { - // Prepare - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - - // Act - metrics.throwOnEmptyMetrics(); - - // Assess - expect(() => metrics.serializeMetrics()).toThrow( - 'The number of metrics recorded must be higher than zero' - ); - }); - - test('it should use the default namespace when no namespace is provided in constructor or found in environment variable', () => { - // Prepare - process.env.POWERTOOLS_METRICS_NAMESPACE = ''; - const testMetric = 'test-metric'; - const metrics: Metrics = new Metrics(); - - // Act - metrics.addMetric(testMetric, MetricUnit.Count, 10); - const loggedData = metrics.serializeMetrics(); - - // Assess - expect(loggedData._aws.CloudWatchMetrics[0].Namespace).toEqual( - DEFAULT_NAMESPACE - ); - expect(loggedData).toEqual({ - _aws: { - CloudWatchMetrics: [ - { - Dimensions: [['service']], - Metrics: [ - { - Name: testMetric, - Unit: MetricUnit.Count, - }, - ], - Namespace: DEFAULT_NAMESPACE, - }, - ], - Timestamp: mockDate.getTime(), - }, - service: 'service_undefined', - [testMetric]: 10, - }); - }); - - test('it should use namespace provided in constructor', () => { - // Prepare - const testMetric = 'test-metric'; - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - - // Act - metrics.addMetric(testMetric, MetricUnit.Count, 10); - const loggedData = metrics.serializeMetrics(); - - // Assess - expect(loggedData._aws.CloudWatchMetrics[0].Namespace).toEqual( - TEST_NAMESPACE - ); - expect(loggedData).toEqual({ - _aws: { - CloudWatchMetrics: [ - { - Dimensions: [['service']], - Metrics: [ - { - Name: testMetric, - Unit: MetricUnit.Count, - }, - ], - Namespace: TEST_NAMESPACE, - }, - ], - Timestamp: mockDate.getTime(), - }, - service: 'service_undefined', - [testMetric]: 10, - }); - }); - - test('it should contain a metric value if added once', () => { - // Prepare - const metricName = 'test-metric'; - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - - // Act - metrics.addMetric(metricName, MetricUnit.Count, 10); - const loggedData = metrics.serializeMetrics(); - - // Assess - expect(loggedData._aws.CloudWatchMetrics[0].Metrics.length).toBe(1); - expect(loggedData[metricName]).toEqual(10); - expect(loggedData).toEqual({ - _aws: { - CloudWatchMetrics: [ - { - Dimensions: [['service']], - Metrics: [ - { - Name: metricName, - Unit: MetricUnit.Count, - }, - ], - Namespace: TEST_NAMESPACE, - }, - ], - Timestamp: mockDate.getTime(), - }, - service: 'service_undefined', - [metricName]: 10, - }); - }); - - test('it should convert metric value with the same name and unit to array if added multiple times', () => { - // Prepare - const metricName = 'test-metric'; - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - - // Act - metrics.addMetric(metricName, MetricUnit.Count, 10); - metrics.addMetric(metricName, 
MetricUnit.Count, 20); - const loggedData = metrics.serializeMetrics(); - - // Assess - expect(loggedData._aws.CloudWatchMetrics[0].Metrics.length).toBe(1); - expect(loggedData[metricName]).toEqual([10, 20]); - expect(loggedData).toEqual({ - _aws: { - CloudWatchMetrics: [ - { - Dimensions: [['service']], - Metrics: [ - { - Name: metricName, - Unit: MetricUnit.Count, - }, - ], - Namespace: TEST_NAMESPACE, - }, - ], - Timestamp: mockDate.getTime(), - }, - service: 'service_undefined', - [metricName]: [10, 20], - }); - }); - - test('it should create multiple metric values if added multiple times', () => { - // Prepare - const metricName1 = 'test-metric-1'; - const metricName2 = 'test-metric-2'; - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - - // Act - metrics.addMetric(metricName1, MetricUnit.Count, 10); - metrics.addMetric(metricName2, MetricUnit.Seconds, 20); - const loggedData = metrics.serializeMetrics(); - - // Assess - expect(loggedData._aws.CloudWatchMetrics[0].Metrics.length).toBe(2); - expect(loggedData[metricName1]).toEqual(10); - expect(loggedData[metricName2]).toEqual(20); - expect(loggedData).toEqual({ - _aws: { - CloudWatchMetrics: [ - { - Dimensions: [['service']], - Metrics: [ - { - Name: metricName1, - Unit: MetricUnit.Count, - }, - { - Name: metricName2, - Unit: MetricUnit.Seconds, - }, - ], - Namespace: TEST_NAMESPACE, - }, - ], - Timestamp: mockDate.getTime(), - }, - service: 'service_undefined', - [metricName1]: 10, - [metricName2]: 20, - }); - }); - - test('it should not contain `StorageResolution` as key for non-high resolution metrics', () => { - // Prepare - const metricName = 'test-metric'; - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - - // Act - metrics.addMetric(metricName, MetricUnit.Count, 10); - const loggedData = metrics.serializeMetrics(); - - // Assess - expect(loggedData._aws.CloudWatchMetrics[0].Metrics.length).toBe(1); - expect( - loggedData._aws.CloudWatchMetrics[0].Metrics[0].StorageResolution - ).toBeUndefined(); - expect(loggedData).toEqual({ - _aws: { - CloudWatchMetrics: [ - { - Dimensions: [['service']], - Metrics: [ - { - Name: metricName, - Unit: MetricUnit.Count, - }, - ], - Namespace: TEST_NAMESPACE, - }, - ], - Timestamp: mockDate.getTime(), - }, - service: 'service_undefined', - [metricName]: 10, - }); - }); - - test('it should contain `StorageResolution` as key & high metric resolution as value for high resolution metrics', () => { - // Prepare - const metricName1 = 'test-metric'; - const metricName2 = 'test-metric-2'; - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - - // Act - metrics.addMetric(metricName1, MetricUnit.Count, 10); - metrics.addMetric( - metricName2, - MetricUnit.Seconds, - 10, - MetricResolution.High - ); - const loggedData = metrics.serializeMetrics(); - - // Assess - expect(loggedData._aws.CloudWatchMetrics[0].Metrics.length).toBe(2); - expect( - loggedData._aws.CloudWatchMetrics[0].Metrics[0].StorageResolution - ).toBeUndefined(); - expect( - loggedData._aws.CloudWatchMetrics[0].Metrics[1].StorageResolution - ).toEqual(MetricResolution.High); - expect(loggedData).toEqual({ - _aws: { - CloudWatchMetrics: [ - { - Dimensions: [['service']], - Metrics: [ - { - Name: metricName1, - Unit: MetricUnit.Count, - }, - { - Name: metricName2, - StorageResolution: 1, - Unit: MetricUnit.Seconds, - }, - ], - Namespace: TEST_NAMESPACE, - }, - ], - Timestamp: mockDate.getTime(), - }, - service: 'service_undefined', - [metricName1]: 10, - [metricName2]: 10, - }); - 
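// Editor's note (sketch, not part of the patch): tests in this suite toggle
// POWERTOOLS_* variables by mutating process.env directly; vitest also offers
// vi.stubEnv/vi.unstubAllEnvs for the same purpose. Values shown are examples.
import { afterEach, beforeEach, vi } from 'vitest';

beforeEach(() => {
  vi.stubEnv('POWERTOOLS_METRICS_NAMESPACE', '');
  vi.stubEnv('POWERTOOLS_METRICS_DISABLED', 'true');
});

afterEach(() => {
  // Undo every stubbed variable so later tests see the real environment.
  vi.unstubAllEnvs();
});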
}); - }); - - describe('Method: setDefaultDimensions', () => { - test('it should set default dimensions correctly when service name is provided', () => { - // Prepare - const serviceName = 'test-service'; - const metrics: Metrics = new Metrics({ serviceName: serviceName }); - const defaultDimensionsToBeAdded = { - environment: 'dev', - foo: 'bar', - }; - - // Act - metrics.setDefaultDimensions(defaultDimensionsToBeAdded); - - // Assess - expect(metrics).toEqual( - expect.objectContaining({ - defaultDimensions: { - ...defaultDimensionsToBeAdded, - service: serviceName, - }, - }) - ); - }); - - test('it should set default dimensions correctly when service name is not provided', () => { - // Prepare - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - const defaultDimensionsToBeAdded = { - environment: 'dev', - foo: 'bar', - }; - - // Act - metrics.setDefaultDimensions(defaultDimensionsToBeAdded); - - // Assess - expect(metrics).toEqual( - expect.objectContaining({ - defaultDimensions: { - ...defaultDimensionsToBeAdded, - service: 'service_undefined', - }, - }) - ); - }); - - test('it should add default dimensions', () => { - // Prepare - const serviceName = 'test-service'; - const metrics: Metrics = new Metrics({ - namespace: TEST_NAMESPACE, - serviceName, - defaultDimensions: { 'test-dimension': 'test-dimension-value' }, - }); - const defaultDimensionsToBeAdded = { - environment: 'dev', - foo: 'bar', - }; - - // Act - metrics.setDefaultDimensions(defaultDimensionsToBeAdded); - - // Assess - expect(metrics).toEqual( - expect.objectContaining({ - defaultDimensions: { - ...defaultDimensionsToBeAdded, - service: serviceName, - 'test-dimension': 'test-dimension-value', - }, - }) - ); - }); - - test('it should update already added default dimensions values', () => { - // Prepare - const serviceName = 'test-service'; - const metrics: Metrics = new Metrics({ - namespace: TEST_NAMESPACE, - serviceName, - defaultDimensions: { - environment: 'dev', - }, - }); - const defaultDimensionsToBeAdded = { - environment: 'prod', - foo: 'bar', - }; - - // Act - metrics.setDefaultDimensions(defaultDimensionsToBeAdded); - - // Assess - expect(metrics).toEqual( - expect.objectContaining({ - defaultDimensions: { - foo: 'bar', - service: serviceName, - environment: 'prod', - }, - }) - ); - }); - - test('it should throw error if number of default dimensions reaches the maximum allowed', () => { - // Prepare - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - const dimensionName = 'test-dimension'; - const dimensionValue = 'test-value'; - const defaultDimensions: LooseObject = {}; - - // Starts from 1 because the service dimension is already added by default - for (let i = 1; i < MAX_DIMENSION_COUNT - 1; i++) { - defaultDimensions[`${dimensionName}-${i}`] = `${dimensionValue}-${i}`; - } - - // Act & Assess - expect(() => - metrics.setDefaultDimensions(defaultDimensions) - ).not.toThrowError(); - // biome-ignore lint/complexity/useLiteralKeys: This needs to be accessed with literal key for testing - expect(Object.keys(metrics['defaultDimensions']).length).toBe( - MAX_DIMENSION_COUNT - 1 - ); - expect(() => { - metrics.setDefaultDimensions({ - 'another-dimension': 'another-dimension-value', - }); - }).toThrowError('Max dimension count hit'); - }); - - test('it should consider default dimensions provided in constructor, while throwing error if number of default dimensions reaches the maximum allowed', () => { - // Prepare - const initialDefaultDimensions: LooseObject = { - 
'test-dimension': 'test-value', - environment: 'dev', - }; - const metrics: Metrics = new Metrics({ - namespace: TEST_NAMESPACE, - defaultDimensions: initialDefaultDimensions, - }); - const dimensionName = 'test-dimension'; - const dimensionValue = 'test-value'; - const defaultDimensions: LooseObject = {}; - - // Starts from 3 because the service dimension is already added by default & two dimensions are already added in the constructor - for (let i = 3; i < MAX_DIMENSION_COUNT - 1; i++) { - defaultDimensions[`${dimensionName}-${i}`] = `${dimensionValue}-${i}`; - } - - // Act & Assess - expect(() => - metrics.setDefaultDimensions(defaultDimensions) - ).not.toThrowError(); - // biome-ignore lint/complexity/useLiteralKeys: This needs to be accessed with literal key for testing - expect(Object.keys(metrics['defaultDimensions']).length).toBe( - MAX_DIMENSION_COUNT - 1 - ); - expect(() => { - metrics.setDefaultDimensions({ - 'another-dimension': 'another-dimension-value', - }); - }).toThrowError('Max dimension count hit'); - }); - }); - - describe('Method: setFunctionName', () => { - test('it should set the function name', () => { - // Prepare - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - - // Act - metrics.setFunctionName('test-function'); - - // Assess - expect(metrics).toEqual( - expect.objectContaining({ - functionName: 'test-function', - }) - ); - }); - }); - - describe('Method: singleMetric', () => { - test('it should return a single Metric object', () => { - // Prepare - const defaultDimensions = { - foo: 'bar', - service: 'order', - }; - const metrics: Metrics = new Metrics({ - namespace: TEST_NAMESPACE, - defaultDimensions, - singleMetric: false, - }); - - // Act - const singleMetric = metrics.singleMetric(); - - //Asses - expect(singleMetric).toEqual( - expect.objectContaining({ - isSingleMetric: true, - namespace: TEST_NAMESPACE, - defaultDimensions, - }) - ); - }); - }); - - describe('Method: throwOnEmptyMetrics', () => { - test('it should set the throwOnEmptyMetrics flag to true', () => { - // Prepare - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - - // Act - metrics.throwOnEmptyMetrics(); - - // Assess - expect(metrics).toEqual( - expect.objectContaining({ - shouldThrowOnEmptyMetrics: true, - }) - ); - }); - }); - - describe('Feature: POWERTOOLS_DEV', () => { - it('uses the global console object when the environment variable is set', () => { - // Prepare - process.env.POWERTOOLS_DEV = 'true'; - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - - // Act & Assess - // biome-ignore lint/complexity/useLiteralKeys: This needs to be accessed with literal key for testing - expect(metrics['console']).toEqual(console); - }); - }); - - describe('Method: isDisabled', () => { - it('should be enabled by default', () => { - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - - // Act & Assess - // biome-ignore lint/complexity/useLiteralKeys: accessing protected method - expect(metrics['isDisabled']()).toBe(false); - }); - - it('should be disabled if POWERTOOLS_DEV is set to true', () => { - process.env.POWERTOOLS_DEV = 'true'; - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - - // Act & Assess - // biome-ignore lint/complexity/useLiteralKeys: accessing protected method - expect(metrics['isDisabled']()).toBe(true); - }); - - it('should be disabled if POWERTOOLS_METRICS_DISABLED is set to true', () => { - // Prepare - process.env.POWERTOOLS_METRICS_DISABLED = 'true'; - const metrics: 
Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - - // Act & Assess - // biome-ignore lint/complexity/useLiteralKeys: accessing protected method - expect(metrics['isDisabled']()).toBe(true); - }); - - it('should be enabled if POWERTOOLS_METRICS_DISABLED is set to false', () => { - process.env.POWERTOOLS_METRICS_DISABLED = 'false'; - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - - // Act & Assess - // biome-ignore lint/complexity/useLiteralKeys: accessing protected method - expect(metrics['isDisabled']()).toBe(false); - }); - - it('should be enabled if POWERTOOLS_DEV is set to true and POWERTOOLS_METRICS_DISABLED is set to false', () => { - process.env.POWERTOOLS_DEV = 'true'; - process.env.POWERTOOLS_METRICS_DISABLED = 'false'; - const metrics: Metrics = new Metrics({ namespace: TEST_NAMESPACE }); - - // Act & Assess - // biome-ignore lint/complexity/useLiteralKeys: accessing protected method - expect(metrics['isDisabled']()).toBe(false); - }); - }); - - describe('Method: setTimestamp', () => { - const testCases = [ - { - format: 'milliseconds', - getTimestamp: (timestampMs: number) => timestampMs, - }, - { - format: 'Date object', - getTimestamp: (timestampMs: number) => new Date(timestampMs), - }, - ]; - - for (const { format, getTimestamp } of testCases) { - describe(`when timestamp is provided as ${format}`, () => { - test('should set the timestamp if provided in the future', () => { - // Prepare - const testMetric = 'test-metric'; - const metrics: Metrics = new Metrics(); - const timestampMs = mockDate.getTime() + 10 * 60 * 1000; // Add 10 minutes in milliseconds - - // Act - metrics.addMetric(testMetric, MetricUnit.Count, 10); - metrics.setTimestamp(getTimestamp(timestampMs)); - const loggedData = metrics.serializeMetrics(); - - // Assess - expect(loggedData).toEqual( - expect.objectContaining({ - _aws: expect.objectContaining({ - Timestamp: timestampMs, - }), - }) - ); - }); - - test('should set the timestamp if provided in the past', () => { - // Prepare - const testMetric = 'test-metric'; - const metrics: Metrics = new Metrics(); - const timestampMs = mockDate.getTime() - 10 * 60 * 1000; // Subtract 10 minutes in milliseconds - - // Act - metrics.addMetric(testMetric, MetricUnit.Count, 10); - metrics.setTimestamp(getTimestamp(timestampMs)); - const loggedData = metrics.serializeMetrics(); - - // Assess - expect(loggedData).toEqual( - expect.objectContaining({ - _aws: expect.objectContaining({ - Timestamp: timestampMs, - }), - }) - ); - }); - - test('should not log a warning if the timestamp is within valid future range', () => { - // Prepare - const testMetric = 'test-metric'; - const timestampMs = mockDate.getTime() + EMF_MAX_TIMESTAMP_FUTURE_AGE; - const customLogger = { - warn: jest.fn(), - debug: jest.fn(), - error: jest.fn(), - info: jest.fn(), - }; - const metrics: Metrics = new Metrics({ logger: customLogger }); - const consoleWarnSpy = jest.spyOn(customLogger, 'warn'); - - // Act - metrics.addMetric(testMetric, MetricUnit.Count, 10); - metrics.setTimestamp(getTimestamp(timestampMs)); - const loggedData = metrics.serializeMetrics(); - - // Assess - expect(consoleWarnSpy).not.toHaveBeenCalled(); - expect(loggedData).toEqual( - expect.objectContaining({ - _aws: expect.objectContaining({ - Timestamp: timestampMs, - }), - }) - ); - }); - - test('should log a warning if the timestamp is more than the future range but still set the timestamp', () => { - // Prepare - const testMetric = 'test-metric'; - const timestampMs = - mockDate.getTime() + 
EMF_MAX_TIMESTAMP_FUTURE_AGE + 1; - const customLogger = { - warn: jest.fn(), - debug: jest.fn(), - error: jest.fn(), - info: jest.fn(), - }; - const metrics: Metrics = new Metrics({ logger: customLogger }); - const consoleWarnSpy = jest.spyOn(customLogger, 'warn'); - - // Act - metrics.addMetric(testMetric, MetricUnit.Count, 10); - metrics.setTimestamp(getTimestamp(timestampMs)); - const loggedData = metrics.serializeMetrics(); - - // Assess - expect(consoleWarnSpy).toHaveBeenCalledWith( - "This metric doesn't meet the requirements and will be skipped by Amazon CloudWatch. " + - 'Ensure the timestamp is within 14 days in the past or up to 2 hours in the future and is also a valid number or Date object.' - ); - expect(loggedData).toEqual( - expect.objectContaining({ - _aws: expect.objectContaining({ - Timestamp: timestampMs, - }), - }) - ); - }); - - test('should not log a warning if the timestamp is within past range and set the timestamp', () => { - // Prepare - const testMetric = 'test-metric'; - const timestampMs = mockDate.getTime() - EMF_MAX_TIMESTAMP_PAST_AGE; - const customLogger = { - warn: jest.fn(), - debug: jest.fn(), - error: jest.fn(), - info: jest.fn(), - }; - const metrics: Metrics = new Metrics({ logger: customLogger }); - const consoleWarnSpy = jest.spyOn(customLogger, 'warn'); - - // Act - metrics.addMetric(testMetric, MetricUnit.Count, 10); - metrics.setTimestamp(getTimestamp(timestampMs)); - const loggedData = metrics.serializeMetrics(); - - // Assess - expect(consoleWarnSpy).not.toHaveBeenCalled(); - expect(loggedData).toEqual( - expect.objectContaining({ - _aws: expect.objectContaining({ - Timestamp: timestampMs, - }), - }) - ); - }); - - test('should log a warning if the timestamp is more than past range but still set the timestamp', () => { - // Prepare - const testMetric = 'test-metric'; - const timestampMs = - mockDate.getTime() - EMF_MAX_TIMESTAMP_PAST_AGE - 1; - const customLogger = { - warn: jest.fn(), - debug: jest.fn(), - error: jest.fn(), - info: jest.fn(), - }; - const metrics: Metrics = new Metrics({ logger: customLogger }); - const consoleWarnSpy = jest.spyOn(customLogger, 'warn'); - - // Act - metrics.addMetric(testMetric, MetricUnit.Count, 10); - metrics.setTimestamp(getTimestamp(timestampMs)); - const loggedData = metrics.serializeMetrics(); - - // Assess - expect(consoleWarnSpy).toHaveBeenCalledWith( - "This metric doesn't meet the requirements and will be skipped by Amazon CloudWatch. " + - 'Ensure the timestamp is within 14 days in the past or up to 2 hours in the future and is also a valid number or Date object.' - ); - expect(loggedData).toEqual( - expect.objectContaining({ - _aws: expect.objectContaining({ - Timestamp: timestampMs, - }), - }) - ); - }); - }); - } - - test('should log warning and set timestamp to 0 if not a number provided', () => { - // Prepare - const testMetric = 'test-metric'; - const customLogger = { - warn: jest.fn(), - debug: jest.fn(), - error: jest.fn(), - info: jest.fn(), - }; - const metrics: Metrics = new Metrics({ logger: customLogger }); - const consoleWarnSpy = jest.spyOn(customLogger, 'warn'); - - // Act - metrics.addMetric(testMetric, MetricUnit.Count, 10); - metrics.setTimestamp(Number.NaN); - const loggedData = metrics.serializeMetrics(); - - // Assess - expect(consoleWarnSpy).toHaveBeenCalledWith( - "This metric doesn't meet the requirements and will be skipped by Amazon CloudWatch. 
" + - 'Ensure the timestamp is within 14 days in the past or up to 2 hours in the future and is also a valid number or Date object.' - ); - expect(loggedData).toEqual( - expect.objectContaining({ - _aws: expect.objectContaining({ - Timestamp: 0, - }), - }) - ); - }); - - test('should log warning and set timestamp to 0 if not a integer number provided', () => { - // Prepare - const testMetric = 'test-metric'; - const customLogger = { - warn: jest.fn(), - debug: jest.fn(), - error: jest.fn(), - info: jest.fn(), - }; - const metrics: Metrics = new Metrics({ logger: customLogger }); - const consoleWarnSpy = jest.spyOn(customLogger, 'warn'); - - // Act - metrics.addMetric(testMetric, MetricUnit.Count, 10); - metrics.setTimestamp(1.1); - const loggedData = metrics.serializeMetrics(); - - // Assess - expect(consoleWarnSpy).toHaveBeenCalledWith( - "This metric doesn't meet the requirements and will be skipped by Amazon CloudWatch. " + - 'Ensure the timestamp is within 14 days in the past or up to 2 hours in the future and is also a valid number or Date object.' - ); - expect(loggedData).toEqual( - expect.objectContaining({ - _aws: expect.objectContaining({ - Timestamp: 0, - }), - }) - ); - }); - }); -}); diff --git a/packages/metrics/tests/unit/coldStartMetric.test.ts b/packages/metrics/tests/unit/coldStartMetric.test.ts index b94f999c03..e73ff4ce7d 100644 --- a/packages/metrics/tests/unit/coldStartMetric.test.ts +++ b/packages/metrics/tests/unit/coldStartMetric.test.ts @@ -6,7 +6,11 @@ describe('ColdStart metric', () => { const ENVIRONMENT_VARIABLES = process.env; beforeEach(() => { - process.env = { ...ENVIRONMENT_VARIABLES, POWERTOOLS_DEV: 'true' }; + process.env = { + ...ENVIRONMENT_VARIABLES, + POWERTOOLS_DEV: 'true', + POWERTOOLS_METRICS_DISABLED: 'false', + }; vi.resetAllMocks(); }); diff --git a/packages/metrics/tests/unit/creatingMetrics.test.ts b/packages/metrics/tests/unit/creatingMetrics.test.ts index d241fa0a98..80705c9ef5 100644 --- a/packages/metrics/tests/unit/creatingMetrics.test.ts +++ b/packages/metrics/tests/unit/creatingMetrics.test.ts @@ -10,7 +10,11 @@ describe('Creating metrics', () => { const ENVIRONMENT_VARIABLES = process.env; beforeEach(() => { - process.env = { ...ENVIRONMENT_VARIABLES, POWERTOOLS_DEV: 'true' }; + process.env = { + ...ENVIRONMENT_VARIABLES, + POWERTOOLS_DEV: 'true', + POWERTOOLS_METRICS_DISABLED: 'false', + }; vi.resetAllMocks(); }); diff --git a/packages/metrics/tests/unit/customTimestamp.test.ts b/packages/metrics/tests/unit/customTimestamp.test.ts index 84e432e3a1..4512e15c62 100644 --- a/packages/metrics/tests/unit/customTimestamp.test.ts +++ b/packages/metrics/tests/unit/customTimestamp.test.ts @@ -10,7 +10,11 @@ describe('Setting custom timestamp', () => { const ENVIRONMENT_VARIABLES = process.env; beforeEach(() => { - process.env = { ...ENVIRONMENT_VARIABLES, POWERTOOLS_DEV: 'true' }; + process.env = { + ...ENVIRONMENT_VARIABLES, + POWERTOOLS_DEV: 'true', + POWERTOOLS_METRICS_DISABLED: 'false', + }; vi.resetAllMocks(); vi.useFakeTimers().setSystemTime(new Date()); }); diff --git a/packages/metrics/tests/unit/dimensions.test.ts b/packages/metrics/tests/unit/dimensions.test.ts index b4d99d87b6..04f2aa0364 100644 --- a/packages/metrics/tests/unit/dimensions.test.ts +++ b/packages/metrics/tests/unit/dimensions.test.ts @@ -6,7 +6,11 @@ describe('Working with dimensions', () => { const ENVIRONMENT_VARIABLES = process.env; beforeEach(() => { - process.env = { ...ENVIRONMENT_VARIABLES, POWERTOOLS_DEV: 'true' }; + process.env = { + 
...ENVIRONMENT_VARIABLES, + POWERTOOLS_DEV: 'true', + POWERTOOLS_METRICS_DISABLED: 'false', + }; vi.resetAllMocks(); }); diff --git a/packages/metrics/tests/unit/initializeMetrics.test.ts b/packages/metrics/tests/unit/initializeMetrics.test.ts index 04742f9f15..b76c4cb4e0 100644 --- a/packages/metrics/tests/unit/initializeMetrics.test.ts +++ b/packages/metrics/tests/unit/initializeMetrics.test.ts @@ -7,7 +7,11 @@ describe('Initialize Metrics', () => { const ENVIRONMENT_VARIABLES = process.env; beforeEach(() => { - process.env = { ...ENVIRONMENT_VARIABLES, POWERTOOLS_DEV: 'true' }; + process.env = { + ...ENVIRONMENT_VARIABLES, + POWERTOOLS_DEV: 'true', + POWERTOOLS_METRICS_DISABLED: 'false', + }; vi.resetAllMocks(); }); @@ -153,4 +157,60 @@ describe('Initialize Metrics', () => { // biome-ignore lint/complexity/useLiteralKeys: we need to access the internal console object expect(metrics['console']).not.toEqual(console); }); + + class TestMetrics extends Metrics { + public isDisabled(): boolean { + return super.isDisabled(); + } + } + + it('does not disable metrics when neither POWERTOOLS_METRICS_DISABLED nor POWERTOOLS_DEV is set', () => { + // Prepare + process.env.POWERTOOLS_DEV = undefined; + process.env.POWERTOOLS_METRICS_DISABLED = undefined; + const metrics = new TestMetrics(); + + // Act & Assess + expect(metrics.isDisabled()).toBe(false); + }); + + it('disables metrics when POWERTOOLS_METRICS_DISABLED is set to true', () => { + // Prepare + process.env.POWERTOOLS_DEV = undefined; + process.env.POWERTOOLS_METRICS_DISABLED = 'true'; + const metrics = new TestMetrics(); + + // Act & Assess + expect(metrics.isDisabled()).toBe(true); + }); + + it('disables metrics when POWERTOOLS_DEV is set to true', () => { + // Prepare + process.env.POWERTOOLS_DEV = 'true'; + process.env.POWERTOOLS_METRICS_DISABLED = undefined; + const metrics = new TestMetrics(); + + // Act & Assess + expect(metrics.isDisabled()).toBe(true); + }); + + it('does not disable metrics when POWERTOOLS_METRICS_DISABLED is set to false', () => { + // Prepare + process.env.POWERTOOLS_DEV = undefined; + process.env.POWERTOOLS_METRICS_DISABLED = 'false'; + const metrics = new TestMetrics(); + + // Act & Assess + expect(metrics.isDisabled()).toBe(false); + }); + + it('does not disable metrics when POWERTOOLS_METRICS_DISABLED overrides POWERTOOLS_DEV', () => { + // Prepare + process.env.POWERTOOLS_DEV = 'true'; + process.env.POWERTOOLS_METRICS_DISABLED = 'false'; + const metrics = new TestMetrics(); + + // Act & Assess + expect(metrics.isDisabled()).toBe(false); + }); }); diff --git a/packages/metrics/tests/unit/logMetrics.test.ts b/packages/metrics/tests/unit/logMetrics.test.ts index 149bdc47de..1d4f7d41e4 100644 --- a/packages/metrics/tests/unit/logMetrics.test.ts +++ b/packages/metrics/tests/unit/logMetrics.test.ts @@ -10,7 +10,11 @@ describe('LogMetrics decorator & Middy.js middleware', () => { const ENVIRONMENT_VARIABLES = process.env; beforeEach(() => { - process.env = { ...ENVIRONMENT_VARIABLES, POWERTOOLS_DEV: 'true' }; + process.env = { + ...ENVIRONMENT_VARIABLES, + POWERTOOLS_DEV: 'true', + POWERTOOLS_METRICS_DISABLED: 'false', + }; vi.resetAllMocks(); }); diff --git a/packages/metrics/tests/unit/metadata.test.ts b/packages/metrics/tests/unit/metadata.test.ts index db826aebb8..ede6b82264 100644 --- a/packages/metrics/tests/unit/metadata.test.ts +++ b/packages/metrics/tests/unit/metadata.test.ts @@ -5,7 +5,11 @@ describe('Working with metadata', () => { const ENVIRONMENT_VARIABLES = process.env; beforeEach(() => { - 
process.env = { ...ENVIRONMENT_VARIABLES, POWERTOOLS_DEV: 'true' }; + process.env = { + ...ENVIRONMENT_VARIABLES, + POWERTOOLS_DEV: 'true', + POWERTOOLS_METRICS_DISABLED: 'false', + }; vi.resetAllMocks(); }); diff --git a/packages/metrics/tests/unit/middleware/middy.test.ts b/packages/metrics/tests/unit/middleware/middy.test.ts deleted file mode 100644 index 8d2a17ec51..0000000000 --- a/packages/metrics/tests/unit/middleware/middy.test.ts +++ /dev/null @@ -1,452 +0,0 @@ -/** - * Test metrics middleware - * - * @group unit/metrics/middleware - */ -import { cleanupMiddlewares } from '@aws-lambda-powertools/commons'; -import context from '@aws-lambda-powertools/testing-utils/context'; -import middy from '@middy/core'; -import { MetricResolution, MetricUnit, Metrics } from '../../../src/index.js'; -import { logMetrics } from '../../../src/middleware/middy.js'; -import type { ExtraOptions } from '../../../src/types/index.js'; - -jest.mock('node:console', () => ({ - ...jest.requireActual('node:console'), - Console: jest.fn().mockImplementation(() => ({ - log: jest.fn(), - warn: jest.fn(), - debug: jest.fn(), - })), -})); -jest.spyOn(console, 'warn').mockImplementation(() => ({})); -const mockDate = new Date(1466424490000); -jest.spyOn(global, 'Date').mockImplementation(() => mockDate); - -describe('Middy middleware', () => { - const ENVIRONMENT_VARIABLES = process.env; - - beforeEach(() => { - jest.clearAllMocks(); - process.env = { ...ENVIRONMENT_VARIABLES }; - }); - - const event = { - foo: 'bar', - bar: 'baz', - }; - - describe('throwOnEmptyMetrics', () => { - test('should throw on empty metrics if set to true', async () => { - // Prepare - const metrics = new Metrics({ - namespace: 'serverlessAirline', - serviceName: 'orders', - }); - const handler = middy(async (): Promise => undefined).use( - logMetrics(metrics, { throwOnEmptyMetrics: true }) - ); - - await expect(handler(event, context)).rejects.toThrowError( - 'The number of metrics recorded must be higher than zero' - ); - }); - - test('should not throw on empty metrics if set to false', async () => { - // Prepare - const metrics = new Metrics({ - namespace: 'serverlessAirline', - serviceName: 'orders', - }); - const handler = middy(async (): Promise => undefined).use( - logMetrics(metrics, { throwOnEmptyMetrics: false }) - ); - - // Act & Assess - await expect(handler(event, context)).resolves.not.toThrowError(); - }); - - test('should not throw on empty metrics if not set, but should log a warning', async () => { - // Prepare - const metrics = new Metrics({ - namespace: 'serverlessAirline', - serviceName: 'orders', - logger: console, - }); - const consoleWarnSpy = jest.spyOn(console, 'warn').mockImplementation(); - const handler = middy(async (): Promise => undefined).use( - logMetrics([metrics]) - ); - - // Act & Assess - await expect(handler(event, context)).resolves.not.toThrowError(); - expect(consoleWarnSpy).toBeCalledTimes(1); - expect(consoleWarnSpy).toBeCalledWith( - 'No application metrics to publish. The cold-start metric may be published if enabled. 
If application metrics should never be empty, consider using `throwOnEmptyMetrics`' - ); - }); - }); - - describe('captureColdStartMetric', () => { - test('should capture cold start metric if set to true', async () => { - // Prepare - const metrics = new Metrics({ - namespace: 'serverlessAirline', - serviceName: 'orders', - }); - const consoleSpy = jest - // biome-ignore lint/complexity/useLiteralKeys: This needs to be accessed with literal key for testing - .spyOn(metrics['console'], 'log') - .mockImplementation(); - // Monkey patch the singleMetric method to return the metrics instance - // so that we can assert on the console output - jest.spyOn(metrics, 'singleMetric').mockImplementation(() => metrics); - - const handler = middy(async (): Promise => undefined).use( - logMetrics(metrics, { captureColdStartMetric: true }) - ); - - // Act - await handler(event, context); - await handler(event, context); - - // Assess - expect(consoleSpy).toHaveBeenCalledTimes(1); - const loggedData = JSON.parse(consoleSpy.mock.calls[0][0]); - expect(loggedData._aws.CloudWatchMetrics[0].Metrics.length).toBe(1); - expect(loggedData._aws.CloudWatchMetrics[0].Metrics[0].Name).toBe( - 'ColdStart' - ); - expect(loggedData._aws.CloudWatchMetrics[0].Metrics[0].Unit).toBe( - 'Count' - ); - expect(loggedData.ColdStart).toBe(1); - }); - - test('should not capture cold start metrics if set to false', async () => { - // Prepare - const metrics = new Metrics({ - namespace: 'serverlessAirline', - serviceName: 'orders', - }); - const consoleSpy = jest - // biome-ignore lint/complexity/useLiteralKeys: This needs to be accessed with literal key for testing - .spyOn(metrics['console'], 'log') - .mockImplementation(); - // Monkey patch the singleMetric method to return the metrics instance - // so that we can assert on the console output - jest.spyOn(metrics, 'singleMetric').mockImplementation(() => metrics); - const handler = middy(async (): Promise => undefined).use( - logMetrics(metrics, { captureColdStartMetric: false }) - ); - - // Act - await handler(event, context); - - // Assess - expect(consoleSpy).not.toHaveBeenCalled(); - }); - - test('should not throw on empty metrics if not set', async () => { - // Prepare - const metrics = new Metrics({ - namespace: 'serverlessAirline', - serviceName: 'orders', - }); - const handler = middy(async (): Promise => undefined).use( - logMetrics(metrics) - ); - - // Act & Assess - await expect(handler(event, context)).resolves.not.toThrow(); - }); - }); - - describe('logMetrics', () => { - test('when a metrics instance receive multiple metrics with the same name, it prints multiple values in an array format', async () => { - // Prepare - const metrics = new Metrics({ - namespace: 'serverlessAirline', - serviceName: 'orders', - }); - // biome-ignore lint/complexity/useLiteralKeys: This needs to be accessed with literal key for testing - const consoleSpy = jest.spyOn(metrics['console'], 'log'); - const handler = middy(async (): Promise => { - metrics.addMetric('successfulBooking', MetricUnit.Count, 2); - metrics.addMetric('successfulBooking', MetricUnit.Count, 1); - }).use(logMetrics(metrics)); - - // Act - await handler(event, context); - - // Assess - expect(consoleSpy).toHaveBeenNthCalledWith( - 1, - JSON.stringify({ - _aws: { - Timestamp: 1466424490000, - CloudWatchMetrics: [ - { - Namespace: 'serverlessAirline', - Dimensions: [['service']], - Metrics: [{ Name: 'successfulBooking', Unit: 'Count' }], - }, - ], - }, - service: 'orders', - successfulBooking: [2, 1], - }) - ); - }); - 
- test('when a metrics instance is passed WITH custom options, it prints the metrics in the stdout', async () => { - // Prepare - const metrics = new Metrics({ - namespace: 'serverlessAirline', - serviceName: 'orders', - }); - // biome-ignore lint/complexity/useLiteralKeys: This needs to be accessed with literal key for testing - const consoleSpy = jest.spyOn(metrics['console'], 'log'); - const metricsOptions: ExtraOptions = { - throwOnEmptyMetrics: true, - defaultDimensions: { environment: 'prod', aws_region: 'eu-west-1' }, - captureColdStartMetric: true, - }; - const handler = middy(async (): Promise => { - metrics.addMetric('successfulBooking', MetricUnit.Count, 1); - }).use(logMetrics(metrics, metricsOptions)); - - // Act - await handler(event, context); - - // Assess - expect(consoleSpy).toHaveBeenNthCalledWith( - 1, - JSON.stringify({ - _aws: { - Timestamp: 1466424490000, - CloudWatchMetrics: [ - { - Namespace: 'serverlessAirline', - Dimensions: [['service', 'environment', 'aws_region']], - Metrics: [{ Name: 'successfulBooking', Unit: 'Count' }], - }, - ], - }, - service: 'orders', - environment: 'prod', - aws_region: 'eu-west-1', - successfulBooking: 1, - }) - ); - }); - - test('when a metrics instance is passed WITHOUT custom options, it prints the metrics in the stdout', async () => { - // Prepare - const metrics = new Metrics({ - namespace: 'serverlessAirline', - serviceName: 'orders', - }); - // biome-ignore lint/complexity/useLiteralKeys: This needs to be accessed with literal key for testing - const consoleSpy = jest.spyOn(metrics['console'], 'log'); - const handler = middy(async (): Promise => { - metrics.addMetric('successfulBooking', MetricUnit.Count, 1); - }).use(logMetrics(metrics)); - - // Act - await handler(event, context); - - // Assess - expect(consoleSpy).toHaveBeenNthCalledWith( - 1, - JSON.stringify({ - _aws: { - Timestamp: 1466424490000, - CloudWatchMetrics: [ - { - Namespace: 'serverlessAirline', - Dimensions: [['service']], - Metrics: [{ Name: 'successfulBooking', Unit: 'Count' }], - }, - ], - }, - service: 'orders', - successfulBooking: 1, - }) - ); - }); - - test('when an array of Metrics instances is passed, it prints the metrics in the stdout', async () => { - // Prepare - const metrics = new Metrics({ - namespace: 'serverlessAirline', - serviceName: 'orders', - }); - // biome-ignore lint/complexity/useLiteralKeys: This needs to be accessed with literal key for testing - const consoleSpy = jest.spyOn(metrics['console'], 'log'); - const handler = middy(async (): Promise => { - metrics.addMetric('successfulBooking', MetricUnit.Count, 1); - }).use( - logMetrics(metrics, { - throwOnEmptyMetrics: true, - }) - ); - - // Act - await handler(event, context); - - // Assess - expect(consoleSpy).toHaveBeenNthCalledWith( - 1, - JSON.stringify({ - _aws: { - Timestamp: 1466424490000, - CloudWatchMetrics: [ - { - Namespace: 'serverlessAirline', - Dimensions: [['service']], - Metrics: [{ Name: 'successfulBooking', Unit: 'Count' }], - }, - ], - }, - service: 'orders', - successfulBooking: 1, - }) - ); - }); - - test('when enabled, and another middleware returns early, it still publishes the metrics at the end of the execution', async () => { - // Prepare - const metrics = new Metrics({ - namespace: 'serverlessAirline', - serviceName: 'orders', - }); - const publishStoredMetricsSpy = jest.spyOn( - metrics, - 'publishStoredMetrics' - ); - const myCustomMiddleware = (): middy.MiddlewareObj => { - const before = async ( - request: middy.Request - ): Promise => { - // Return 
early on the second invocation - if (request.event.idx === 1) { - // Cleanup Powertools resources - await cleanupMiddlewares(request); - - // Then return early - return 'foo'; - } - }; - - return { - before, - }; - }; - const handler = middy( - (_event: { foo: string; bar: string } & { idx: number }): void => { - metrics.addMetric('successfulBooking', MetricUnit.Count, 1); - } - ) - .use(logMetrics(metrics)) - .use(myCustomMiddleware()); - - // Act - await handler({ ...event, idx: 0 }, context); - await handler({ ...event, idx: 1 }, context); - - // Assess - expect(publishStoredMetricsSpy).toBeCalledTimes(2); - }); - }); - describe('Metrics resolution', () => { - test('serialized metrics in EMF format should not contain `StorageResolution` as key if `60` is set', async () => { - // Prepare - const metrics = new Metrics({ - namespace: 'serverlessAirline', - serviceName: 'orders', - }); - - // biome-ignore lint/complexity/useLiteralKeys: This needs to be accessed with literal key for testing - const consoleSpy = jest.spyOn(metrics['console'], 'log'); - const handler = middy((): void => { - metrics.addMetric( - 'successfulBooking', - MetricUnit.Count, - 1, - MetricResolution.Standard - ); - }).use(logMetrics(metrics)); - - // Act - await handler(event, context); - - // Assess - expect(consoleSpy).toHaveBeenCalledWith( - JSON.stringify({ - _aws: { - Timestamp: 1466424490000, - CloudWatchMetrics: [ - { - Namespace: 'serverlessAirline', - Dimensions: [['service']], - Metrics: [ - { - Name: 'successfulBooking', - Unit: 'Count', - }, - ], - }, - ], - }, - service: 'orders', - successfulBooking: 1, - }) - ); - }); - - test('Should be StorageResolution `1` if MetricResolution is set to `High`', async () => { - // Prepare - const metrics = new Metrics({ - namespace: 'serverlessAirline', - serviceName: 'orders', - }); - // biome-ignore lint/complexity/useLiteralKeys: This needs to be accessed with literal key for testing - const consoleSpy = jest.spyOn(metrics['console'], 'log'); - const handler = middy((): void => { - metrics.addMetric( - 'successfulBooking', - MetricUnit.Count, - 1, - MetricResolution.High - ); - }).use(logMetrics(metrics)); - - // Act - await handler(event, context); - - // Assess - expect(consoleSpy).toHaveBeenCalledWith( - JSON.stringify({ - _aws: { - Timestamp: 1466424490000, - CloudWatchMetrics: [ - { - Namespace: 'serverlessAirline', - Dimensions: [['service']], - Metrics: [ - { - Name: 'successfulBooking', - Unit: 'Count', - StorageResolution: 1, - }, - ], - }, - ], - }, - service: 'orders', - successfulBooking: 1, - }) - ); - }); - }); -}); From 6b143fac263c42e3660ef1b9d28657adee2bccb5 Mon Sep 17 00:00:00 2001 From: Andrea Amorosi Date: Mon, 25 Nov 2024 12:14:10 +0100 Subject: [PATCH 3/6] chore: pre-push hook --- .husky/pre-push | 3 --- 1 file changed, 3 deletions(-) diff --git a/.husky/pre-push b/.husky/pre-push index c1b04d1bf3..4a543f88ab 100755 --- a/.husky/pre-push +++ b/.husky/pre-push @@ -1,6 +1,3 @@ -npm t \ - -w packages/metrics - npx vitest --run \ --exclude tests/unit/layer-publisher.test.ts \ --coverage --coverage.thresholds.100 \ From 98a3a511c0bdef4076e50f48d5c1774f2b197d2d Mon Sep 17 00:00:00 2001 From: Andrea Amorosi Date: Mon, 25 Nov 2024 12:24:06 +0100 Subject: [PATCH 4/6] chore: update CI --- ...sable-run-linting-check-and-unit-tests.yml | 27 +------------------ 1 file changed, 1 insertion(+), 26 deletions(-) diff --git a/.github/workflows/reusable-run-linting-check-and-unit-tests.yml b/.github/workflows/reusable-run-linting-check-and-unit-tests.yml index 
ad1ce5177b..e247172381 100644 --- a/.github/workflows/reusable-run-linting-check-and-unit-tests.yml +++ b/.github/workflows/reusable-run-linting-check-and-unit-tests.yml @@ -50,6 +50,7 @@ jobs: "packages/tracer", "packages/parser", "packages/parameters", + "packages/metrics" ] fail-fast: false steps: @@ -70,32 +71,6 @@ jobs: run: | npm run test:unit:coverage -w ${{ matrix.workspace }} npm run test:unit:types -w ${{ matrix.workspace }} - run-linting-check-and-unit-tests-on-utilities: - runs-on: ubuntu-latest - env: - NODE_ENV: dev - strategy: - matrix: - version: [18, 20, 22] - fail-fast: false - steps: - - name: Checkout code - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - - name: Setup NodeJS - uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0 - with: - node-version: ${{ matrix.version }} - cache: "npm" - - name: Setup dependencies - uses: aws-powertools/actions/.github/actions/cached-node-modules@d406bac5563f1d8c793519a3eedfe620f6a14872 - with: - nodeVersion: ${{ matrix.version }} - - name: Run linting - run: | - npm run lint -w packages/metrics - - name: Run unit tests - run: | - npm t -w packages/metrics check-examples: runs-on: ubuntu-latest env: From 27d0a84cdda9ac3ff94fa15b413ac910ccdba89c Mon Sep 17 00:00:00 2001 From: Andrea Amorosi Date: Mon, 25 Nov 2024 14:00:59 +0100 Subject: [PATCH 5/6] Update packages/metrics/package.json Co-authored-by: Alexander Schueren --- packages/metrics/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/metrics/package.json b/packages/metrics/package.json index 3a3f36c863..000f515e7e 100644 --- a/packages/metrics/package.json +++ b/packages/metrics/package.json @@ -19,7 +19,7 @@ "test:e2e:nodejs20x": "RUNTIME=nodejs20x vitest --run tests/e2e", "test:e2e:nodejs22x": "RUNTIME=nodejs22x vitest --run tests/e2e", "test:e2e": "vitest --run tests/e2e", - "watch": "jest --group=unit --watch ", + "watch": "vitest watch test/unit ", "build:cjs": "tsc --build tsconfig.json && echo '{ \"type\": \"commonjs\" }' > lib/cjs/package.json", "build:esm": "tsc --build tsconfig.esm.json && echo '{ \"type\": \"module\" }' > lib/esm/package.json", "build": "npm run build:esm & npm run build:cjs", From 5a9cbab780ccf4f58a8c87a0d273e444c912d7e1 Mon Sep 17 00:00:00 2001 From: Andrea Amorosi Date: Mon, 25 Nov 2024 14:02:27 +0100 Subject: [PATCH 6/6] chore: remove watch --- packages/metrics/package.json | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/metrics/package.json b/packages/metrics/package.json index 000f515e7e..a4d13bc4ec 100644 --- a/packages/metrics/package.json +++ b/packages/metrics/package.json @@ -19,7 +19,6 @@ "test:e2e:nodejs20x": "RUNTIME=nodejs20x vitest --run tests/e2e", "test:e2e:nodejs22x": "RUNTIME=nodejs22x vitest --run tests/e2e", "test:e2e": "vitest --run tests/e2e", - "watch": "vitest watch test/unit ", "build:cjs": "tsc --build tsconfig.json && echo '{ \"type\": \"commonjs\" }' > lib/cjs/package.json", "build:esm": "tsc --build tsconfig.esm.json && echo '{ \"type\": \"module\" }' > lib/esm/package.json", "build": "npm run build:esm & npm run build:cjs",