From fa6191e365aa9f9763f71107c573c6792ad8cbaa Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 25 Jul 2023 14:39:50 +0200 Subject: [PATCH 01/24] fix(parameters): make cache aware of single vs multiple calls Signed-off-by: heitorlessa --- aws_lambda_powertools/utilities/parameters/base.py | 2 +- aws_lambda_powertools/utilities/parameters/types.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/aws_lambda_powertools/utilities/parameters/base.py b/aws_lambda_powertools/utilities/parameters/base.py index e4be9d33cdc..78bf865faf0 100644 --- a/aws_lambda_powertools/utilities/parameters/base.py +++ b/aws_lambda_powertools/utilities/parameters/base.py @@ -27,7 +27,7 @@ from aws_lambda_powertools.shared import constants, user_agent from aws_lambda_powertools.shared.functions import resolve_max_age -from aws_lambda_powertools.utilities.parameters.types import TransformOptions +from aws_lambda_powertools.utilities.parameters.types import RecursiveOptions, TransformOptions from .exceptions import GetParameterError, TransformParameterError diff --git a/aws_lambda_powertools/utilities/parameters/types.py b/aws_lambda_powertools/utilities/parameters/types.py index 6a15873c496..2dbf1593d72 100644 --- a/aws_lambda_powertools/utilities/parameters/types.py +++ b/aws_lambda_powertools/utilities/parameters/types.py @@ -1,3 +1,4 @@ from typing_extensions import Literal TransformOptions = Literal["json", "binary", "auto", None] +RecursiveOptions = Literal[True, False] From e89705b51f754dfa72b7e44b4ff4a0183b480c17 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 25 Jul 2023 15:16:51 +0200 Subject: [PATCH 02/24] chore: cleanup, add test for single and nested Signed-off-by: heitorlessa --- aws_lambda_powertools/utilities/parameters/base.py | 2 +- aws_lambda_powertools/utilities/parameters/types.py | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/aws_lambda_powertools/utilities/parameters/base.py 
b/aws_lambda_powertools/utilities/parameters/base.py index 78bf865faf0..e4be9d33cdc 100644 --- a/aws_lambda_powertools/utilities/parameters/base.py +++ b/aws_lambda_powertools/utilities/parameters/base.py @@ -27,7 +27,7 @@ from aws_lambda_powertools.shared import constants, user_agent from aws_lambda_powertools.shared.functions import resolve_max_age -from aws_lambda_powertools.utilities.parameters.types import RecursiveOptions, TransformOptions +from aws_lambda_powertools.utilities.parameters.types import TransformOptions from .exceptions import GetParameterError, TransformParameterError diff --git a/aws_lambda_powertools/utilities/parameters/types.py b/aws_lambda_powertools/utilities/parameters/types.py index 2dbf1593d72..6a15873c496 100644 --- a/aws_lambda_powertools/utilities/parameters/types.py +++ b/aws_lambda_powertools/utilities/parameters/types.py @@ -1,4 +1,3 @@ from typing_extensions import Literal TransformOptions = Literal["json", "binary", "auto", None] -RecursiveOptions = Literal[True, False] From a3d278712a966239a95e94f72d96acd4f6fdb210 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 10 Aug 2023 22:16:11 +0100 Subject: [PATCH 03/24] chore(deps): bump pypa/gh-action-pypi-publish from 1.8.8 to 1.8.9 (#2943) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/release.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 465ee561c4a..98012b067ef 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -237,12 +237,12 @@ jobs: - name: Upload to PyPi prod if: ${{ !inputs.skip_pypi }} - uses: pypa/gh-action-pypi-publish@f8c70e705ffc13c3b4d1221169b84f12a75d6ca8 # v1.8.8 + uses: pypa/gh-action-pypi-publish@ade57f54dcc56d4858ca681c80269c26dc7b9149 # v1.8.9 # PyPi test maintenance affected us numerous times, leaving for history 
purposes # - name: Upload to PyPi test # if: ${{ !inputs.skip_pypi }} - # uses: pypa/gh-action-pypi-publish@f8c70e705ffc13c3b4d1221169b84f12a75d6ca8 # v1.8.8 + # uses: pypa/gh-action-pypi-publish@ade57f54dcc56d4858ca681c80269c26dc7b9149 # v1.8.9 # with: # repository-url: https://test.pypi.org/legacy/ From 91554519d5889bd0eed0b9056e2fca1dc90cfc27 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 11 Aug 2023 00:13:07 +0100 Subject: [PATCH 04/24] chore(deps-dev): bump the boto-typing group with 1 update (#2944) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Leandro Damascena --- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index 4ca647d5721..b101ea72568 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1693,13 +1693,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.9\""} [[package]] name = "mypy-boto3-secretsmanager" -version = "1.28.16" -description = "Type annotations for boto3.SecretsManager 1.28.16 service generated with mypy-boto3-builder 7.17.1" +version = "1.28.24" +description = "Type annotations for boto3.SecretsManager 1.28.24 service generated with mypy-boto3-builder 7.17.2" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-secretsmanager-1.28.16.tar.gz", hash = "sha256:07f443b31d2114ac363cfbdbc5f4b97934ca48fb99734bbd06d5c39bce244b83"}, - {file = "mypy_boto3_secretsmanager-1.28.16-py3-none-any.whl", hash = "sha256:05508c3a96d96e482e5aff21b508319a1911e6662aea5be96aa7f7089b8dbfd4"}, + {file = "mypy-boto3-secretsmanager-1.28.24.tar.gz", hash = "sha256:13461d8d2891ec0e430437dbb71c0879ee431ddfedb6b21c265878642faeb2a7"}, + {file = "mypy_boto3_secretsmanager-1.28.24-py3-none-any.whl", hash = "sha256:e224809e28d99c1360bfe6428e8b567bb4a43c38a71263eba0ff4de7fa321142"}, ] [package.dependencies] diff --git 
a/pyproject.toml b/pyproject.toml index d12a8069c31..0a332631f9d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -64,7 +64,7 @@ mypy-boto3-cloudwatch = "^1.28.16" mypy-boto3-dynamodb = "^1.28.19" mypy-boto3-lambda = "^1.28.19" mypy-boto3-logs = "^1.28.16" -mypy-boto3-secretsmanager = "^1.28.16" +mypy-boto3-secretsmanager = "^1.28.24" mypy-boto3-ssm = "^1.28.16" mypy-boto3-s3 = "^1.28.19" mypy-boto3-xray = "^1.28.16" From 50ec8250e3d09221b84196e1c6d81b3e8f407a3c Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 11 Aug 2023 11:55:07 +0200 Subject: [PATCH 05/24] chore(ci): changelog rebuild (#2945) Co-authored-by: Powertools for AWS Lambda (Python) bot --- CHANGELOG.md | 31 +++++++++++++++++-------------- 1 file changed, 17 insertions(+), 14 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ecaec447097..84e2c81efd8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -30,44 +30,47 @@ ## Maintenance -* **ci:** enable protected branch auditing ([#2913](https://github.com/aws-powertools/powertools-lambda-python/issues/2913)) * **ci:** group dependabot updates ([#2896](https://github.com/aws-powertools/powertools-lambda-python/issues/2896)) -* **deps:** bump github.com/aws/aws-sdk-go-v2/config from 1.18.28 to 1.18.29 in /layer/scripts/layer-balancer ([#2844](https://github.com/aws-powertools/powertools-lambda-python/issues/2844)) -* **deps:** bump the layer-balancer group in /layer/scripts/layer-balancer with 2 updates ([#2904](https://github.com/aws-powertools/powertools-lambda-python/issues/2904)) +* **ci:** enable protected branch auditing ([#2913](https://github.com/aws-powertools/powertools-lambda-python/issues/2913)) +* **deps:** bump pydantic from 1.10.11 to 1.10.12 ([#2846](https://github.com/aws-powertools/powertools-lambda-python/issues/2846)) * **deps:** bump squidfunk/mkdocs-material from `33e28bd` to `cd3a522` in 
/docs ([#2859](https://github.com/aws-powertools/powertools-lambda-python/issues/2859)) * **deps:** bump slsa-framework/slsa-github-generator from 1.7.0 to 1.8.0 ([#2927](https://github.com/aws-powertools/powertools-lambda-python/issues/2927)) -* **deps:** bump github.com/aws/aws-sdk-go-v2/service/lambda from 1.37.1 to 1.38.0 in /layer/scripts/layer-balancer ([#2843](https://github.com/aws-powertools/powertools-lambda-python/issues/2843)) +* **deps:** bump the layer-balancer group in /layer/scripts/layer-balancer with 3 updates ([#2933](https://github.com/aws-powertools/powertools-lambda-python/issues/2933)) +* **deps:** bump the layer-balancer group in /layer/scripts/layer-balancer with 2 updates ([#2904](https://github.com/aws-powertools/powertools-lambda-python/issues/2904)) +* **deps:** bump github.com/aws/aws-sdk-go-v2/config from 1.18.28 to 1.18.29 in /layer/scripts/layer-balancer ([#2844](https://github.com/aws-powertools/powertools-lambda-python/issues/2844)) +* **deps:** bump actions/dependency-review-action from 3.0.6 to 3.0.7 ([#2941](https://github.com/aws-powertools/powertools-lambda-python/issues/2941)) * **deps:** bump github.com/aws/aws-sdk-go-v2 from 1.19.0 to 1.19.1 in /layer/scripts/layer-balancer ([#2877](https://github.com/aws-powertools/powertools-lambda-python/issues/2877)) * **deps:** bump github.com/aws/aws-sdk-go-v2/service/lambda from 1.38.0 to 1.38.1 in /layer/scripts/layer-balancer ([#2876](https://github.com/aws-powertools/powertools-lambda-python/issues/2876)) -* **deps:** bump pydantic from 1.10.11 to 1.10.12 ([#2846](https://github.com/aws-powertools/powertools-lambda-python/issues/2846)) +* **deps:** bump github.com/aws/aws-sdk-go-v2/config from 1.18.29 to 1.18.30 in /layer/scripts/layer-balancer 
([#2875](https://github.com/aws-powertools/powertools-lambda-python/issues/2875)) +* **deps:** bump github.com/aws/aws-sdk-go-v2/service/lambda from 1.37.1 to 1.38.0 in /layer/scripts/layer-balancer ([#2843](https://github.com/aws-powertools/powertools-lambda-python/issues/2843)) * **deps:** bump github.com/aws/aws-sdk-go-v2/config from 1.18.30 to 1.18.31 in /layer/scripts/layer-balancer ([#2889](https://github.com/aws-powertools/powertools-lambda-python/issues/2889)) -* **deps:** bump actions/dependency-review-action from 3.0.6 to 3.0.7 ([#2941](https://github.com/aws-powertools/powertools-lambda-python/issues/2941)) +* **deps:** bump pypa/gh-action-pypi-publish from 1.8.8 to 1.8.9 ([#2943](https://github.com/aws-powertools/powertools-lambda-python/issues/2943)) * **deps:** bump github.com/aws/aws-sdk-go-v2/service/lambda from 1.38.1 to 1.39.0 in /layer/scripts/layer-balancer ([#2890](https://github.com/aws-powertools/powertools-lambda-python/issues/2890)) -* **deps:** bump github.com/aws/aws-sdk-go-v2/config from 1.18.29 to 1.18.30 in /layer/scripts/layer-balancer ([#2875](https://github.com/aws-powertools/powertools-lambda-python/issues/2875)) -* **deps:** bump the layer-balancer group in /layer/scripts/layer-balancer with 3 updates ([#2933](https://github.com/aws-powertools/powertools-lambda-python/issues/2933)) +* **deps-dev:** bump the boto-typing group with 1 update ([#2944](https://github.com/aws-powertools/powertools-lambda-python/issues/2944)) * **deps-dev:** bump mkdocs-material from 9.1.19 to 9.1.21 ([#2894](https://github.com/aws-powertools/powertools-lambda-python/issues/2894)) * **deps-dev:** bump aws-cdk from 2.88.0 to 2.89.0 ([#2887](https://github.com/aws-powertools/powertools-lambda-python/issues/2887)) +* **deps-dev:** bump the boto-typing group with 11 updates 
([#2901](https://github.com/aws-powertools/powertools-lambda-python/issues/2901)) * **deps-dev:** bump mypy-boto3-logs from 1.28.1 to 1.28.15 ([#2880](https://github.com/aws-powertools/powertools-lambda-python/issues/2880)) * **deps-dev:** bump mypy-boto3-appconfigdata from 1.28.0 to 1.28.15 ([#2879](https://github.com/aws-powertools/powertools-lambda-python/issues/2879)) * **deps-dev:** bump mypy-boto3-lambda from 1.28.11 to 1.28.15 ([#2878](https://github.com/aws-powertools/powertools-lambda-python/issues/2878)) * **deps-dev:** bump mypy-boto3-xray from 1.28.0 to 1.28.15 ([#2881](https://github.com/aws-powertools/powertools-lambda-python/issues/2881)) -* **deps-dev:** bump the boto-typing group with 11 updates ([#2901](https://github.com/aws-powertools/powertools-lambda-python/issues/2901)) * **deps-dev:** bump cfn-lint from 0.79.5 to 0.79.6 ([#2899](https://github.com/aws-powertools/powertools-lambda-python/issues/2899)) * **deps-dev:** bump sentry-sdk from 1.28.1 to 1.29.0 ([#2900](https://github.com/aws-powertools/powertools-lambda-python/issues/2900)) -* **deps-dev:** bump cfn-lint from 0.79.4 to 0.79.5 ([#2870](https://github.com/aws-powertools/powertools-lambda-python/issues/2870)) * **deps-dev:** bump ruff from 0.0.280 to 0.0.281 ([#2891](https://github.com/aws-powertools/powertools-lambda-python/issues/2891)) +* **deps-dev:** bump cfn-lint from 0.79.4 to 0.79.5 ([#2870](https://github.com/aws-powertools/powertools-lambda-python/issues/2870)) +* **deps-dev:** bump ruff from 0.0.281 to 0.0.282 ([#2905](https://github.com/aws-powertools/powertools-lambda-python/issues/2905)) * **deps-dev:** bump mypy-boto3-cloudformation from 1.28.10 to 1.28.12 ([#2864](https://github.com/aws-powertools/powertools-lambda-python/issues/2864)) * **deps-dev:** bump mypy-boto3-cloudwatch from 1.28.0 to 1.28.12 
([#2865](https://github.com/aws-powertools/powertools-lambda-python/issues/2865)) * **deps-dev:** bump cfn-lint from 0.79.3 to 0.79.4 ([#2862](https://github.com/aws-powertools/powertools-lambda-python/issues/2862)) * **deps-dev:** bump mypy-boto3-appconfig from 1.28.0 to 1.28.12 ([#2861](https://github.com/aws-powertools/powertools-lambda-python/issues/2861)) * **deps-dev:** bump mypy-boto3-ssm from 1.28.0 to 1.28.12 ([#2863](https://github.com/aws-powertools/powertools-lambda-python/issues/2863)) -* **deps-dev:** bump ruff from 0.0.281 to 0.0.282 ([#2905](https://github.com/aws-powertools/powertools-lambda-python/issues/2905)) -* **deps-dev:** bump ruff from 0.0.282 to 0.0.283 ([#2937](https://github.com/aws-powertools/powertools-lambda-python/issues/2937)) -* **deps-dev:** bump cfn-lint from 0.78.2 to 0.79.3 ([#2854](https://github.com/aws-powertools/powertools-lambda-python/issues/2854)) -* **deps-dev:** bump aws-cdk from 2.89.0 to 2.90.0 ([#2932](https://github.com/aws-powertools/powertools-lambda-python/issues/2932)) * **deps-dev:** bump mypy-boto3-dynamodb from 1.28.0 to 1.28.11 ([#2847](https://github.com/aws-powertools/powertools-lambda-python/issues/2847)) * **deps-dev:** bump the boto-typing group with 4 updates ([#2928](https://github.com/aws-powertools/powertools-lambda-python/issues/2928)) +* **deps-dev:** bump aws-cdk from 2.89.0 to 2.90.0 ([#2932](https://github.com/aws-powertools/powertools-lambda-python/issues/2932)) * **deps-dev:** bump mypy-boto3-lambda from 1.28.0 to 1.28.11 ([#2845](https://github.com/aws-powertools/powertools-lambda-python/issues/2845)) +* **deps-dev:** bump ruff from 0.0.282 to 0.0.283 ([#2937](https://github.com/aws-powertools/powertools-lambda-python/issues/2937)) +* **deps-dev:** bump ruff from 0.0.283 to 0.0.284 
([#2940](https://github.com/aws-powertools/powertools-lambda-python/issues/2940)) +* **deps-dev:** bump cfn-lint from 0.78.2 to 0.79.3 ([#2854](https://github.com/aws-powertools/powertools-lambda-python/issues/2854)) * **docs:** disable line length rule using older syntax ([#2920](https://github.com/aws-powertools/powertools-lambda-python/issues/2920)) * **docs:** include the environment variables section in the utilities documentation ([#2925](https://github.com/aws-powertools/powertools-lambda-python/issues/2925)) * **maintenance:** enables publishing docs and changelog, running e2e tests only in the main repository ([#2924](https://github.com/aws-powertools/powertools-lambda-python/issues/2924)) From 3923439bce0cedf3876e50067a699e4a1cf56891 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 11 Aug 2023 21:42:57 +0100 Subject: [PATCH 06/24] chore(deps-dev): bump aws-cdk from 2.90.0 to 2.91.0 (#2947) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- package-lock.json | 14 +++++++------- package.json | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/package-lock.json b/package-lock.json index 88805244fe0..a293273d144 100644 --- a/package-lock.json +++ b/package-lock.json @@ -11,13 +11,13 @@ "package-lock.json": "^1.0.0" }, "devDependencies": { - "aws-cdk": "^2.90.0" + "aws-cdk": "^2.91.0" } }, "node_modules/aws-cdk": { - "version": "2.90.0", - "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.90.0.tgz", - "integrity": "sha512-6u9pCZeDyIo03tQBdutLD723tuHBsbOQDor72FRxq1uNFWRbVCmZ8ROk2/APAjYJbl4BK2lW9SEgAb8hapaybA==", + "version": "2.91.0", + "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.91.0.tgz", + "integrity": "sha512-YSnTiLyNtng0eW1y9XdyopSTP3Kuyhs5cF5iRcaCk9o+3zrvadgxvcWVT7caXNfE8iOI9IKwSd2GiABeVd20eQ==", "dev": true, "bin": { "cdk": "bin/cdk" @@ -51,9 +51,9 @@ }, 
"dependencies": { "aws-cdk": { - "version": "2.90.0", - "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.90.0.tgz", - "integrity": "sha512-6u9pCZeDyIo03tQBdutLD723tuHBsbOQDor72FRxq1uNFWRbVCmZ8ROk2/APAjYJbl4BK2lW9SEgAb8hapaybA==", + "version": "2.91.0", + "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.91.0.tgz", + "integrity": "sha512-YSnTiLyNtng0eW1y9XdyopSTP3Kuyhs5cF5iRcaCk9o+3zrvadgxvcWVT7caXNfE8iOI9IKwSd2GiABeVd20eQ==", "dev": true, "requires": { "fsevents": "2.3.2" diff --git a/package.json b/package.json index 1b3e4d4d348..485a25885b2 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "name": "aws-lambda-powertools-python-e2e", "version": "1.0.0", "devDependencies": { - "aws-cdk": "^2.90.0" + "aws-cdk": "^2.91.0" }, "dependencies": { "package-lock.json": "^1.0.0" From 3445000d3a91ae179b6690921bf8c46a1f940903 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 11 Aug 2023 23:15:06 +0100 Subject: [PATCH 07/24] chore(deps): bump gitpython from 3.1.31 to 3.1.32 in /docs (#2948) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- docs/requirements.txt | 16 +++------------- 1 file changed, 3 insertions(+), 13 deletions(-) diff --git a/docs/requirements.txt b/docs/requirements.txt index 5030fc70306..42d3c2ab26e 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -16,16 +16,10 @@ gitdb==4.0.10 \ --hash=sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a \ --hash=sha256:c286cf298426064079ed96a9e4a9d39e7f3e9bf15ba60701e95f5492f28415c7 # via gitpython -gitpython==3.1.31 \ - --hash=sha256:8ce3bcf69adfdf7c7d503e78fd3b1c492af782d58893b650adb2ac8912ddd573 \ - --hash=sha256:f04893614f6aa713a60cbbe1e6a97403ef633103cdd0ef5eb6efe0deb98dbe8d +gitpython==3.1.32 \ + --hash=sha256:8d9b8cb1e80b9735e8717c9362079d3ce4c6e5ddeebedd0361b228c3a67a62f6 \ + 
--hash=sha256:e3d59b1c2c6ebb9dfa7a184daf3b6dd4914237e7488a1730a6d8f6f5d0b4187f # via mkdocs-git-revision-date-plugin -importlib-metadata==6.7.0 \ - --hash=sha256:1aaf550d4f73e5d6783e7acb77aec43d49da8017410afae93822cc9cca98c4d4 \ - --hash=sha256:cb52082e659e97afc5dac71e79de97d8681de3aa07ff18578330904a9d18e5b5 - # via - # markdown - # mkdocs jinja2==3.1.2 \ --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 @@ -192,7 +186,3 @@ watchdog==3.0.0 \ --hash=sha256:d00e6be486affb5781468457b21a6cbe848c33ef43f9ea4a73b4882e5f188a44 \ --hash=sha256:d429c2430c93b7903914e4db9a966c7f2b068dd2ebdd2fa9b9ce094c7d459f33 # via mkdocs -zipp==3.15.0 \ - --hash=sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b \ - --hash=sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556 - # via importlib-metadata From ab43717f5a37a9ba92e9f65a31fffb01c5e3bcbd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 11 Aug 2023 23:16:35 +0100 Subject: [PATCH 08/24] chore(deps): bump pypa/gh-action-pypi-publish from 1.8.9 to 1.8.10 (#2946) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Leandro Damascena --- .github/workflows/release.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 98012b067ef..613f3278893 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -237,12 +237,12 @@ jobs: - name: Upload to PyPi prod if: ${{ !inputs.skip_pypi }} - uses: pypa/gh-action-pypi-publish@ade57f54dcc56d4858ca681c80269c26dc7b9149 # v1.8.9 + uses: pypa/gh-action-pypi-publish@b7f401de30cb6434a1e19f805ff006643653240e # v1.8.10 # PyPi test maintenance affected us numerous times, leaving for history purposes # - name: Upload to PyPi test # if: ${{ 
!inputs.skip_pypi }} - # uses: pypa/gh-action-pypi-publish@ade57f54dcc56d4858ca681c80269c26dc7b9149 # v1.8.9 + # uses: pypa/gh-action-pypi-publish@b7f401de30cb6434a1e19f805ff006643653240e # v1.8.10 # with: # repository-url: https://test.pypi.org/legacy/ From 8ba31a35187ee7418813904cb4e54b56a5e10a10 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 14 Aug 2023 08:34:44 +0100 Subject: [PATCH 09/24] chore(ci): changelog rebuild (#2952) Co-authored-by: Powertools for AWS Lambda (Python) bot --- CHANGELOG.md | 33 ++++++++++++++++++--------------- 1 file changed, 18 insertions(+), 15 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 84e2c81efd8..caf81f591a6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -32,44 +32,47 @@ * **ci:** group dependabot updates ([#2896](https://github.com/aws-powertools/powertools-lambda-python/issues/2896)) * **ci:** enable protected branch auditing ([#2913](https://github.com/aws-powertools/powertools-lambda-python/issues/2913)) +* **deps:** bump github.com/aws/aws-sdk-go-v2/service/lambda from 1.38.1 to 1.39.0 in /layer/scripts/layer-balancer ([#2890](https://github.com/aws-powertools/powertools-lambda-python/issues/2890)) +* **deps:** bump the layer-balancer group in /layer/scripts/layer-balancer with 3 updates ([#2933](https://github.com/aws-powertools/powertools-lambda-python/issues/2933)) +* **deps:** bump slsa-framework/slsa-github-generator from 1.7.0 to 1.8.0 ([#2927](https://github.com/aws-powertools/powertools-lambda-python/issues/2927)) * **deps:** bump pydantic from 1.10.11 to 1.10.12 ([#2846](https://github.com/aws-powertools/powertools-lambda-python/issues/2846)) +* **deps:** bump github.com/aws/aws-sdk-go-v2 from 1.19.0 to 1.19.1 in /layer/scripts/layer-balancer ([#2877](https://github.com/aws-powertools/powertools-lambda-python/issues/2877)) * **deps:** bump 
squidfunk/mkdocs-material from `33e28bd` to `cd3a522` in /docs ([#2859](https://github.com/aws-powertools/powertools-lambda-python/issues/2859)) -* **deps:** bump slsa-framework/slsa-github-generator from 1.7.0 to 1.8.0 ([#2927](https://github.com/aws-powertools/powertools-lambda-python/issues/2927)) -* **deps:** bump the layer-balancer group in /layer/scripts/layer-balancer with 3 updates ([#2933](https://github.com/aws-powertools/powertools-lambda-python/issues/2933)) -* **deps:** bump the layer-balancer group in /layer/scripts/layer-balancer with 2 updates ([#2904](https://github.com/aws-powertools/powertools-lambda-python/issues/2904)) -* **deps:** bump github.com/aws/aws-sdk-go-v2/config from 1.18.28 to 1.18.29 in /layer/scripts/layer-balancer ([#2844](https://github.com/aws-powertools/powertools-lambda-python/issues/2844)) * **deps:** bump actions/dependency-review-action from 3.0.6 to 3.0.7 ([#2941](https://github.com/aws-powertools/powertools-lambda-python/issues/2941)) -* **deps:** bump github.com/aws/aws-sdk-go-v2 from 1.19.0 to 1.19.1 in /layer/scripts/layer-balancer ([#2877](https://github.com/aws-powertools/powertools-lambda-python/issues/2877)) +* **deps:** bump the layer-balancer group in /layer/scripts/layer-balancer with 2 updates ([#2904](https://github.com/aws-powertools/powertools-lambda-python/issues/2904)) * **deps:** bump github.com/aws/aws-sdk-go-v2/service/lambda from 1.38.0 to 1.38.1 in /layer/scripts/layer-balancer ([#2876](https://github.com/aws-powertools/powertools-lambda-python/issues/2876)) +* **deps:** bump pypa/gh-action-pypi-publish from 1.8.8 to 1.8.9 ([#2943](https://github.com/aws-powertools/powertools-lambda-python/issues/2943)) * **deps:** bump github.com/aws/aws-sdk-go-v2/config from 1.18.29 to 1.18.30 in /layer/scripts/layer-balancer 
([#2875](https://github.com/aws-powertools/powertools-lambda-python/issues/2875)) +* **deps:** bump github.com/aws/aws-sdk-go-v2/config from 1.18.28 to 1.18.29 in /layer/scripts/layer-balancer ([#2844](https://github.com/aws-powertools/powertools-lambda-python/issues/2844)) * **deps:** bump github.com/aws/aws-sdk-go-v2/service/lambda from 1.37.1 to 1.38.0 in /layer/scripts/layer-balancer ([#2843](https://github.com/aws-powertools/powertools-lambda-python/issues/2843)) +* **deps:** bump gitpython from 3.1.31 to 3.1.32 in /docs ([#2948](https://github.com/aws-powertools/powertools-lambda-python/issues/2948)) * **deps:** bump github.com/aws/aws-sdk-go-v2/config from 1.18.30 to 1.18.31 in /layer/scripts/layer-balancer ([#2889](https://github.com/aws-powertools/powertools-lambda-python/issues/2889)) -* **deps:** bump pypa/gh-action-pypi-publish from 1.8.8 to 1.8.9 ([#2943](https://github.com/aws-powertools/powertools-lambda-python/issues/2943)) -* **deps:** bump github.com/aws/aws-sdk-go-v2/service/lambda from 1.38.1 to 1.39.0 in /layer/scripts/layer-balancer ([#2890](https://github.com/aws-powertools/powertools-lambda-python/issues/2890)) -* **deps-dev:** bump the boto-typing group with 1 update ([#2944](https://github.com/aws-powertools/powertools-lambda-python/issues/2944)) -* **deps-dev:** bump mkdocs-material from 9.1.19 to 9.1.21 ([#2894](https://github.com/aws-powertools/powertools-lambda-python/issues/2894)) +* **deps:** bump pypa/gh-action-pypi-publish from 1.8.9 to 1.8.10 ([#2946](https://github.com/aws-powertools/powertools-lambda-python/issues/2946)) * **deps-dev:** bump aws-cdk from 2.88.0 to 2.89.0 ([#2887](https://github.com/aws-powertools/powertools-lambda-python/issues/2887)) +* **deps-dev:** bump mkdocs-material from 9.1.19 to 9.1.21 
([#2894](https://github.com/aws-powertools/powertools-lambda-python/issues/2894)) * **deps-dev:** bump the boto-typing group with 11 updates ([#2901](https://github.com/aws-powertools/powertools-lambda-python/issues/2901)) +* **deps-dev:** bump cfn-lint from 0.79.5 to 0.79.6 ([#2899](https://github.com/aws-powertools/powertools-lambda-python/issues/2899)) +* **deps-dev:** bump sentry-sdk from 1.28.1 to 1.29.0 ([#2900](https://github.com/aws-powertools/powertools-lambda-python/issues/2900)) * **deps-dev:** bump mypy-boto3-logs from 1.28.1 to 1.28.15 ([#2880](https://github.com/aws-powertools/powertools-lambda-python/issues/2880)) * **deps-dev:** bump mypy-boto3-appconfigdata from 1.28.0 to 1.28.15 ([#2879](https://github.com/aws-powertools/powertools-lambda-python/issues/2879)) * **deps-dev:** bump mypy-boto3-lambda from 1.28.11 to 1.28.15 ([#2878](https://github.com/aws-powertools/powertools-lambda-python/issues/2878)) * **deps-dev:** bump mypy-boto3-xray from 1.28.0 to 1.28.15 ([#2881](https://github.com/aws-powertools/powertools-lambda-python/issues/2881)) -* **deps-dev:** bump cfn-lint from 0.79.5 to 0.79.6 ([#2899](https://github.com/aws-powertools/powertools-lambda-python/issues/2899)) -* **deps-dev:** bump sentry-sdk from 1.28.1 to 1.29.0 ([#2900](https://github.com/aws-powertools/powertools-lambda-python/issues/2900)) * **deps-dev:** bump ruff from 0.0.280 to 0.0.281 ([#2891](https://github.com/aws-powertools/powertools-lambda-python/issues/2891)) -* **deps-dev:** bump cfn-lint from 0.79.4 to 0.79.5 ([#2870](https://github.com/aws-powertools/powertools-lambda-python/issues/2870)) * **deps-dev:** bump ruff from 0.0.281 to 0.0.282 ([#2905](https://github.com/aws-powertools/powertools-lambda-python/issues/2905)) +* **deps-dev:** bump mypy-boto3-dynamodb from 1.28.0 to 1.28.11 
([#2847](https://github.com/aws-powertools/powertools-lambda-python/issues/2847)) +* **deps-dev:** bump cfn-lint from 0.79.4 to 0.79.5 ([#2870](https://github.com/aws-powertools/powertools-lambda-python/issues/2870)) +* **deps-dev:** bump the boto-typing group with 4 updates ([#2928](https://github.com/aws-powertools/powertools-lambda-python/issues/2928)) * **deps-dev:** bump mypy-boto3-cloudformation from 1.28.10 to 1.28.12 ([#2864](https://github.com/aws-powertools/powertools-lambda-python/issues/2864)) * **deps-dev:** bump mypy-boto3-cloudwatch from 1.28.0 to 1.28.12 ([#2865](https://github.com/aws-powertools/powertools-lambda-python/issues/2865)) * **deps-dev:** bump cfn-lint from 0.79.3 to 0.79.4 ([#2862](https://github.com/aws-powertools/powertools-lambda-python/issues/2862)) * **deps-dev:** bump mypy-boto3-appconfig from 1.28.0 to 1.28.12 ([#2861](https://github.com/aws-powertools/powertools-lambda-python/issues/2861)) * **deps-dev:** bump mypy-boto3-ssm from 1.28.0 to 1.28.12 ([#2863](https://github.com/aws-powertools/powertools-lambda-python/issues/2863)) -* **deps-dev:** bump mypy-boto3-dynamodb from 1.28.0 to 1.28.11 ([#2847](https://github.com/aws-powertools/powertools-lambda-python/issues/2847)) -* **deps-dev:** bump the boto-typing group with 4 updates ([#2928](https://github.com/aws-powertools/powertools-lambda-python/issues/2928)) * **deps-dev:** bump aws-cdk from 2.89.0 to 2.90.0 ([#2932](https://github.com/aws-powertools/powertools-lambda-python/issues/2932)) -* **deps-dev:** bump mypy-boto3-lambda from 1.28.0 to 1.28.11 ([#2845](https://github.com/aws-powertools/powertools-lambda-python/issues/2845)) * **deps-dev:** bump ruff from 0.0.282 to 0.0.283 ([#2937](https://github.com/aws-powertools/powertools-lambda-python/issues/2937)) * **deps-dev:** bump ruff from 0.0.283 to 0.0.284 
([#2940](https://github.com/aws-powertools/powertools-lambda-python/issues/2940)) +* **deps-dev:** bump mypy-boto3-lambda from 1.28.0 to 1.28.11 ([#2845](https://github.com/aws-powertools/powertools-lambda-python/issues/2845)) +* **deps-dev:** bump the boto-typing group with 1 update ([#2944](https://github.com/aws-powertools/powertools-lambda-python/issues/2944)) +* **deps-dev:** bump aws-cdk from 2.90.0 to 2.91.0 ([#2947](https://github.com/aws-powertools/powertools-lambda-python/issues/2947)) * **deps-dev:** bump cfn-lint from 0.78.2 to 0.79.3 ([#2854](https://github.com/aws-powertools/powertools-lambda-python/issues/2854)) * **docs:** disable line length rule using older syntax ([#2920](https://github.com/aws-powertools/powertools-lambda-python/issues/2920)) * **docs:** include the environment variables section in the utilities documentation ([#2925](https://github.com/aws-powertools/powertools-lambda-python/issues/2925)) From 91e1ceccf567a80a21e315eba181aa8ce309374a Mon Sep 17 00:00:00 2001 From: roy <34189329+royassis@users.noreply.github.com> Date: Mon, 14 Aug 2023 12:04:08 +0300 Subject: [PATCH 10/24] feat(event_handler): allow stripping route prefixes using regexes (#2521) Co-authored-by: Roy Assis Co-authored-by: Ruben Fonseca --- .../event_handler/api_gateway.py | 34 +++++++++++++------ .../event_handler/lambda_function_url.py | 4 +-- .../event_handler/vpc_lattice.py | 4 +-- docs/core/event_handler/api_gateway.md | 10 +++++- .../src/strip_route_prefix_regex.py | 21 ++++++++++++ .../event_handler/test_api_gateway.py | 33 ++++++++++++++++++ 6 files changed, 90 insertions(+), 16 deletions(-) create mode 100644 examples/event_handler_rest/src/strip_route_prefix_regex.py diff --git a/aws_lambda_powertools/event_handler/api_gateway.py b/aws_lambda_powertools/event_handler/api_gateway.py index 446b1eca856..1e6fe2a50bb 100644 --- a/aws_lambda_powertools/event_handler/api_gateway.py +++ 
b/aws_lambda_powertools/event_handler/api_gateway.py @@ -520,7 +520,7 @@ def __init__( cors: Optional[CORSConfig] = None, debug: Optional[bool] = None, serializer: Optional[Callable[[Dict], str]] = None, - strip_prefixes: Optional[List[str]] = None, + strip_prefixes: Optional[List[Union[str, Pattern]]] = None, ): """ Parameters @@ -534,9 +534,10 @@ def __init__( environment variable serializer : Callable, optional function to serialize `obj` to a JSON formatted `str`, by default json.dumps - strip_prefixes: List[str], optional - optional list of prefixes to be removed from the request path before doing the routing. This is often used - with api gateways with multiple custom mappings. + strip_prefixes: List[Union[str, Pattern]], optional + optional list of prefixes to be removed from the request path before doing the routing. + This is often used with api gateways with multiple custom mappings. + Each prefix can be a static string or a compiled regex pattern """ self._proxy_type = proxy_type self._dynamic_routes: List[Route] = [] @@ -713,10 +714,21 @@ def _remove_prefix(self, path: str) -> str: return path for prefix in self._strip_prefixes: - if path == prefix: - return "/" - if self._path_starts_with(path, prefix): - return path[len(prefix) :] + if isinstance(prefix, str): + if path == prefix: + return "/" + + if self._path_starts_with(path, prefix): + return path[len(prefix) :] + + if isinstance(prefix, Pattern): + path = re.sub(prefix, "", path) + + # When using regexes, we might get into a point where everything is removed + # from the string, so we check if it's empty and return /, since there's nothing + # else to strip anymore. 
+ if not path: + return "/" return path @@ -911,7 +923,7 @@ def __init__( cors: Optional[CORSConfig] = None, debug: Optional[bool] = None, serializer: Optional[Callable[[Dict], str]] = None, - strip_prefixes: Optional[List[str]] = None, + strip_prefixes: Optional[List[Union[str, Pattern]]] = None, ): """Amazon API Gateway REST and HTTP API v1 payload resolver""" super().__init__(ProxyEventType.APIGatewayProxyEvent, cors, debug, serializer, strip_prefixes) @@ -942,7 +954,7 @@ def __init__( cors: Optional[CORSConfig] = None, debug: Optional[bool] = None, serializer: Optional[Callable[[Dict], str]] = None, - strip_prefixes: Optional[List[str]] = None, + strip_prefixes: Optional[List[Union[str, Pattern]]] = None, ): """Amazon API Gateway HTTP API v2 payload resolver""" super().__init__(ProxyEventType.APIGatewayProxyEventV2, cors, debug, serializer, strip_prefixes) @@ -956,7 +968,7 @@ def __init__( cors: Optional[CORSConfig] = None, debug: Optional[bool] = None, serializer: Optional[Callable[[Dict], str]] = None, - strip_prefixes: Optional[List[str]] = None, + strip_prefixes: Optional[List[Union[str, Pattern]]] = None, ): """Amazon Application Load Balancer (ALB) resolver""" super().__init__(ProxyEventType.ALBEvent, cors, debug, serializer, strip_prefixes) diff --git a/aws_lambda_powertools/event_handler/lambda_function_url.py b/aws_lambda_powertools/event_handler/lambda_function_url.py index 6978b29f451..433a013ab0b 100644 --- a/aws_lambda_powertools/event_handler/lambda_function_url.py +++ b/aws_lambda_powertools/event_handler/lambda_function_url.py @@ -1,4 +1,4 @@ -from typing import Callable, Dict, List, Optional +from typing import Callable, Dict, List, Optional, Pattern, Union from aws_lambda_powertools.event_handler import CORSConfig from aws_lambda_powertools.event_handler.api_gateway import ( @@ -51,6 +51,6 @@ def __init__( cors: Optional[CORSConfig] = None, debug: Optional[bool] = None, serializer: Optional[Callable[[Dict], str]] = None, - strip_prefixes: 
Optional[List[str]] = None, + strip_prefixes: Optional[List[Union[str, Pattern]]] = None, ): super().__init__(ProxyEventType.LambdaFunctionUrlEvent, cors, debug, serializer, strip_prefixes) diff --git a/aws_lambda_powertools/event_handler/vpc_lattice.py b/aws_lambda_powertools/event_handler/vpc_lattice.py index 1150f7224fb..b3cb042b40b 100644 --- a/aws_lambda_powertools/event_handler/vpc_lattice.py +++ b/aws_lambda_powertools/event_handler/vpc_lattice.py @@ -1,4 +1,4 @@ -from typing import Callable, Dict, List, Optional +from typing import Callable, Dict, List, Optional, Pattern, Union from aws_lambda_powertools.event_handler import CORSConfig from aws_lambda_powertools.event_handler.api_gateway import ( @@ -47,7 +47,7 @@ def __init__( cors: Optional[CORSConfig] = None, debug: Optional[bool] = None, serializer: Optional[Callable[[Dict], str]] = None, - strip_prefixes: Optional[List[str]] = None, + strip_prefixes: Optional[List[Union[str, Pattern]]] = None, ): """Amazon VPC Lattice resolver""" super().__init__(ProxyEventType.VPCLatticeEvent, cors, debug, serializer, strip_prefixes) diff --git a/docs/core/event_handler/api_gateway.md b/docs/core/event_handler/api_gateway.md index 708a9de6855..dcfa38f6f9a 100644 --- a/docs/core/event_handler/api_gateway.md +++ b/docs/core/event_handler/api_gateway.md @@ -272,7 +272,7 @@ When using [Custom Domain API Mappings feature](https://docs.aws.amazon.com/apig **Scenario**: You have a custom domain `api.mydomain.dev`. Then you set `/payment` API Mapping to forward any payment requests to your Payments API. -**Challenge**: This means your `path` value for any API requests will always contain `/payment/`, leading to HTTP 404 as Event Handler is trying to match what's after `payment/`. This gets further complicated with an [arbitrary level of nesting](https://github.com/aws-powertools/powertools-lambda-roadmap/issues/34){target="_blank"}. 
+**Challenge**: This means your `path` value for any API requests will always contain `/payment/`, leading to HTTP 404 as Event Handler is trying to match what's after `payment/`. This gets further complicated with an [arbitrary level of nesting](https://github.com/aws-powertools/powertools-lambda/issues/34){target="_blank"}. To address this API Gateway behavior, we use `strip_prefixes` parameter to account for these prefixes that are now injected into the path regardless of which type of API Gateway you're using. @@ -293,6 +293,14 @@ To address this API Gateway behavior, we use `strip_prefixes` parameter to accou For example, when using `strip_prefixes` value of `/pay`, there is no difference between a request path of `/pay` and `/pay/`; and the path argument would be defined as `/`. +For added flexibility, you can use regexes to strip a prefix. This is helpful when you have many options due to different combinations of prefixes (e.g: multiple environments, multiple versions). + +=== "strip_route_prefix_regex.py" + + ```python hl_lines="12" + --8<-- "examples/event_handler_rest/src/strip_route_prefix_regex.py" + ``` + ## Advanced ### CORS diff --git a/examples/event_handler_rest/src/strip_route_prefix_regex.py b/examples/event_handler_rest/src/strip_route_prefix_regex.py new file mode 100644 index 00000000000..4ea4b4249f4 --- /dev/null +++ b/examples/event_handler_rest/src/strip_route_prefix_regex.py @@ -0,0 +1,21 @@ +import re + +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.utilities.typing import LambdaContext + +# This will support: +# /v1/dev/subscriptions/ +# /v1/stg/subscriptions/ +# /v1/qa/subscriptions/ +# /v2/dev/subscriptions/ +# ... 
+app = APIGatewayRestResolver(strip_prefixes=[re.compile(r"/v[1-3]+/(dev|stg|qa)")]) + + +@app.get("/subscriptions/") +def get_subscription(subscription): + return {"subscription_id": subscription} + + +def lambda_handler(event: dict, context: LambdaContext) -> dict: + return app.resolve(event, context) diff --git a/tests/functional/event_handler/test_api_gateway.py b/tests/functional/event_handler/test_api_gateway.py index 26c71e1f27d..2afd1241bed 100644 --- a/tests/functional/event_handler/test_api_gateway.py +++ b/tests/functional/event_handler/test_api_gateway.py @@ -1,5 +1,6 @@ import base64 import json +import re import zlib from copy import deepcopy from decimal import Decimal @@ -1077,6 +1078,38 @@ def foo(): assert response["statusCode"] == 200 +@pytest.mark.parametrize( + "path", + [ + pytest.param("/stg/foo", id="path matched pay prefix"), + pytest.param("/dev/foo", id="path matched pay prefix with multiple numbers"), + pytest.param("/foo", id="path does not start with any of the prefixes"), + ], +) +def test_remove_prefix_by_regex(path: str): + app = ApiGatewayResolver(strip_prefixes=[re.compile(r"/(dev|stg)")]) + + @app.get("/foo") + def foo(): + ... + + response = app({"httpMethod": "GET", "path": path}, None) + + assert response["statusCode"] == 200 + + +def test_empty_path_when_using_regexes(): + app = ApiGatewayResolver(strip_prefixes=[re.compile(r"/(dev|stg)")]) + + @app.get("/") + def foo(): + ... 
+ + response = app({"httpMethod": "GET", "path": "/dev"}, None) + + assert response["statusCode"] == 200 + + @pytest.mark.parametrize( "prefix", [ From 9e4971ef4e66d9c35be767cbf25a9a5dcafbd0a9 Mon Sep 17 00:00:00 2001 From: Roger Zhang Date: Mon, 14 Aug 2023 10:47:48 -0700 Subject: [PATCH 11/24] feat(metrics): add Datadog observability provider (#2906) Co-authored-by: Leandro Damascena Co-authored-by: heitorlessa --- .markdownlintignore | 2 + aws_lambda_powertools/metrics/metrics.py | 2 + .../metrics/provider/base.py | 9 +- .../metrics/provider/datadog/__init__.py | 7 + .../metrics/provider/datadog/datadog.py | 391 ++++++++++++++++++ .../metrics/provider/datadog/metrics.py | 126 ++++++ .../metrics/provider/datadog/warnings.py | 8 + aws_lambda_powertools/shared/constants.py | 2 + docs/core/metrics.md | 34 +- docs/core/metrics/datadog.md | 259 ++++++++++++ docs/core/metrics/index.md | 6 + docs/index.md | 2 +- .../sam/dynamodb_batch_processing.yaml | 2 +- .../sam/kinesis_batch_processing.yaml | 2 +- .../sam/sqs_batch_processing.yaml | 2 +- examples/idempotency/templates/sam.yaml | 2 +- examples/logger/sam/template.yaml | 2 +- examples/metrics/sam/template.yaml | 2 +- .../metrics/src/clear_metrics_in_tests.py | 4 +- examples/metrics_datadog/sam/template.yaml | 39 ++ .../src/add_datadog_metrics.py | 9 + .../src/add_metrics_with_tags.py | 9 + .../src/add_metrics_with_timestamp.py | 11 + .../src/assert_single_datadog_metric.py | 9 + .../src/capture_cold_start_datadog_metric.py | 9 + .../src/capture_cold_start_metric_output.json | 8 + .../src/clear_datadog_metrics_in_tests.py | 13 + .../src/flush_datadog_metrics.py | 17 + .../src/flush_metrics_to_standard_output.py | 9 + .../src/log_metrics_output.json | 9 + .../src/log_metrics_standard_output.json | 8 + .../src/raise_on_empty_datadog_metrics.py | 10 + .../metrics_datadog/src/run_tests_env_var.sh | 1 + .../metrics_datadog/src/set_default_tags.py | 10 + .../src/set_default_tags_log_metrics.py | 11 + 
examples/tracer/sam/template.yaml | 2 +- includes/abbreviations.md | 1 + mkdocs.yml | 9 +- poetry.lock | 306 ++++++++++++-- pyproject.toml | 2 + tests/functional/metrics/conftest.py | 5 + .../metrics/test_metrics_datadog.py | 281 +++++++++++++ tests/unit/metrics/conftest.py | 6 + tests/unit/metrics/test_functions.py | 63 +++ tests/unit/metrics/test_unit_datadog.py | 69 ++++ 45 files changed, 1741 insertions(+), 49 deletions(-) create mode 100644 .markdownlintignore create mode 100644 aws_lambda_powertools/metrics/provider/datadog/__init__.py create mode 100644 aws_lambda_powertools/metrics/provider/datadog/datadog.py create mode 100644 aws_lambda_powertools/metrics/provider/datadog/metrics.py create mode 100644 aws_lambda_powertools/metrics/provider/datadog/warnings.py create mode 100644 docs/core/metrics/datadog.md create mode 100644 docs/core/metrics/index.md create mode 100644 examples/metrics_datadog/sam/template.yaml create mode 100644 examples/metrics_datadog/src/add_datadog_metrics.py create mode 100644 examples/metrics_datadog/src/add_metrics_with_tags.py create mode 100644 examples/metrics_datadog/src/add_metrics_with_timestamp.py create mode 100644 examples/metrics_datadog/src/assert_single_datadog_metric.py create mode 100644 examples/metrics_datadog/src/capture_cold_start_datadog_metric.py create mode 100644 examples/metrics_datadog/src/capture_cold_start_metric_output.json create mode 100644 examples/metrics_datadog/src/clear_datadog_metrics_in_tests.py create mode 100644 examples/metrics_datadog/src/flush_datadog_metrics.py create mode 100644 examples/metrics_datadog/src/flush_metrics_to_standard_output.py create mode 100644 examples/metrics_datadog/src/log_metrics_output.json create mode 100644 examples/metrics_datadog/src/log_metrics_standard_output.json create mode 100644 examples/metrics_datadog/src/raise_on_empty_datadog_metrics.py create mode 100644 examples/metrics_datadog/src/run_tests_env_var.sh create mode 100644 
examples/metrics_datadog/src/set_default_tags.py create mode 100644 examples/metrics_datadog/src/set_default_tags_log_metrics.py create mode 100644 includes/abbreviations.md create mode 100644 tests/functional/metrics/test_metrics_datadog.py create mode 100644 tests/unit/metrics/conftest.py create mode 100644 tests/unit/metrics/test_functions.py create mode 100644 tests/unit/metrics/test_unit_datadog.py diff --git a/.markdownlintignore b/.markdownlintignore new file mode 100644 index 00000000000..11b6d7ffe29 --- /dev/null +++ b/.markdownlintignore @@ -0,0 +1,2 @@ +docs/core/metrics/index.md +includes/abbreviations.md diff --git a/aws_lambda_powertools/metrics/metrics.py b/aws_lambda_powertools/metrics/metrics.py index 900e0da7dd7..cb970fcfdc0 100644 --- a/aws_lambda_powertools/metrics/metrics.py +++ b/aws_lambda_powertools/metrics/metrics.py @@ -51,6 +51,8 @@ def lambda_handler(): service name to be used as metric dimension, by default "service_undefined" namespace : str, optional Namespace for metrics + provider: AmazonCloudWatchEMFProvider, optional + Pre-configured AmazonCloudWatchEMFProvider provider Raises ------ diff --git a/aws_lambda_powertools/metrics/provider/base.py b/aws_lambda_powertools/metrics/provider/base.py index 8bd2440658a..702b4b3d2ba 100644 --- a/aws_lambda_powertools/metrics/provider/base.py +++ b/aws_lambda_powertools/metrics/provider/base.py @@ -179,8 +179,13 @@ def handler(event, context): e Propagate error received """ + extra_args = {} - default_dimensions = kwargs.get("default_dimensions") + if kwargs.get("default_dimensions"): + extra_args.update({"default_dimensions": kwargs.get("default_dimensions")}) + + if kwargs.get("default_tags"): + extra_args.update({"default_tags": kwargs.get("default_tags")}) # If handler is None we've been called with parameters # Return a partial function with args filled @@ -190,7 +195,7 @@ def handler(event, context): self.log_metrics, capture_cold_start_metric=capture_cold_start_metric, 
raise_on_empty_metrics=raise_on_empty_metrics, - default_dimensions=default_dimensions, + **extra_args, ) @functools.wraps(lambda_handler) diff --git a/aws_lambda_powertools/metrics/provider/datadog/__init__.py b/aws_lambda_powertools/metrics/provider/datadog/__init__.py new file mode 100644 index 00000000000..23cb35d31eb --- /dev/null +++ b/aws_lambda_powertools/metrics/provider/datadog/__init__.py @@ -0,0 +1,7 @@ +from aws_lambda_powertools.metrics.provider.datadog.datadog import DatadogProvider +from aws_lambda_powertools.metrics.provider.datadog.metrics import DatadogMetrics + +__all__ = [ + "DatadogMetrics", + "DatadogProvider", +] diff --git a/aws_lambda_powertools/metrics/provider/datadog/datadog.py b/aws_lambda_powertools/metrics/provider/datadog/datadog.py new file mode 100644 index 00000000000..6195589cd1b --- /dev/null +++ b/aws_lambda_powertools/metrics/provider/datadog/datadog.py @@ -0,0 +1,391 @@ +from __future__ import annotations + +import json +import logging +import numbers +import os +import re +import time +import warnings +from typing import Any, Callable, Dict, List, Optional + +from aws_lambda_powertools.metrics.exceptions import MetricValueError, SchemaValidationError +from aws_lambda_powertools.metrics.provider import BaseProvider +from aws_lambda_powertools.metrics.provider.datadog.warnings import DatadogDataValidationWarning +from aws_lambda_powertools.shared import constants +from aws_lambda_powertools.shared.functions import resolve_env_var_choice +from aws_lambda_powertools.utilities.typing import LambdaContext + +METRIC_NAME_REGEX = re.compile(r"^[a-zA-Z0-9_.]+$") + +logger = logging.getLogger(__name__) + +# Check if using datadog layer +try: + from datadog_lambda.metric import lambda_metric # type: ignore +except ImportError: # pragma: no cover + lambda_metric = None # pragma: no cover + +DEFAULT_NAMESPACE = "default" + + +class DatadogProvider(BaseProvider): + """ + DatadogProvider creates metrics asynchronously via Datadog 
extension or exporter. + + **Use `aws_lambda_powertools.DatadogMetrics` to create and metrics to Datadog.** + + Environment variables + --------------------- + POWERTOOLS_METRICS_NAMESPACE : str + metric namespace to be set for all metrics + + Raises + ------ + MetricValueError + When metric value isn't a number + SchemaValidationError + When metric object fails EMF schema validation + """ + + def __init__( + self, + metric_set: List | None = None, + namespace: str | None = None, + flush_to_log: bool | None = None, + default_tags: Dict[str, Any] | None = None, + ): + self.metric_set = metric_set if metric_set is not None else [] + self.namespace = ( + resolve_env_var_choice(choice=namespace, env=os.getenv(constants.METRICS_NAMESPACE_ENV)) + or DEFAULT_NAMESPACE + ) + self.default_tags = default_tags or {} + self.flush_to_log = resolve_env_var_choice(choice=flush_to_log, env=os.getenv(constants.DATADOG_FLUSH_TO_LOG)) + + # adding name,value,timestamp,tags + def add_metric( + self, + name: str, + value: float, + timestamp: int | None = None, + **tags, + ) -> None: + """ + The add_metrics function that will be used by metrics class. 
+ + Parameters + ---------- + name: str + Name/Key for the metrics + value: float + Value for the metrics + timestamp: int + Timestamp in int for the metrics, default = time.time() + tags: List[str] + In format like List["tag:value","tag2:value2"] + args: Any + extra args will be dropped for compatibility + kwargs: Any + extra kwargs will be converted into tags, e.g., add_metrics(sales=sam) -> tags=['sales:sam'] + + Examples + -------- + >>> provider = DatadogProvider() + >>> + >>> provider.add_metric( + >>> name='coffee_house.order_value', + >>> value=12.45, + >>> tags=['product:latte', 'order:online'], + >>> sales='sam' + >>> ) + """ + + # validating metric name + if not self._validate_datadog_metric_name(name): + docs = "https://docs.datadoghq.com/metrics/custom_metrics/#naming-custom-metrics" + raise SchemaValidationError( + f"Invalid metric name. Please ensure the metric {name} follows the requirements. \n" + f"See Datadog documentation here: \n {docs}", + ) + + # validating metric tag + self._validate_datadog_tags_name(tags) + + if not isinstance(value, numbers.Real): + raise MetricValueError(f"{value} is not a valid number") + + if not timestamp: + timestamp = int(time.time()) + + logger.debug({"details": "Appending metric", "metrics": name}) + self.metric_set.append({"m": name, "v": value, "e": timestamp, "t": tags}) + + def serialize_metric_set(self, metrics: List | None = None) -> List: + """Serializes metrics + + Example + ------- + **Serialize metrics into Datadog format** + + metrics = DatadogMetric() + # ...add metrics, tags, namespace + ret = metrics.serialize_metric_set() + + Returns + ------- + List + Serialized metrics following Datadog specification + + Raises + ------ + SchemaValidationError + Raised when serialization fail schema validation + """ + + if metrics is None: # pragma: no cover + metrics = self.metric_set + + if len(metrics) == 0: + raise SchemaValidationError("Must contain at least one metric.") + + output_list: List = [] + + 
logger.debug({"details": "Serializing metrics", "metrics": metrics}) + + for single_metric in metrics: + if self.namespace != DEFAULT_NAMESPACE: + metric_name = f"{self.namespace}.{single_metric['m']}" + else: + metric_name = single_metric["m"] + + output_list.append( + { + "m": metric_name, + "v": single_metric["v"], + "e": single_metric["e"], + "t": self._serialize_datadog_tags(metric_tags=single_metric["t"], default_tags=self.default_tags), + }, + ) + + return output_list + + # flush serialized data to output + def flush_metrics(self, raise_on_empty_metrics: bool = False) -> None: + """Manually flushes the metrics. This is normally not necessary, + unless you're running on other runtimes besides Lambda, where the @log_metrics + decorator already handles things for you. + + Parameters + ---------- + raise_on_empty_metrics : bool, optional + raise exception if no metrics are emitted, by default False + """ + if not raise_on_empty_metrics and len(self.metric_set) == 0: + warnings.warn( + "No application metrics to publish. The cold-start metric may be published if enabled. 
" + "If application metrics should never be empty, consider using 'raise_on_empty_metrics'", + stacklevel=2, + ) + + else: + logger.debug("Flushing existing metrics") + metrics = self.serialize_metric_set() + # submit through datadog extension + if lambda_metric and not self.flush_to_log: + # use lambda_metric function from datadog package, submit metrics to datadog + for metric_item in metrics: # pragma: no cover + lambda_metric( # pragma: no cover + metric_name=metric_item["m"], + value=metric_item["v"], + timestamp=metric_item["e"], + tags=metric_item["t"], + ) + else: + # dd module not found: flush to log, this format can be recognized via datadog log forwarder + # https://github.com/Datadog/datadog-lambda-python/blob/main/datadog_lambda/metric.py#L77 + for metric_item in metrics: + print(json.dumps(metric_item, separators=(",", ":"))) + + self.clear_metrics() + + def clear_metrics(self): + logger.debug("Clearing out existing metric set from memory") + self.metric_set.clear() + + def add_cold_start_metric(self, context: LambdaContext) -> None: + """Add cold start metric and function_name dimension + + Parameters + ---------- + context : Any + Lambda context + """ + logger.debug("Adding cold start metric and function_name tagging") + self.add_metric(name="ColdStart", value=1, function_name=context.function_name) + + def log_metrics( + self, + lambda_handler: Callable[[Dict, Any], Any] | Optional[Callable[[Dict, Any, Optional[Dict]], Any]] = None, + capture_cold_start_metric: bool = False, + raise_on_empty_metrics: bool = False, + **kwargs, + ): + """Decorator to serialize and publish metrics at the end of a function execution. + + Be aware that the log_metrics **does call* the decorated function (e.g. lambda_handler). 
+ + Example + ------- + **Lambda function using tracer and metrics decorators** + + from aws_lambda_powertools import Tracer + from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics + + metrics = DatadogMetrics(namespace="powertools") + tracer = Tracer(service="payment") + + @tracer.capture_lambda_handler + @metrics.log_metrics + def handler(event, context): + ... + + Parameters + ---------- + lambda_handler : Callable[[Any, Any], Any], optional + lambda function handler, by default None + capture_cold_start_metric : bool, optional + captures cold start metric, by default False + raise_on_empty_metrics : bool, optional + raise exception if no metrics are emitted, by default False + **kwargs + + Raises + ------ + e + Propagate error received + """ + + default_tags = kwargs.get("default_tags") + + if default_tags: + self.set_default_tags(**default_tags) + + return super().log_metrics( + lambda_handler=lambda_handler, + capture_cold_start_metric=capture_cold_start_metric, + raise_on_empty_metrics=raise_on_empty_metrics, + **kwargs, + ) + + def set_default_tags(self, **tags) -> None: + """Persist tags across Lambda invocations + + Parameters + ---------- + tags : **kwargs + tags as key=value + + Example + ------- + **Sets some default dimensions that will always be present across metrics and invocations** + + from aws_lambda_powertools import Metrics + + metrics = Metrics(namespace="ServerlessAirline", service="payment") + metrics.set_default_tags(environment="demo", another="one") + + @metrics.log_metrics() + def lambda_handler(): + return True + """ + self._validate_datadog_tags_name(tags) + self.default_tags.update(**tags) + + @staticmethod + def _serialize_datadog_tags(metric_tags: Dict[str, Any], default_tags: Dict[str, Any]) -> List[str]: + """ + Serialize metric tags into a list of formatted strings for Datadog integration. + + This function takes a dictionary of metric-specific tags or default tags. 
+ It parse these tags and converts them into a list of strings in the format "tag_key:tag_value". + + Parameters + ---------- + metric_tags: Dict[str, Any] + A dictionary containing metric-specific tags. + default_tags: Dict[str, Any] + A dictionary containing default tags applicable to all metrics. + + Returns: + ------- + List[str] + A list of formatted tag strings, each in the "tag_key:tag_value" format. + + Example: + >>> metric_tags = {'environment': 'production', 'service': 'web'} + >>> serialize_datadog_tags(metric_tags, None) + ['environment:production', 'service:web'] + """ + tags = metric_tags or default_tags + + return [f"{tag_key}:{tag_value}" for tag_key, tag_value in tags.items()] + + @staticmethod + def _validate_datadog_tags_name(tags: Dict): + """ + Validate a metric tag according to specific requirements. + + Metric tags must start with a letter. + Metric tags must not exceed 200 characters. Fewer than 100 is preferred from a UI perspective. + + More information here: https://docs.datadoghq.com/getting_started/tagging/#define-tags + + Parameters: + ---------- + tags: Dict + The metric tags to be validated. + """ + for tag_key, tag_value in tags.items(): + tag = f"{tag_key}:{tag_value}" + if not tag[0].isalpha() or len(tag) > 200: + docs = "https://docs.datadoghq.com/getting_started/tagging/#define-tags" + warnings.warn( + f"Invalid tag value. Please ensure the specific tag {tag} follows the requirements. \n" + f"May incur data loss for metrics. \n" + f"See Datadog documentation here: \n {docs}", + DatadogDataValidationWarning, + stacklevel=2, + ) + + @staticmethod + def _validate_datadog_metric_name(metric_name: str) -> bool: + """ + Validate a metric name according to specific requirements. + + Metric names must start with a letter. + Metric names must only contain ASCII alphanumerics, underscores, and periods. + Other characters, including spaces, are converted to underscores. + Unicode is not supported. 
+ Metric names must not exceed 200 characters. Fewer than 100 is preferred from a UI perspective. + + More information here: https://docs.datadoghq.com/metrics/custom_metrics/#naming-custom-metrics + + Parameters: + ---------- + metric_name: str + The metric name to be validated. + + Returns: + ------- + bool + True if the metric name is valid, False otherwise. + """ + + # Check if the metric name starts with a letter + # Check if the metric name contains more than 200 characters + # Check if the resulting metric name only contains ASCII alphanumerics, underscores, and periods + if not metric_name[0].isalpha() or len(metric_name) > 200 or not METRIC_NAME_REGEX.match(metric_name): + return False + + return True diff --git a/aws_lambda_powertools/metrics/provider/datadog/metrics.py b/aws_lambda_powertools/metrics/provider/datadog/metrics.py new file mode 100644 index 00000000000..3ee4dc2f835 --- /dev/null +++ b/aws_lambda_powertools/metrics/provider/datadog/metrics.py @@ -0,0 +1,126 @@ +# NOTE: keeps for compatibility +from __future__ import annotations + +from typing import Any, Callable, Dict, List, Optional + +from aws_lambda_powertools.metrics.provider.datadog.datadog import DatadogProvider + + +class DatadogMetrics: + """ + DatadogProvider creates metrics asynchronously via Datadog extension or exporter. 
+ + **Use `aws_lambda_powertools.DatadogMetrics` to create and metrics to Datadog.** + + Example + ------- + **Creates a few metrics and publish at the end of a function execution** + + from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics + + metrics = DatadogMetrics(namespace="ServerlessAirline") + + @metrics.log_metrics(capture_cold_start_metric=True) + def lambda_handler(): + metrics.add_metric(name="item_sold", value=1, product="latte", order="online") + return True + + Environment variables + --------------------- + POWERTOOLS_METRICS_NAMESPACE : str + metric namespace + + Parameters + ---------- + flush_to_log : bool, optional + Used when using export instead of Lambda Extension + namespace : str, optional + Namespace for metrics + provider: DatadogProvider, optional + Pre-configured DatadogProvider provider + + Raises + ------ + MetricValueError + When metric value isn't a number + SchemaValidationError + When metric object fails Datadog schema validation + """ + + # NOTE: We use class attrs to share metrics data across instances + # this allows customers to initialize Metrics() throughout their code base (and middlewares) + # and not get caught by accident with metrics data loss, or data deduplication + # e.g., m1 and m2 add metric ProductCreated, however m1 has 'version' dimension but m2 doesn't + # Result: ProductCreated is created twice as we now have 2 different EMF blobs + _metrics: List = [] + _default_tags: Dict[str, Any] = {} + + def __init__( + self, + namespace: str | None = None, + flush_to_log: bool | None = None, + provider: DatadogProvider | None = None, + ): + self.metric_set = self._metrics + self.default_tags = self._default_tags + + if provider is None: + self.provider = DatadogProvider( + namespace=namespace, + flush_to_log=flush_to_log, + metric_set=self.metric_set, + ) + else: + self.provider = provider + + def add_metric( + self, + name: str, + value: float, + timestamp: int | None = None, + **tags: Any, + ) -> 
None: + self.provider.add_metric(name=name, value=value, timestamp=timestamp, **tags) + + def serialize_metric_set(self, metrics: List | None = None) -> List: + return self.provider.serialize_metric_set(metrics=metrics) + + def flush_metrics(self, raise_on_empty_metrics: bool = False) -> None: + self.provider.flush_metrics(raise_on_empty_metrics=raise_on_empty_metrics) + + def log_metrics( + self, + lambda_handler: Callable[[Dict, Any], Any] | Optional[Callable[[Dict, Any, Optional[Dict]], Any]] = None, + capture_cold_start_metric: bool = False, + raise_on_empty_metrics: bool = False, + default_tags: Dict[str, Any] | None = None, + ): + return self.provider.log_metrics( + lambda_handler=lambda_handler, + capture_cold_start_metric=capture_cold_start_metric, + raise_on_empty_metrics=raise_on_empty_metrics, + default_tags=default_tags, + ) + + def set_default_tags(self, **tags) -> None: + self.provider.set_default_tags(**tags) + self.default_tags.update(**tags) + + def clear_metrics(self) -> None: + self.provider.clear_metrics() + + def clear_default_tags(self) -> None: + self.provider.default_tags.clear() + self.default_tags.clear() + + # We now allow customers to bring their own instance + # of the DatadogProvider provider + # So we need to define getter/setter for namespace property + # To access this attribute on the provider instance. 
+ @property + def namespace(self): + return self.provider.namespace + + @namespace.setter + def namespace(self, namespace): + self.provider.namespace = namespace diff --git a/aws_lambda_powertools/metrics/provider/datadog/warnings.py b/aws_lambda_powertools/metrics/provider/datadog/warnings.py new file mode 100644 index 00000000000..accf19526e7 --- /dev/null +++ b/aws_lambda_powertools/metrics/provider/datadog/warnings.py @@ -0,0 +1,8 @@ +class DatadogDataValidationWarning(Warning): + message: str + + def __init__(self, message: str): + self.message = message + + def __str__(self) -> str: + return self.message diff --git a/aws_lambda_powertools/shared/constants.py b/aws_lambda_powertools/shared/constants.py index eede3120833..900589ea973 100644 --- a/aws_lambda_powertools/shared/constants.py +++ b/aws_lambda_powertools/shared/constants.py @@ -12,6 +12,8 @@ METRICS_NAMESPACE_ENV: str = "POWERTOOLS_METRICS_NAMESPACE" +DATADOG_FLUSH_TO_LOG: str = "DD_FLUSH_TO_LOG" + SERVICE_NAME_ENV: str = "POWERTOOLS_SERVICE_NAME" XRAY_TRACE_ID_ENV: str = "_X_AMZN_TRACE_ID" LAMBDA_TASK_ROOT_ENV: str = "LAMBDA_TASK_ROOT" diff --git a/docs/core/metrics.md b/docs/core/metrics.md index 2fd4cfc98d1..31b4ea99ce7 100644 --- a/docs/core/metrics.md +++ b/docs/core/metrics.md @@ -1,5 +1,5 @@ --- -title: Metrics +title: Amazon CloudWatch EMF Metrics description: Core utility --- @@ -16,7 +16,7 @@ These metrics can be visualized through [Amazon CloudWatch Console](https://cons ## Terminologies -If you're new to Amazon CloudWatch, there are two terminologies you must be aware of before using this utility: +If you're new to Amazon CloudWatch, there are five terminologies you must be aware of before using this utility: * **Namespace**. It's the highest level container that will group multiple metrics from multiple services for a given application, for example `ServerlessEcommerce`. * **Dimensions**. Metrics metadata in key-value format. 
They help you slice and dice metrics visualization, for example `ColdStart` metric by Payment `service`. @@ -197,9 +197,9 @@ This has the advantage of keeping cold start metric separate from your applicati The following environment variable is available to configure Metrics at a global scope: -| Setting | Description | Environment variable | Default | -|--------------------|------------------------------------------------------------------------------|-----------------------------------------|---------| -| **Namespace Name** | Sets namespace used for metrics. | `POWERTOOLS_METRICS_NAMESPACE` | `None` | +| Setting | Description | Environment variable | Default | +| ------------------ | -------------------------------- | ------------------------------ | ------- | +| **Namespace Name** | Sets namespace used for metrics. | `POWERTOOLS_METRICS_NAMESPACE` | `None` | `POWERTOOLS_METRICS_NAMESPACE` is also available on a per-instance basis with the `namespace` parameter, which will consequently override the environment variable value. @@ -261,7 +261,7 @@ By default it will skip all previously defined dimensions including default dime ### Flushing metrics manually -If you are using the AWS Lambda Web Adapter project, or a middleware with custom metric logic, you can use `flush_metrics()`. This method will serialize, print metrics available to standard output, and clear in-memory metrics data. +If you are using the [AWS Lambda Web Adapter](https://github.com/awslabs/aws-lambda-web-adapter){target="_blank"} project, or a middleware with custom metric logic, you can use `flush_metrics()`. This method will serialize, print metrics available to standard output, and clear in-memory metrics data. ???+ warning This does not capture Cold Start metrics, and metric data validation still applies. 
@@ -286,9 +286,9 @@ You can use `EphemeralMetrics` class when looking to isolate multiple instances `EphemeralMetrics` has only one difference while keeping nearly the exact same set of features: -| Feature | Metrics | EphemeralMetrics | -| ----------------------------------------------------------------------------------------------------------- | ------- | ---------------- | -| **Share data across instances** (metrics, dimensions, metadata, etc.) | Yes | - | +| Feature | Metrics | EphemeralMetrics | +| --------------------------------------------------------------------- | ------- | ---------------- | +| **Share data across instances** (metrics, dimensions, metadata, etc.) | Yes | - | !!! question "Why not changing the default `Metrics` behaviour to not share data across instances?" @@ -327,6 +327,20 @@ These issues are exacerbated when you create **(A)** metric dimensions condition That is why `Metrics` shares data across instances by default, as that covers 80% of use cases and different personas using Powertools. This allows them to instantiate `Metrics` in multiple places throughout their code - be a separate file, a middleware, or an abstraction that sets default dimensions. +### Observability providers + +> An observability provider is an [AWS Lambda Partner](https://docs.aws.amazon.com/lambda/latest/dg/extensions-api-partners.html){target="_blank" rel="nofollow"} offering a platform for logging, metrics, traces, etc. + +We provide a thin-wrapper on top of the most requested observability providers. We strive to keep a similar UX as close as possible while keeping our value add features. + +!!! tip "Missing your preferred provider? Please create a [feature request](https://github.com/aws-powertools/powertools-lambda-python/issues/new?assignees=&labels=feature-request%2Ctriage&projects=&template=feature_request.yml&title=Feature+request%3A+TITLE){target="_blank"}." 
+ +Current providers: + +| Provider | Notes | +| ------------------------------------- | -------------------------------------------------------- | +| [Datadog](./datadog){target="_blank"} | Uses Datadog SDK and Datadog Lambda Extension by default | + ## Testing your code ### Setting environment variables @@ -384,4 +398,4 @@ You can read standard output and assert whether metrics have been flushed. Here' ``` ???+ tip - For more elaborate assertions and comparisons, check out [our functional testing for Metrics utility.](https://github.com/aws-powertools/powertools-lambda-python/blob/develop/tests/functional/test_metrics.py){target="_blank"} + For more elaborate assertions and comparisons, check out [our functional testing for Metrics utility.](https://github.com/aws-powertools/powertools-lambda-python/blob/develop/tests/functional/metrics/test_metrics_cloudwatch_emf.py){target="_blank"} diff --git a/docs/core/metrics/datadog.md b/docs/core/metrics/datadog.md new file mode 100644 index 00000000000..fb5927b3a63 --- /dev/null +++ b/docs/core/metrics/datadog.md @@ -0,0 +1,259 @@ +--- +title: Datadog +description: Metrics provider +--- + + +This observability provider creates custom metrics by flushing metrics to [Datadog Lambda extension](https://docs.datadoghq.com/serverless/installation/python/?tab=datadogcli){target="_blank" rel="nofollow"}, or to standard output via [Datadog Forwarder](https://docs.datadoghq.com/logs/guide/forwarder/?tab=cloudformation){target="_blank" rel="nofollow"}. These metrics can be visualized in the [Datadog console](https://app.datadoghq.com/metric/explore){target="_blank" rel="nofollow"}. 
+ + +```mermaid +stateDiagram-v2 + direction LR + LambdaFn: Your Lambda function + LambdaCode: DatadogMetrics + DatadogSDK: Datadog SDK + DatadogExtension: Datadog Lambda Extension + Datadog: Datadog Dashboard + LambdaExtension: Lambda Extension + + LambdaFn --> LambdaCode + LambdaCode --> DatadogSDK + DatadogSDK --> DatadogExtension + + state LambdaExtension { + DatadogExtension --> Datadog: async + } + +``` + +## Key features + +* Flush metrics to Datadog extension or standard output +* Validate against common metric definitions mistakes +* Support to add default tags + +## Terminologies + +If you're new to Datadog Metrics, there are three terminologies you must be aware of before using this utility: + +* **Namespace**. It's the highest level container that will group multiple metrics from multiple services for a given application, for example `ServerlessEcommerce`. +* **Metric**. It's the name of the metric, for example: SuccessfulBooking or UpdatedBooking. +* **Tags**. Metrics metadata in key-value pair format. They help provide contextual information, and filter or organize metrics. + +You can read more details in the [Datadog official documentation](https://docs.datadoghq.com/metrics/custom_metrics/){target="_blank" rel="nofollow"}. + +## Getting started + +???+ tip + All examples shared in this documentation are available within the [project repository](https://github.com/aws-powertools/powertools-lambda-python/tree/develop/examples){target="_blank" }. + +### Install + +> **Using Datadog Forwarder?** You can skip this step. + +We recommend using [Datadog SDK](https://docs.datadoghq.com/serverless/installation/python/){target="_blank" rel="nofollow"} and Datadog Lambda Extension with this feature for optimal results. 
+ +For Datadog SDK, you can add `aws-lambda-powertools[datadog]` as a dependency in your preferred tool, or as a Lambda Layer in the following example: + +```yaml hl_lines="15-16 28 32" title="AWS Serverless Application Model (SAM) example" +--8<-- "examples/metrics_datadog/sam/template.yaml" +``` + +### Creating metrics + +You can create metrics using `add_metric`. + +By default, we will generate the current timestamp for you. Alternatively, you can use the `timestamp` parameter to set a custom one in epoch time. + +=== "add_datadog_metrics.py" + + ```python hl_lines="4 7 9" + --8<-- "examples/metrics_datadog/src/add_datadog_metrics.py" + ``` + +=== "add_metrics_with_timestamp.py" + + ```python hl_lines="11" + --8<-- "examples/metrics_datadog/src/add_metrics_with_timestamp.py" + ``` + +???+ warning "Warning: Do not create metrics outside the handler" + Metrics added in the global scope will only be added during cold start. Disregard if that's the intended behavior. + +### Adding tags + +You can add any number of tags to your metrics via keyword arguments (`key=value`). They are helpful to filter, organize, and aggregate your metrics later. + +!!! info "We will emit a warning for tags [beyond the 200 chars limit](https://docs.datadoghq.com/getting_started/tagging/){target="_blank" rel="nofollow"}." + +=== "add_metrics_with_tags.py" + + ```python hl_lines="9" + --8<-- "examples/metrics_datadog/src/add_metrics_with_tags.py" + ``` + +### Adding default tags + +You can persist tags across Lambda invocations and `DatadogMetrics` instances via `set_default_tags` method, or `default_tags` parameter in the `log_metrics` decorator. + +If you'd like to remove them at some point, you can use the `clear_default_tags` method. + +???+ note "Metric tag takes precedence over default tags of the same name" + When adding tags with the same name via `add_metric` and `set_default_tags`, `add_metric` takes precedence. 
+ +=== "set_default_tags.py" + + ```python hl_lines="5" + --8<-- "examples/metrics_datadog/src/set_default_tags.py" + ``` + +=== "set_default_tags_log_metrics.py" + + ```python hl_lines="6 9" + --8<-- "examples/metrics_datadog/src/set_default_tags_log_metrics.py" + ``` + +### Flushing metrics + +Use `log_metrics` decorator to automatically serialize and flush your metrics (SDK or Forwarder) at the end of your invocation. + +This decorator also ensures metrics are flushed in the event of an exception, including warning you in case you forgot to add metrics. + +=== "add_metrics.py" + + ```python hl_lines="7" + --8<-- "examples/metrics_datadog/src/add_metrics_with_tags.py" + ``` + +=== "log_metrics_output.json" + + ```json hl_lines="2 6 7" + --8<-- "examples/metrics_datadog/src/log_metrics_output.json" + ``` + +#### Raising SchemaValidationError on empty metrics + +Use `raise_on_empty_metrics=True` if you want to ensure at least one metric is always emitted. + +```python hl_lines="7" title="Failing fast if no metrics are added" +--8<-- "examples/metrics_datadog/src/raise_on_empty_datadog_metrics.py" +``` + +???+ tip "Suppressing warning messages on empty metrics" + If you expect your function to execute without publishing metrics every time, you can suppress the warning with **`warnings.filterwarnings("ignore", "No metrics to publish*")`**. + +### Capturing cold start metric + +You can optionally capture cold start metrics with `log_metrics` decorator via `capture_cold_start_metric` param. 
+ +=== "capture_cold_start_metric.py" + + ```python hl_lines="7" + --8<-- "examples/metrics_datadog/src/capture_cold_start_datadog_metric.py" + ``` + +=== "capture_cold_start_metric_output.json" + + ```json hl_lines="2 6" + --8<-- "examples/metrics_datadog/src/capture_cold_start_metric_output.json" + ``` + +If it's a cold start invocation, this feature will: + +* Create a separate Datadog metric solely containing a metric named `ColdStart` +* Add `function_name` metric tag + +This has the advantage of keeping cold start metric separate from your application metrics, where you might have unrelated tags. + +???+ info + We do not emit 0 as a value for ColdStart metric for cost reasons. [Let us know](https://github.com/aws-powertools/powertools-lambda-python/issues/new?assignees=&labels=feature-request%2C+triage&template=feature_request.md&title=){target="_blank"} if you'd prefer a flag to override it. + +### Environment variables + +You can use any of the following environment variables to configure `DatadogMetrics`: + +| Setting | Description | Environment variable | Constructor parameter | +| -------------------- | -------------------------------------------------------------------------------- | ------------------------------ | --------------------- | +| **Metric namespace** | Logical container where all metrics will be placed e.g. `ServerlessAirline` | `POWERTOOLS_METRICS_NAMESPACE` | `namespace` | +| **Flush to log** | Use this when you want to flush metrics to be exported through Datadog Forwarder | `DD_FLUSH_TO_LOG` | `flush_to_log` | + +## Advanced + +### Flushing metrics manually + +If you are using the [AWS Lambda Web Adapter](https://github.com/awslabs/aws-lambda-web-adapter){target="_blank"} project, or a middleware with custom metric logic, you can use `flush_metrics()`. This method will serialize, print metrics available to standard output, and clear in-memory metrics data. 
+ +???+ warning + This does not capture Cold Start metrics, and metric data validation still applies. + +Contrary to the `log_metrics` decorator, you are now also responsible to flush metrics in the event of an exception. + +```python hl_lines="17" title="Manually flushing and clearing metrics from memory" +--8<-- "examples/metrics_datadog/src/flush_datadog_metrics.py" +``` + +### Integrating with Datadog Forwarder + +Use `flush_to_log=True` in `DatadogMetrics` to integrate with the legacy [Datadog Forwarder](https://docs.datadoghq.com/logs/guide/forwarder/?tab=cloudformation){target="_blank" rel="nofollow"}. + +This will serialize and flush metrics to standard output. + +=== "flush_metrics_to_standard_output.py" + + ```python hl_lines="4" + --8<-- "examples/metrics_datadog/src/flush_metrics_to_standard_output.py" + ``` + +=== "log_metrics_standard_output.json" + + ```json + --8<-- "examples/metrics_datadog/src/log_metrics_standard_output.json" + ``` + +## Testing your code + +### Setting environment variables + +???+ tip + Ignore this section, if: + + * You are explicitly setting namespace via `namespace` parameter + * You're not instantiating `DatadogMetrics` in the global namespace + + For example, `DatadogMetrics(namespace="ServerlessAirline")` + +Make sure to set `POWERTOOLS_METRICS_NAMESPACE` before running your tests to prevent failing on `SchemaValidation` exception. You can set it before you run tests or via pytest plugins like [dotenv](https://pypi.org/project/pytest-dotenv/){target="_blank" rel="nofollow"}. + +```bash title="Injecting dummy metric namespace before running tests" +--8<-- "examples/metrics_datadog/src/run_tests_env_var.sh" +``` + +1. **`DD_FLUSH_TO_LOG=True`** makes it easier to test by flushing final metrics to standard output. + +### Clearing metrics + +`DatadogMetrics` keep metrics in memory across multiple instances. If you need to test this behavior, you can use the following Pytest fixture to ensure metrics are reset incl. 
cold start: + +```python title="Clearing metrics between tests" +--8<-- "examples/metrics_datadog/src/clear_datadog_metrics_in_tests.py" +``` + +### Functional testing + +You can read standard output and assert whether metrics have been flushed. Here's an example using `pytest` with `capsys` built-in fixture: + +=== "assert_single_datadog_metric.py" + + ```python hl_lines="7" + --8<-- "examples/metrics_datadog/src/assert_single_datadog_metric.py" + ``` + +=== "add_datadog_metrics.py" + + ```python + --8<-- "examples/metrics_datadog/src/add_datadog_metrics.py" + ``` + +???+ tip + For more elaborate assertions and comparisons, check out [our functional testing for DatadogMetrics utility.](https://github.com/aws-powertools/powertools-lambda-python/blob/develop/tests/functional/metrics/test_metrics_datadog.py){target="_blank"} diff --git a/docs/core/metrics/index.md b/docs/core/metrics/index.md new file mode 100644 index 00000000000..359ce28eb33 --- /dev/null +++ b/docs/core/metrics/index.md @@ -0,0 +1,6 @@ +--- +title: Metrics +description: Core utility +--- + +--8<-- "docs/core/metrics.md" diff --git a/docs/index.md b/docs/index.md index 54a0f2c58ad..4ea82dd127c 100644 --- a/docs/index.md +++ b/docs/index.md @@ -673,7 +673,7 @@ Compared with the [public Layer ARN](#lambda-layer) option, SAR allows you to ch ## Quick getting started ```bash title="Hello world example using SAM CLI" -sam init --app-template hello-world-powertools-python --name sam-app --package-type Zip --runtime python3.10 --no-tracing +sam init --app-template hello-world-powertools-python --name sam-app --package-type Zip --runtime python3.11 --no-tracing ``` ## Features diff --git a/examples/batch_processing/sam/dynamodb_batch_processing.yaml b/examples/batch_processing/sam/dynamodb_batch_processing.yaml index 2ed70d65a86..4e436c083e5 100644 --- a/examples/batch_processing/sam/dynamodb_batch_processing.yaml +++ b/examples/batch_processing/sam/dynamodb_batch_processing.yaml @@ -6,7 +6,7 @@ 
Globals: Function: Timeout: 5 MemorySize: 256 - Runtime: python3.10 + Runtime: python3.11 Tracing: Active Environment: Variables: diff --git a/examples/batch_processing/sam/kinesis_batch_processing.yaml b/examples/batch_processing/sam/kinesis_batch_processing.yaml index 314d4f8c98f..6c80bd2f333 100644 --- a/examples/batch_processing/sam/kinesis_batch_processing.yaml +++ b/examples/batch_processing/sam/kinesis_batch_processing.yaml @@ -6,7 +6,7 @@ Globals: Function: Timeout: 5 MemorySize: 256 - Runtime: python3.10 + Runtime: python3.11 Tracing: Active Environment: Variables: diff --git a/examples/batch_processing/sam/sqs_batch_processing.yaml b/examples/batch_processing/sam/sqs_batch_processing.yaml index 77871c3478b..2dd827107d4 100644 --- a/examples/batch_processing/sam/sqs_batch_processing.yaml +++ b/examples/batch_processing/sam/sqs_batch_processing.yaml @@ -6,7 +6,7 @@ Globals: Function: Timeout: 5 MemorySize: 256 - Runtime: python3.10 + Runtime: python3.11 Tracing: Active Environment: Variables: diff --git a/examples/idempotency/templates/sam.yaml b/examples/idempotency/templates/sam.yaml index 8443a0914d7..7c2f65a6a4d 100644 --- a/examples/idempotency/templates/sam.yaml +++ b/examples/idempotency/templates/sam.yaml @@ -17,7 +17,7 @@ Resources: HelloWorldFunction: Type: AWS::Serverless::Function Properties: - Runtime: python3.10 + Runtime: python3.11 Handler: app.py Policies: - Statement: diff --git a/examples/logger/sam/template.yaml b/examples/logger/sam/template.yaml index a72b96f32e2..ddaa2f16407 100644 --- a/examples/logger/sam/template.yaml +++ b/examples/logger/sam/template.yaml @@ -5,7 +5,7 @@ Description: Powertools for AWS Lambda (Python) version Globals: Function: Timeout: 5 - Runtime: python3.10 + Runtime: python3.11 Tracing: Active Environment: Variables: diff --git a/examples/metrics/sam/template.yaml b/examples/metrics/sam/template.yaml index 50a2964bc4b..ace4c71f2e1 100644 --- a/examples/metrics/sam/template.yaml +++ 
b/examples/metrics/sam/template.yaml @@ -5,7 +5,7 @@ Description: Powertools for AWS Lambda (Python) version Globals: Function: Timeout: 5 - Runtime: python3.10 + Runtime: python3.11 Tracing: Active Environment: Variables: diff --git a/examples/metrics/src/clear_metrics_in_tests.py b/examples/metrics/src/clear_metrics_in_tests.py index cea3879af83..a5462d3d9e1 100644 --- a/examples/metrics/src/clear_metrics_in_tests.py +++ b/examples/metrics/src/clear_metrics_in_tests.py @@ -1,7 +1,7 @@ import pytest from aws_lambda_powertools import Metrics -from aws_lambda_powertools.metrics import metrics as metrics_global +from aws_lambda_powertools.metrics.provider import cold_start @pytest.fixture(scope="function", autouse=True) @@ -9,6 +9,6 @@ def reset_metric_set(): # Clear out every metric data prior to every test metrics = Metrics() metrics.clear_metrics() - metrics_global.is_cold_start = True # ensure each test has cold start + cold_start.is_cold_start = True # ensure each test has cold start metrics.clear_default_dimensions() # remove persisted default dimensions, if any yield diff --git a/examples/metrics_datadog/sam/template.yaml b/examples/metrics_datadog/sam/template.yaml new file mode 100644 index 00000000000..39c8883c150 --- /dev/null +++ b/examples/metrics_datadog/sam/template.yaml @@ -0,0 +1,39 @@ +AWSTemplateFormatVersion: "2010-09-09" +Transform: AWS::Serverless-2016-10-31 +Description: Powertools for AWS Lambda (Python) version + +Globals: + Function: + Timeout: 5 + Runtime: python3.11 + Tracing: Active + Environment: + Variables: + POWERTOOLS_METRICS_NAMESPACE: ServerlessAirline + # [Production setup] + # DATADOG_API_KEY_SECRET_ARN: "" + # [Development only] + DD_API_KEY: "" + # Configuration details: https://docs.datadoghq.com/serverless/installation/python/?tab=datadogcli + DD_SITE: datadoghq.com + + Layers: + # Find the latest Layer version in the official documentation + # https://docs.powertools.aws.dev/lambda/python/latest/#lambda-layer + - !Sub 
arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:40 + # Find the latest Layer version in the Datadog official documentation + + # Datadog SDK + # Latest versions: https://github.com/DataDog/datadog-lambda-python/releases + - !Sub arn:aws:lambda:${AWS::Region}:464622532012:layer:Datadog-Python310:78 + + # Datadog Lambda Extension + # Latest versions: https://github.com/DataDog/datadog-lambda-extension/releases + - !Sub arn:aws:lambda:${AWS::Region}:464622532012:layer:Datadog-Extension:45 + +Resources: + CaptureLambdaHandlerExample: + Type: AWS::Serverless::Function + Properties: + CodeUri: ../src + Handler: capture_lambda_handler.handler diff --git a/examples/metrics_datadog/src/add_datadog_metrics.py b/examples/metrics_datadog/src/add_datadog_metrics.py new file mode 100644 index 00000000000..6fe6774152e --- /dev/null +++ b/examples/metrics_datadog/src/add_datadog_metrics.py @@ -0,0 +1,9 @@ +from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics +from aws_lambda_powertools.utilities.typing import LambdaContext + +metrics = DatadogMetrics() + + +@metrics.log_metrics # ensures metrics are flushed upon request completion/failure +def lambda_handler(event: dict, context: LambdaContext): + metrics.add_metric(name="SuccessfulBooking", value=1) diff --git a/examples/metrics_datadog/src/add_metrics_with_tags.py b/examples/metrics_datadog/src/add_metrics_with_tags.py new file mode 100644 index 00000000000..9ebb0680c13 --- /dev/null +++ b/examples/metrics_datadog/src/add_metrics_with_tags.py @@ -0,0 +1,9 @@ +from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics +from aws_lambda_powertools.utilities.typing import LambdaContext + +metrics = DatadogMetrics() + + +@metrics.log_metrics # ensures metrics are flushed upon request completion/failure +def lambda_handler(event: dict, context: LambdaContext): + metrics.add_metric(name="SuccessfulBooking", value=1, tag1="powertools", tag2="python") 
diff --git a/examples/metrics_datadog/src/add_metrics_with_timestamp.py b/examples/metrics_datadog/src/add_metrics_with_timestamp.py new file mode 100644 index 00000000000..b2bef65e9ab --- /dev/null +++ b/examples/metrics_datadog/src/add_metrics_with_timestamp.py @@ -0,0 +1,11 @@ +import time + +from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics +from aws_lambda_powertools.utilities.typing import LambdaContext + +metrics = DatadogMetrics() + + +@metrics.log_metrics # ensures metrics are flushed upon request completion/failure +def lambda_handler(event: dict, context: LambdaContext): + metrics.add_metric(name="SuccessfulBooking", value=1, timestamp=int(time.time())) diff --git a/examples/metrics_datadog/src/assert_single_datadog_metric.py b/examples/metrics_datadog/src/assert_single_datadog_metric.py new file mode 100644 index 00000000000..7b6ebf0909b --- /dev/null +++ b/examples/metrics_datadog/src/assert_single_datadog_metric.py @@ -0,0 +1,9 @@ +import add_datadog_metrics + + +def test_log_metrics(capsys): + add_datadog_metrics.lambda_handler({}, {}) + + log = capsys.readouterr().out.strip() # remove any extra line + + assert "SuccessfulBooking" in log # basic string assertion in JSON str diff --git a/examples/metrics_datadog/src/capture_cold_start_datadog_metric.py b/examples/metrics_datadog/src/capture_cold_start_datadog_metric.py new file mode 100644 index 00000000000..ec8c2fc1e19 --- /dev/null +++ b/examples/metrics_datadog/src/capture_cold_start_datadog_metric.py @@ -0,0 +1,9 @@ +from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics +from aws_lambda_powertools.utilities.typing import LambdaContext + +metrics = DatadogMetrics() + + +@metrics.log_metrics(capture_cold_start_metric=True) +def lambda_handler(event: dict, context: LambdaContext): + return diff --git a/examples/metrics_datadog/src/capture_cold_start_metric_output.json b/examples/metrics_datadog/src/capture_cold_start_metric_output.json new file mode 100644 
index 00000000000..ee7da985f66 --- /dev/null +++ b/examples/metrics_datadog/src/capture_cold_start_metric_output.json @@ -0,0 +1,8 @@ +{ + "m":"ColdStart", + "v":1, + "e":1691707488, + "t":[ + "function_name:HelloWorldFunction" + ] + } diff --git a/examples/metrics_datadog/src/clear_datadog_metrics_in_tests.py b/examples/metrics_datadog/src/clear_datadog_metrics_in_tests.py new file mode 100644 index 00000000000..e80552eba83 --- /dev/null +++ b/examples/metrics_datadog/src/clear_datadog_metrics_in_tests.py @@ -0,0 +1,13 @@ +import pytest + +from aws_lambda_powertools.metrics.provider import cold_start +from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics + + +@pytest.fixture(scope="function", autouse=True) +def reset_metric_set(): + # Clear out every metric data prior to every test + metrics = DatadogMetrics() + metrics.clear_metrics() + cold_start.is_cold_start = True # ensure each test has cold start + yield diff --git a/examples/metrics_datadog/src/flush_datadog_metrics.py b/examples/metrics_datadog/src/flush_datadog_metrics.py new file mode 100644 index 00000000000..89e02fc2f3f --- /dev/null +++ b/examples/metrics_datadog/src/flush_datadog_metrics.py @@ -0,0 +1,17 @@ +from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics +from aws_lambda_powertools.utilities.typing import LambdaContext + +metrics = DatadogMetrics() + + +def book_flight(flight_id: str, **kwargs): + # logic to book flight + ... 
+ metrics.add_metric(name="SuccessfulBooking", value=1) + + +def lambda_handler(event: dict, context: LambdaContext): + try: + book_flight(flight_id=event.get("flight_id", "")) + finally: + metrics.flush_metrics() diff --git a/examples/metrics_datadog/src/flush_metrics_to_standard_output.py b/examples/metrics_datadog/src/flush_metrics_to_standard_output.py new file mode 100644 index 00000000000..a58fe877925 --- /dev/null +++ b/examples/metrics_datadog/src/flush_metrics_to_standard_output.py @@ -0,0 +1,9 @@ +from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics +from aws_lambda_powertools.utilities.typing import LambdaContext + +metrics = DatadogMetrics(flush_to_log=True) + + +@metrics.log_metrics # ensures metrics are flushed upon request completion/failure +def lambda_handler(event: dict, context: LambdaContext): + metrics.add_metric(name="SuccessfulBooking", value=1) diff --git a/examples/metrics_datadog/src/log_metrics_output.json b/examples/metrics_datadog/src/log_metrics_output.json new file mode 100644 index 00000000000..782cea9dc4f --- /dev/null +++ b/examples/metrics_datadog/src/log_metrics_output.json @@ -0,0 +1,9 @@ +{ + "m":"SuccessfulBooking", + "v":1, + "e":1691707076, + "t":[ + "tag1:powertools", + "tag2:python" + ] +} diff --git a/examples/metrics_datadog/src/log_metrics_standard_output.json b/examples/metrics_datadog/src/log_metrics_standard_output.json new file mode 100644 index 00000000000..35fcb8a096a --- /dev/null +++ b/examples/metrics_datadog/src/log_metrics_standard_output.json @@ -0,0 +1,8 @@ +{ + "m":"SuccessfulBooking", + "v":1, + "e":1691768022, + "t":[ + + ] +} diff --git a/examples/metrics_datadog/src/raise_on_empty_datadog_metrics.py b/examples/metrics_datadog/src/raise_on_empty_datadog_metrics.py new file mode 100644 index 00000000000..2242b1dfe06 --- /dev/null +++ b/examples/metrics_datadog/src/raise_on_empty_datadog_metrics.py @@ -0,0 +1,10 @@ +from aws_lambda_powertools.metrics.provider.datadog import 
DatadogMetrics +from aws_lambda_powertools.utilities.typing import LambdaContext + +metrics = DatadogMetrics() + + +@metrics.log_metrics(raise_on_empty_metrics=True) # ensures metrics are flushed upon request completion/failure +def lambda_handler(event: dict, context: LambdaContext): + # no metrics being created will now raise SchemaValidationError + return diff --git a/examples/metrics_datadog/src/run_tests_env_var.sh b/examples/metrics_datadog/src/run_tests_env_var.sh new file mode 100644 index 00000000000..5663afd3ba4 --- /dev/null +++ b/examples/metrics_datadog/src/run_tests_env_var.sh @@ -0,0 +1 @@ +POWERTOOLS_METRICS_NAMESPACE="ServerlessAirline" DD_FLUSH_TO_LOG="True" python -m pytest # (1)! diff --git a/examples/metrics_datadog/src/set_default_tags.py b/examples/metrics_datadog/src/set_default_tags.py new file mode 100644 index 00000000000..94d4335b212 --- /dev/null +++ b/examples/metrics_datadog/src/set_default_tags.py @@ -0,0 +1,10 @@ +from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics +from aws_lambda_powertools.utilities.typing import LambdaContext + +metrics = DatadogMetrics() +metrics.set_default_tags(tag1="powertools", tag2="python") + + +@metrics.log_metrics # ensures metrics are flushed upon request completion/failure +def lambda_handler(event: dict, context: LambdaContext): + metrics.add_metric(name="SuccessfulBooking", value=1) diff --git a/examples/metrics_datadog/src/set_default_tags_log_metrics.py b/examples/metrics_datadog/src/set_default_tags_log_metrics.py new file mode 100644 index 00000000000..c276c1d53ff --- /dev/null +++ b/examples/metrics_datadog/src/set_default_tags_log_metrics.py @@ -0,0 +1,11 @@ +from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics +from aws_lambda_powertools.utilities.typing import LambdaContext + +metrics = DatadogMetrics() + +default_tags = {"tag1": "powertools", "tag2": "python"} + + +@metrics.log_metrics(default_tags=default_tags) # ensures metrics are flushed upon 
request completion/failure +def lambda_handler(event: dict, context: LambdaContext): + metrics.add_metric(name="SuccessfulBooking", value=1) diff --git a/examples/tracer/sam/template.yaml b/examples/tracer/sam/template.yaml index 3eb6ef0acd0..d9e7d8a29da 100644 --- a/examples/tracer/sam/template.yaml +++ b/examples/tracer/sam/template.yaml @@ -5,7 +5,7 @@ Description: Powertools for AWS Lambda (Python) version Globals: Function: Timeout: 5 - Runtime: python3.10 + Runtime: python3.11 Tracing: Active Environment: Variables: diff --git a/includes/abbreviations.md b/includes/abbreviations.md new file mode 100644 index 00000000000..ed52b93fe64 --- /dev/null +++ b/includes/abbreviations.md @@ -0,0 +1 @@ +*[observability provider]: An AWS Lambda Observability Partner diff --git a/mkdocs.yml b/mkdocs.yml index 49bf5a347e5..1b9f4545239 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -15,7 +15,10 @@ nav: - Features: - core/tracer.md - core/logger.md - - core/metrics.md + - Metrics: + - core/metrics/index.md + - Amazon CloudWatch EMF: core/metrics.md + - Datadog: core/metrics/datadog.md - Event Handler: - core/event_handler/api_gateway.md - core/event_handler/appsync.md @@ -57,7 +60,6 @@ theme: features: - header.autohide - navigation.sections - - navigation.expand - navigation.top - navigation.instant - navigation.indexes @@ -73,6 +75,7 @@ theme: markdown_extensions: - admonition + - abbr - pymdownx.tabbed: alternate_style: true - pymdownx.highlight: @@ -82,6 +85,8 @@ markdown_extensions: base_path: "." 
check_paths: true restrict_base_path: false + auto_append: + - includes/abbreviations.md - meta - toc: permalink: true diff --git a/poetry.lock b/poetry.lock index b101ea72568..00655322ec3 100644 --- a/poetry.lock +++ b/poetry.lock @@ -93,17 +93,17 @@ typeguard = ">=2.13.3,<2.14.0" [[package]] name = "aws-cdk-aws-apigatewayv2-alpha" -version = "2.90.0a0" +version = "2.91.0a0" description = "The CDK Construct Library for AWS::APIGatewayv2" optional = false python-versions = "~=3.7" files = [ - {file = "aws-cdk.aws-apigatewayv2-alpha-2.90.0a0.tar.gz", hash = "sha256:b81e8321a2a76594fd6d79725eb6136dad844aef9122fe666b7bc5f11bb18de7"}, - {file = "aws_cdk.aws_apigatewayv2_alpha-2.90.0a0-py3-none-any.whl", hash = "sha256:154f146d5a88c602aa477869aea0109c0691437e9a0f6a9d61d98b9b52c83b51"}, + {file = "aws-cdk.aws-apigatewayv2-alpha-2.91.0a0.tar.gz", hash = "sha256:a7b0e78862f3dd81cf13740df2ecda1c877545500872dc476f2dbf3807632a32"}, + {file = "aws_cdk.aws_apigatewayv2_alpha-2.91.0a0-py3-none-any.whl", hash = "sha256:e3d606055c2fe268d80f96052b583060a25fadcdee79d89a75f2eac4354f2e69"}, ] [package.dependencies] -aws-cdk-lib = "2.90.0" +aws-cdk-lib = "2.91.0" constructs = ">=10.0.0,<11.0.0" jsii = ">=1.86.0,<2.0.0" publication = ">=0.0.3" @@ -111,18 +111,18 @@ typeguard = ">=2.13.3,<2.14.0" [[package]] name = "aws-cdk-aws-apigatewayv2-authorizers-alpha" -version = "2.90.0a0" +version = "2.91.0a0" description = "Authorizers for AWS APIGateway V2" optional = false python-versions = "~=3.7" files = [ - {file = "aws-cdk.aws-apigatewayv2-authorizers-alpha-2.90.0a0.tar.gz", hash = "sha256:7aeda815ca63f14362c2d08b06c8e85a4694fc07b8c7ba2a1c20b9346792c74c"}, - {file = "aws_cdk.aws_apigatewayv2_authorizers_alpha-2.90.0a0-py3-none-any.whl", hash = "sha256:ebaeaeddbdcb16b4130ce948f3d8db42254a68372b497700643bd654331207b6"}, + {file = "aws-cdk.aws-apigatewayv2-authorizers-alpha-2.91.0a0.tar.gz", hash = "sha256:cafd747af66f92755f188172f0e892503bc73c26f0d6d95e5f733c67b0307fa8"}, + {file = 
"aws_cdk.aws_apigatewayv2_authorizers_alpha-2.91.0a0-py3-none-any.whl", hash = "sha256:972393ad1c220708616322946ba3f8936cbe143a69e543762295c1ea02d69849"}, ] [package.dependencies] -"aws-cdk.aws-apigatewayv2-alpha" = "2.90.0.a0" -aws-cdk-lib = "2.90.0" +"aws-cdk.aws-apigatewayv2-alpha" = "2.91.0.a0" +aws-cdk-lib = "2.91.0" constructs = ">=10.0.0,<11.0.0" jsii = ">=1.86.0,<2.0.0" publication = ">=0.0.3" @@ -130,18 +130,18 @@ typeguard = ">=2.13.3,<2.14.0" [[package]] name = "aws-cdk-aws-apigatewayv2-integrations-alpha" -version = "2.90.0a0" +version = "2.91.0a0" description = "Integrations for AWS APIGateway V2" optional = false python-versions = "~=3.7" files = [ - {file = "aws-cdk.aws-apigatewayv2-integrations-alpha-2.90.0a0.tar.gz", hash = "sha256:558f12d2e951ae424f828ea8e2bfecc5c4271b4eb0c583feadd7420fdaca7cc7"}, - {file = "aws_cdk.aws_apigatewayv2_integrations_alpha-2.90.0a0-py3-none-any.whl", hash = "sha256:b26ee8a02b06f99110383dc4ad9a4542415cc5c50c35aaccc90c63d80aeeb1fd"}, + {file = "aws-cdk.aws-apigatewayv2-integrations-alpha-2.91.0a0.tar.gz", hash = "sha256:db607df2563f0b839795a41218a59e3ebc29e906dd08aed7b0b59aceba0bde02"}, + {file = "aws_cdk.aws_apigatewayv2_integrations_alpha-2.91.0a0-py3-none-any.whl", hash = "sha256:34d0f103846613a72cfae8419be2e4302863a1e8f6e81951b0a51c2f62ab80b3"}, ] [package.dependencies] -"aws-cdk.aws-apigatewayv2-alpha" = "2.90.0.a0" -aws-cdk-lib = "2.90.0" +"aws-cdk.aws-apigatewayv2-alpha" = "2.91.0.a0" +aws-cdk-lib = "2.91.0" constructs = ">=10.0.0,<11.0.0" jsii = ">=1.86.0,<2.0.0" publication = ">=0.0.3" @@ -149,13 +149,13 @@ typeguard = ">=2.13.3,<2.14.0" [[package]] name = "aws-cdk-lib" -version = "2.90.0" +version = "2.91.0" description = "Version 2 of the AWS Cloud Development Kit library" optional = false python-versions = "~=3.7" files = [ - {file = "aws-cdk-lib-2.90.0.tar.gz", hash = "sha256:d99e304f96f1b04c41922cfa2fc98c1cdd7c88e45c6ebb980ecfc367cdc77e87"}, - {file = "aws_cdk_lib-2.90.0-py3-none-any.whl", hash = 
"sha256:ef481a40c3ece38aeaf15706ecbfeea19860b8a7b789ea7b28056a9f456c65d1"}, + {file = "aws-cdk-lib-2.91.0.tar.gz", hash = "sha256:1163926527a8b7da931cddea77a4824b929b3f775447c3b7427ecdef7701ce74"}, + {file = "aws_cdk_lib-2.91.0-py3-none-any.whl", hash = "sha256:ec2cadeb5727ea8259ad8a54ac9ff40502032cd2572c81f4594df93365da39da"}, ] [package.dependencies] @@ -201,12 +201,15 @@ requests = ">=0.14.0" [[package]] name = "aws-sam-translator" version = "1.73.0" +version = "1.73.0" description = "AWS SAM Translator is a library that transform SAM templates into AWS CloudFormation templates" optional = false python-versions = ">=3.7, <=4.0, !=4.0" files = [ {file = "aws-sam-translator-1.73.0.tar.gz", hash = "sha256:bfa7cad3a78f002edeec5e39fd61b616cf84f34f61010c5dc2f7a76845fe7a02"}, {file = "aws_sam_translator-1.73.0-py3-none-any.whl", hash = "sha256:c0132b065d743773fcd2573ed1ae60e0129fa46043fad76430261b098a811924"}, + {file = "aws-sam-translator-1.73.0.tar.gz", hash = "sha256:bfa7cad3a78f002edeec5e39fd61b616cf84f34f61010c5dc2f7a76845fe7a02"}, + {file = "aws_sam_translator-1.73.0-py3-none-any.whl", hash = "sha256:c0132b065d743773fcd2573ed1ae60e0129fa46043fad76430261b098a811924"}, ] [package.dependencies] @@ -308,17 +311,17 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "boto3" -version = "1.28.23" +version = "1.28.24" description = "The AWS SDK for Python" optional = false python-versions = ">= 3.7" files = [ - {file = "boto3-1.28.23-py3-none-any.whl", hash = "sha256:807d4a4698ba9a76d5901a1663ff1943d13efbc388908f38b60f209c3511f1d6"}, - {file = "boto3-1.28.23.tar.gz", hash = "sha256:839deb868d1278dd5a3f87208cfc4a8e259c95ca3cbe607cc322d435f02f63b0"}, + {file = "boto3-1.28.24-py3-none-any.whl", hash = "sha256:0300ca6ec8bc136eb316b32cc1e30c66b85bc497f5a5fe42e095ae4280569708"}, + {file = "boto3-1.28.24.tar.gz", hash = "sha256:9d1b4713c888e53a218648ad71522bee9bec9d83f2999fff2494675af810b632"}, ] [package.dependencies] -botocore = ">=1.31.23,<1.32.0" +botocore = 
">=1.31.24,<1.32.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.6.0,<0.7.0" @@ -327,13 +330,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.31.23" +version = "1.31.24" description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">= 3.7" files = [ - {file = "botocore-1.31.23-py3-none-any.whl", hash = "sha256:d0a95f74eb6bd99e8f52f16af0a430ba6cd1526744f40ffdd3fcccceeaf961c2"}, - {file = "botocore-1.31.23.tar.gz", hash = "sha256:f3258feaebce48f138eb2675168c4d33cc3d99e9f45af13cb8de47bdc2b9c573"}, + {file = "botocore-1.31.24-py3-none-any.whl", hash = "sha256:8c7ba9b09e9104e2d473214e1ffcf84b77e04cf6f5f2344942c1eed9e299f947"}, + {file = "botocore-1.31.24.tar.gz", hash = "sha256:2d8f412c67f9285219f52d5dbbb6ef0dfa9f606da29cbdd41b6d6474bcc4bbd4"}, ] [package.dependencies] @@ -344,6 +347,31 @@ urllib3 = ">=1.25.4,<1.27" [package.extras] crt = ["awscrt (==0.16.26)"] +[[package]] +name = "bytecode" +version = "0.13.0" +description = "Python module to generate and modify bytecode" +optional = false +python-versions = ">=3.6" +files = [ + {file = "bytecode-0.13.0-py3-none-any.whl", hash = "sha256:e69f92e7d27f99d5d7d76e6a824bd3d9ff857c72b59927aaf87e1a620f67fe50"}, + {file = "bytecode-0.13.0.tar.gz", hash = "sha256:6af3c2f0a31ce05dce41f7eea5cc380e33f5e8fbb7dcee3b52467a00acd52fcd"}, +] + +[[package]] +name = "bytecode" +version = "0.14.2" +description = "Python module to generate and modify bytecode" +optional = false +python-versions = ">=3.8" +files = [ + {file = "bytecode-0.14.2-py3-none-any.whl", hash = "sha256:e368a2b9bbd7c986133c951250db94fb32f774cfc49752a9db9073bcf9899762"}, + {file = "bytecode-0.14.2.tar.gz", hash = "sha256:386378d9025d68ddb144870ae74330a492717b11b8c9164c4034e88add808f0c"}, +] + +[package.dependencies] +typing-extensions = {version = "*", markers = "python_version < \"3.10\""} + [[package]] name = "cattrs" version = "23.1.2" @@ -736,6 +764,154 @@ ssh = ["bcrypt (>=3.1.5)"] test = 
["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] +[[package]] +name = "datadog" +version = "0.46.0" +description = "The Datadog Python library" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "datadog-0.46.0-py2.py3-none-any.whl", hash = "sha256:3d7bcda6177b43be4cdb52e16b4bdd4f9005716c0dd7cfea009e018c36bb7a3d"}, + {file = "datadog-0.46.0.tar.gz", hash = "sha256:e4fbc92a85e2b0919a226896ae45fc5e4b356c0c57f1c2659659dfbe0789c674"}, +] + +[package.dependencies] +requests = ">=2.6.0" + +[[package]] +name = "datadog-lambda" +version = "4.78.0" +description = "The Datadog AWS Lambda Library" +optional = false +python-versions = ">=3.7.0,<4" +files = [ + {file = "datadog_lambda-4.78.0-py3-none-any.whl", hash = "sha256:660bae6057f3b2033b0c035e9d542af491e40f9ce57b97b4891c491262b9148c"}, + {file = "datadog_lambda-4.78.0.tar.gz", hash = "sha256:3e57faa8f80ddd43b595355b92045fde8f9ed87efe8619133e82cebb87cbe434"}, +] + +[package.dependencies] +datadog = ">=0.41.0,<1.0.0" +ddtrace = "1.15.2" +importlib_metadata = {version = "*", markers = "python_version < \"3.8\""} +typing_extensions = {version = ">=4.0,<5.0", markers = "python_version < \"3.8\""} +urllib3 = "<2.0.0" +wrapt = ">=1.11.2,<2.0.0" + +[package.extras] +dev = ["boto3 (>=1.10.33,<2.0.0)", "flake8 (>=3.7.9,<4.0.0)", "httpretty (>=0.9.7,<0.10.0)", "nose2 (>=0.9.1,<0.10.0)", "requests (>=2.22.0,<3.0.0)"] + +[[package]] +name = "ddsketch" +version = "2.0.4" +description = "Distributed quantile sketches" +optional = false +python-versions = ">=2.7" +files = [ + {file = "ddsketch-2.0.4-py3-none-any.whl", hash = "sha256:3227a270fd686a29d3a7128f9352ccf852314410380fc11384356f1ae2a75938"}, + {file = "ddsketch-2.0.4.tar.gz", hash = "sha256:32f7314077fec8747d4faebaec2c854b5ffc399c5f552f73fa94024f48d74d64"}, +] + +[package.dependencies] +protobuf = {version = ">=3.0.0", markers = 
"python_version >= \"3.7\""} +six = "*" + +[[package]] +name = "ddtrace" +version = "1.15.2" +description = "Datadog APM client library" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "ddtrace-1.15.2-cp27-cp27m-macosx_11_0_x86_64.whl", hash = "sha256:ca0411333fbdb0fafa06d412bbd76ab8d2647cc9dcb8a7833952ce4fe09eb421"}, + {file = "ddtrace-1.15.2-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:e2603749f97a5191b32f710c8ec5248bb58f4f9a1cb337559f93c5f0f8cea33b"}, + {file = "ddtrace-1.15.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:8392087809e047f701e38ecc4f2990bcfe399a22c516a1dbcbdff50fb7382a79"}, + {file = "ddtrace-1.15.2-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:2f7649c24a7463be9b86d5f11ac6eaa2014896eaf409e67f3dc813a6bb0ed8b6"}, + {file = "ddtrace-1.15.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:dbdbc5bf3b2b56b8e61b241ee372d897b295344e269475f38e837c9bfe03ae2c"}, + {file = "ddtrace-1.15.2-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:23d39c72ad1844977a80d79206d773c3ec1f1346816b9e45427c25ef88597b4e"}, + {file = "ddtrace-1.15.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:da458bbbc4de14dd8e8f60aefe42a66c551a9f50c69c6e361acc7edab579a3e4"}, + {file = "ddtrace-1.15.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d07bb0c50d2df7ff9281bea83534db5127cee8ac2f94111c9544d03d49f60613"}, + {file = "ddtrace-1.15.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:499b3e3d0359e492792ecf8ab6efcf4b1991fbaa523338774333e9a2a66d9d37"}, + {file = "ddtrace-1.15.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eedd0937f83e0d7b261960365fec5771f39ced599c90f589548a1738a586799d"}, + {file = "ddtrace-1.15.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7643d20991cd7e1c21e65d8b5c292a9dca8d124f69f9e96cc2b5fb8d47802c3a"}, + {file = "ddtrace-1.15.2-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:3f6bb76fe33c2e4842236036f78b1bbdd4da0f2b846627ca7d72b01ac49b3076"}, + {file = "ddtrace-1.15.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ebc9b89501c8a557dab43170e4a12e90358130413a87a0276ccaa0038b0636a4"}, + {file = "ddtrace-1.15.2-cp310-cp310-win32.whl", hash = "sha256:c10ca0e3a63310d314ec7fa55d53f4b4434f06c4d321d64d757814679161bf5d"}, + {file = "ddtrace-1.15.2-cp310-cp310-win_amd64.whl", hash = "sha256:6208635442ea52ff3f97b9fc64ac25772cda8f105a607a385e55bf524bceefc5"}, + {file = "ddtrace-1.15.2-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:8524f460be02b402f63b11ad3b1177955c8608f814e1758b87f53f15bf9a7599"}, + {file = "ddtrace-1.15.2-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:5a2dd127a65e12189055818ab72d44d80587acaaf450c65624e0482d63ff9970"}, + {file = "ddtrace-1.15.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3350c647120fbc3355eb35ce054c88e63bc073d71949f377d59b1152a2ed0f4"}, + {file = "ddtrace-1.15.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:adb76713207f0ef688f68a539f9cb63e19cd149d48d36befb835f67f49395ed7"}, + {file = "ddtrace-1.15.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8ff5c250c5abfbbbd76a7d3167308a2373ad7e55ecf3c7c26a62fcd2be8a57"}, + {file = "ddtrace-1.15.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:264bed998870b63f548b96f57dd771014cd02ef0b21bb382e745900a7b72ef28"}, + {file = "ddtrace-1.15.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:20bfd8db155167d2ccfddc25b50649338534b12cb00f7ed08514af1eb6a4956e"}, + {file = "ddtrace-1.15.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:72479172bd10f5998188a05b0b4a109ccb2a93467a0aa1e6656d5396c83fb253"}, + {file = "ddtrace-1.15.2-cp311-cp311-win32.whl", hash = "sha256:23bee3d0eb971cc1565caa429620b82f2d69ef648e9c792046b9481188dba9ab"}, + {file = "ddtrace-1.15.2-cp311-cp311-win_amd64.whl", hash = 
"sha256:65a29d23ecfbc7cc4ca1069a5586aa836ae3978e64251414933432078bc29bc2"}, + {file = "ddtrace-1.15.2-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:3a2852da4a76503211ca8b77a50fc86df36ba15fab04b45a6a17faa386f53839"}, + {file = "ddtrace-1.15.2-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:244180c6decb244c7fda929dc5969b3a510e5a4857239063de1fae139fac9837"}, + {file = "ddtrace-1.15.2-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:46f9ba0c282a62953f03d1add8eae8c80613244bb93a1ff997dad71d07ce6c72"}, + {file = "ddtrace-1.15.2-cp35-cp35m-win32.whl", hash = "sha256:a39dbf1ca657cc3a876143301e5d775e2f9bcf2ed1e9b4366fb3cf9d6a345a82"}, + {file = "ddtrace-1.15.2-cp35-cp35m-win_amd64.whl", hash = "sha256:7cfd9514e82871321e86897fe567c7548fc45da523df591f3e5adc6633a5781c"}, + {file = "ddtrace-1.15.2-cp36-cp36m-macosx_11_0_x86_64.whl", hash = "sha256:3a2978b07d19d4ebf936fde1e455c61b3d88f103f1f9e360b9269fe1a1dc608a"}, + {file = "ddtrace-1.15.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e7795a7f65a6e844ab57a0b31d400e79c4a1f69d174fab8edc69e6d2db56962"}, + {file = "ddtrace-1.15.2-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aae5306b3b0ec48cb8ade3362629c31bd25999244addff0f4a2f6f3934509894"}, + {file = "ddtrace-1.15.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14fb33bd6d9fa70638d43de7b5170e1c9961d3fbc277314609941e108c45716d"}, + {file = "ddtrace-1.15.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:745ce3c9559fa331ef30208ff1ccaafe3ab3c02f2e01177c560c94acd6f4de27"}, + {file = "ddtrace-1.15.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:7706d35215d2cca0a89581ec11da56e25742914ae0865b928034ee9ad7278cf3"}, + {file = "ddtrace-1.15.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:0bc18e900d1495deb61093f1af38d94af6a1ca66dd541fd47bd093c3f3b80b4a"}, + {file = "ddtrace-1.15.2-cp36-cp36m-win32.whl", hash = 
"sha256:b13f4042ef3f391714aca5ca1f03ff3c24c1d201ab5af02f0405335aa5602ff5"}, + {file = "ddtrace-1.15.2-cp36-cp36m-win_amd64.whl", hash = "sha256:eb32e3b3d0f472447b3d427a075007135b3c39488c1fe0f1e097863f326a439b"}, + {file = "ddtrace-1.15.2-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:0953fd9a2555801d68674bb4905f64805efe1e02b3f11def21eb7655be046717"}, + {file = "ddtrace-1.15.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9436ec9cc00b70730d2c1777f11aca7f4863a49ddd27d0b1478e84c1a7667b6f"}, + {file = "ddtrace-1.15.2-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f7215b21c1eaf56b38bf46c66193db3736ecadeb9ae1b9ca780a91addbaa9853"}, + {file = "ddtrace-1.15.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15a5f7155b99fe9393bfa4f0e4ef2610ddf59e70aefcf99a95acae8b31e29cc4"}, + {file = "ddtrace-1.15.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:df103a600c2901dc54929ef58dee41887a0bb558efbf7e41a7489bd6264fcf44"}, + {file = "ddtrace-1.15.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d51a73238ad8ceff4232ffa94b860d61187b325e7fab746044dafa312d6bc415"}, + {file = "ddtrace-1.15.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bfc5777204c0c34465fc4ce38d8d1268d9f95ffcbf7e4025e9a5d3e87d3e17c3"}, + {file = "ddtrace-1.15.2-cp37-cp37m-win32.whl", hash = "sha256:9516dbfc974af9632d75e9c32b38e695b88ea18ebfa4580dd0f768bc05272fba"}, + {file = "ddtrace-1.15.2-cp37-cp37m-win_amd64.whl", hash = "sha256:a510252a3d5be6c29db2c69cbd2535268532e8d568fae06b295a06041e1b969d"}, + {file = "ddtrace-1.15.2-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:82995243719c87aefc85d7df0e1ae61bba8ae1f805d48cbaf2132beb215f1968"}, + {file = "ddtrace-1.15.2-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:ca5dd51910a81451d236fccdbf5d3ca8e284aa3be56f08db92644f85ef88c56e"}, + {file = "ddtrace-1.15.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5d76f485769c035df3ede4ad9830bac06aa8b69ac4617f2eb1251b1094468009"}, + {file = "ddtrace-1.15.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4dd5f5e477021b8810b2b685e1e16ba5a99f31239e22abc71794688b7f3e6e4d"}, + {file = "ddtrace-1.15.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4ec73676c60cc3cf08430f19a59daccbbb5770edc74ad15a99bf4237a40d0fb"}, + {file = "ddtrace-1.15.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6b140f11b89d902174df05e8b9c1eb1b522a63e6c60c5d68ccac8913bb371bbb"}, + {file = "ddtrace-1.15.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c7c8a0e140d28e49cf8cd96cdec8e17232c5525ed5c154729b8afb6cb93a8e2b"}, + {file = "ddtrace-1.15.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0112d258c3a681a63e5f057b9e3ee8504b60d773d95baf195462d9ff4096caa9"}, + {file = "ddtrace-1.15.2-cp38-cp38-win32.whl", hash = "sha256:6ea7b80eb8019a70c999ef8cfd34fd6078a2ae154007d124d5e642531bf1a9d6"}, + {file = "ddtrace-1.15.2-cp38-cp38-win_amd64.whl", hash = "sha256:282b8c9b46d7a8450325299cf348a0f1d8f9f34d174a0ea402bc1a1df4ad7cf3"}, + {file = "ddtrace-1.15.2-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:53b171404b59c1e030ea614e194d1483fb42437a02ffdd7f4a45175613dd7cb4"}, + {file = "ddtrace-1.15.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:9ba06236dd8bd64776b7b734dd9421709670fef090857448e75c97acb30cdce7"}, + {file = "ddtrace-1.15.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6abe5ba4396c9f7633cab68d0e81c5fd94f7c77b046b3ee969eded068a522d7"}, + {file = "ddtrace-1.15.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:61a1b48f97a07e2f422ec01bb23861716300cebe4afd917ab36bb4db68904da4"}, + {file = "ddtrace-1.15.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86e186dc66802e2d71b94330c1635fd4c3f881a1bb71747be162a57b7602daaa"}, + {file = "ddtrace-1.15.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:762b5a0777454299c4ac62177578969ed551c973063f87a8825d9d073e5250ce"}, + {file = "ddtrace-1.15.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:889d359f4382fde41893ba5c00b412cbea8502e1b6bb6c83bf87fa6e63cbfabe"}, + {file = "ddtrace-1.15.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c4d3c9ce3456181e535b9da42bde7c850dc7224039fd083e95b05010c2ff9748"}, + {file = "ddtrace-1.15.2-cp39-cp39-win32.whl", hash = "sha256:69e47d28327a7afb263c16cc6bf1227e1b2bf1fdb2d559dce913a138a3f36807"}, + {file = "ddtrace-1.15.2-cp39-cp39-win_amd64.whl", hash = "sha256:da780fbfe6dd749ee571a468b8e86f1fd4f51626d35626c2356f8a440efe0dfa"}, + {file = "ddtrace-1.15.2.tar.gz", hash = "sha256:e5c1a5965ea8d8260586769102d79522bc7d9758a271252bb58ee05d6c5cd9a8"}, +] + +[package.dependencies] +attrs = {version = ">=20", markers = "python_version > \"2.7\""} +bytecode = [ + {version = ">=0.13.0,<0.14.0", markers = "python_version == \"3.7\""}, + {version = "*", markers = "python_version >= \"3.8\""}, +] +cattrs = {version = "*", markers = "python_version >= \"3.7\""} +ddsketch = ">=2.0.1" +envier = "*" +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} +opentelemetry-api = {version = ">=1", markers = "python_version >= \"3.7\""} +protobuf = {version = ">=3", markers = "python_version >= \"3.7\""} +six = ">=1.12.0" +typing-extensions = "*" +xmltodict = ">=0.12" + +[package.extras] +opentracing = ["opentracing (>=2.0.0)"] + [[package]] name = "decorator" version = "5.1.1" @@ -747,6 +923,37 @@ files = [ {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, ] +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + +[[package]] +name = "envier" +version = "0.4.0" +description = "Python application configuration via the environment" +optional = false +python-versions = ">=2.7" +files = [ + {file = "envier-0.4.0-py3-none-any.whl", hash = "sha256:7b91af0f16ea3e56d91ec082f038987e81b441fc19c657a8b8afe0909740a706"}, + {file = "envier-0.4.0.tar.gz", hash = "sha256:e68dcd1ed67d8b6313883e27dff3e701b7fba944d2ed4b7f53d0cc2e12364a82"}, +] + +[package.extras] +mypy = ["mypy"] + [[package]] name = "exceptiongroup" version = "1.1.2" @@ -1762,6 +1969,21 @@ doc = ["nb2plots (>=0.6)", "numpydoc (>=1.1)", "pillow (>=8.2)", "pydata-sphinx- extra = ["lxml (>=4.5)", "pydot (>=1.4.1)", "pygraphviz (>=1.7)"] test = ["codecov (>=2.1)", "pytest (>=6.2)", "pytest-cov (>=2.12)"] +[[package]] +name = "opentelemetry-api" +version = "1.19.0" +description = "OpenTelemetry Python API" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_api-1.19.0-py3-none-any.whl", hash = "sha256:dcd2a0ad34b691964947e1d50f9e8c415c32827a1d87f0459a72deb9afdf5597"}, + {file = "opentelemetry_api-1.19.0.tar.gz", hash = "sha256:db374fb5bea00f3c7aa290f5d94cea50b659e6ea9343384c5f6c2bb5d5e8db65"}, +] + +[package.dependencies] +deprecated = ">=1.2.6" +importlib-metadata = ">=6.0,<7.0" + [[package]] name = "packaging" version = "23.1" @@ -1857,6 +2079,28 @@ importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name 
= "protobuf" +version = "4.24.0" +description = "" +optional = false +python-versions = ">=3.7" +files = [ + {file = "protobuf-4.24.0-cp310-abi3-win32.whl", hash = "sha256:81cb9c4621d2abfe181154354f63af1c41b00a4882fb230b4425cbaed65e8f52"}, + {file = "protobuf-4.24.0-cp310-abi3-win_amd64.whl", hash = "sha256:6c817cf4a26334625a1904b38523d1b343ff8b637d75d2c8790189a4064e51c3"}, + {file = "protobuf-4.24.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:ae97b5de10f25b7a443b40427033e545a32b0e9dda17bcd8330d70033379b3e5"}, + {file = "protobuf-4.24.0-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:567fe6b0647494845d0849e3d5b260bfdd75692bf452cdc9cb660d12457c055d"}, + {file = "protobuf-4.24.0-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:a6b1ca92ccabfd9903c0c7dde8876221dc7d8d87ad5c42e095cc11b15d3569c7"}, + {file = "protobuf-4.24.0-cp37-cp37m-win32.whl", hash = "sha256:a38400a692fd0c6944c3c58837d112f135eb1ed6cdad5ca6c5763336e74f1a04"}, + {file = "protobuf-4.24.0-cp37-cp37m-win_amd64.whl", hash = "sha256:5ab19ee50037d4b663c02218a811a5e1e7bb30940c79aac385b96e7a4f9daa61"}, + {file = "protobuf-4.24.0-cp38-cp38-win32.whl", hash = "sha256:e8834ef0b4c88666ebb7c7ec18045aa0f4325481d724daa624a4cf9f28134653"}, + {file = "protobuf-4.24.0-cp38-cp38-win_amd64.whl", hash = "sha256:8bb52a2be32db82ddc623aefcedfe1e0eb51da60e18fcc908fb8885c81d72109"}, + {file = "protobuf-4.24.0-cp39-cp39-win32.whl", hash = "sha256:ae7a1835721086013de193311df858bc12cd247abe4ef9710b715d930b95b33e"}, + {file = "protobuf-4.24.0-cp39-cp39-win_amd64.whl", hash = "sha256:44825e963008f8ea0d26c51911c30d3e82e122997c3c4568fd0385dd7bacaedf"}, + {file = "protobuf-4.24.0-py3-none-any.whl", hash = "sha256:82e6e9ebdd15b8200e8423676eab38b774624d6a1ad696a60d86a2ac93f18201"}, + {file = "protobuf-4.24.0.tar.gz", hash = "sha256:5d0ceb9de6e08311832169e601d1fc71bd8e8c779f3ee38a97a78554945ecb85"}, +] + [[package]] name = "publication" version = "0.0.3" @@ -2735,7 +2979,7 @@ watchmedo = ["PyYAML (>=3.10)"] name = "wrapt" 
version = "1.15.0" description = "Module for decorators, wrappers and monkey patching." -optional = true +optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" files = [ {file = "wrapt-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1"}, @@ -2831,6 +3075,17 @@ PyYAML = ">=4.2b1,<7.0" radon = ">=4,<6" requests = ">=2.0,<3.0" +[[package]] +name = "xmltodict" +version = "0.13.0" +description = "Makes working with XML feel like you are working with JSON" +optional = false +python-versions = ">=3.4" +files = [ + {file = "xmltodict-0.13.0-py2.py3-none-any.whl", hash = "sha256:aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852"}, + {file = "xmltodict-0.13.0.tar.gz", hash = "sha256:341595a488e3e01a85a9d8911d8912fd922ede5fecc4dce437eb4b6c8d037e56"}, +] + [[package]] name = "zipp" version = "3.15.0" @@ -2852,6 +3107,7 @@ aws-sdk = ["boto3"] datamasking-all = ["aws-encryption-sdk", "itsdangerous"] datamasking-aws-sdk = ["aws-encryption-sdk"] datamasking-itsdangerous = ["itsdangerous"] +datadog = [] parser = ["pydantic"] tracer = ["aws-xray-sdk"] validation = ["fastjsonschema"] diff --git a/pyproject.toml b/pyproject.toml index 0a332631f9d..f1bb04abecd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -78,6 +78,7 @@ ijson = "^3.2.2" typed-ast = { version = "^1.5.5", python = "< 3.8"} hvac = "^1.1.1" aws-requests-auth = "^0.4.3" +datadog-lambda = "^4.77.0" [tool.poetry.extras] parser = ["pydantic"] @@ -89,6 +90,7 @@ tracer = ["aws-xray-sdk"] all = ["pydantic", "aws-xray-sdk", "fastjsonschema", "itsdangerous", "aws-encryption-sdk"] # allow customers to run code locally without emulators (SAM CLI, etc.) 
aws-sdk = ["boto3"] +datadog=["datadog-lambda"] [tool.poetry.group.dev.dependencies] cfn-lint = "0.79.6" diff --git a/tests/functional/metrics/conftest.py b/tests/functional/metrics/conftest.py index cb0e083ca1f..2de3a0087c2 100644 --- a/tests/functional/metrics/conftest.py +++ b/tests/functional/metrics/conftest.py @@ -29,6 +29,11 @@ def metric() -> Dict[str, str]: return {"name": "single_metric", "unit": MetricUnit.Count, "value": 1} +@pytest.fixture +def metric_datadog() -> Dict[str, str]: + return {"name": "single_metric", "value": 1, "timestamp": 1691678198, "powertools": "datadog"} + + @pytest.fixture def metrics() -> List[Dict[str, str]]: return [ diff --git a/tests/functional/metrics/test_metrics_datadog.py b/tests/functional/metrics/test_metrics_datadog.py new file mode 100644 index 00000000000..c81c825f656 --- /dev/null +++ b/tests/functional/metrics/test_metrics_datadog.py @@ -0,0 +1,281 @@ +import json +import warnings +from collections import namedtuple + +import pytest +from test_metrics_provider import capture_metrics_output + +from aws_lambda_powertools.metrics.exceptions import MetricValueError, SchemaValidationError +from aws_lambda_powertools.metrics.provider.cold_start import reset_cold_start_flag +from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics, DatadogProvider + + +def test_datadog_coldstart(capsys): + reset_cold_start_flag() + + # GIVEN DatadogMetrics is initialized + dd_provider = DatadogProvider(flush_to_log=True) + metrics = DatadogMetrics(provider=dd_provider) + + LambdaContext = namedtuple("LambdaContext", "function_name") + + # WHEN log_metrics is used with capture_cold_start_metric + @metrics.log_metrics(capture_cold_start_metric=True) + def lambda_handler(event, context): + metrics.add_metric(name="item_sold", value=1, product="latte", order="online") + + lambda_handler({}, LambdaContext("example_fn2")) + logs = capsys.readouterr().out.strip() + + # THEN ColdStart metric and function_name and service 
dimension should be logged + assert "ColdStart" in logs + assert "example_fn2" in logs + + +def test_datadog_write_to_log_with_env_variable(capsys, monkeypatch): + # GIVEN DD_FLUSH_TO_LOG env is configured + monkeypatch.setenv("DD_FLUSH_TO_LOG", "True") + metrics = DatadogMetrics() + + # WHEN we add a metric + metrics.add_metric(name="item_sold", value=1, product="latte", order="online") + metrics.flush_metrics() + logs = capture_metrics_output(capsys) + + # THEN metrics is flushed to log + logs["e"] = "" + assert logs == json.loads('{"m":"item_sold","v":1,"e":"","t":["product:latte","order:online"]}') + + +def test_datadog_with_invalid_metric_value(): + # GIVEN DatadogMetrics is initialized + metrics = DatadogMetrics() + + # WHEN we pass an incorrect metric value (non-numeric) + # WHEN we attempt to serialize a valid Datadog metric + # THEN it should fail validation and raise MetricValueError + with pytest.raises(MetricValueError, match=".*is not a valid number"): + metrics.add_metric(name="item_sold", value="a", product="latte", order="online") + + +def test_datadog_with_invalid_metric_name(): + # GIVEN DatadogMetrics is initialized + metrics = DatadogMetrics() + + # WHEN we a metric name starting with a number + # WHEN we attempt to serialize a valid Datadog metric + # THEN it should fail validation and raise MetricValueError + with pytest.raises(SchemaValidationError, match="Invalid metric name.*"): + metrics.add_metric(name="1_item_sold", value="a", product="latte", order="online") + + +def test_datadog_raise_on_empty(): + # GIVEN DatadogMetrics is initialized + metrics = DatadogMetrics() + + LambdaContext = namedtuple("LambdaContext", "function_name") + + # WHEN we set raise_on_empty_metrics to True + @metrics.log_metrics(raise_on_empty_metrics=True) + def lambda_handler(event, context): + pass + + # THEN it should fail with no metric serialized + with pytest.raises(SchemaValidationError, match="Must contain at least one metric."): + lambda_handler({}, 
LambdaContext("example_fn")) + + +def test_datadog_tags_using_kwargs(capsys): + # GIVEN DatadogMetrics is initialized + metrics = DatadogMetrics(flush_to_log=True) + + # WHEN we add tags using kwargs + metrics.add_metric("order_valve", 12.45, sales="sam") + metrics.flush_metrics() + logs = capsys.readouterr().out.strip() + log_dict = json.loads(logs) + tag_list = log_dict.get("t") + + # THEN tags must be present + assert "sales:sam" in tag_list + + +def test_metrics_clear_metrics_after_invocation(metric_datadog): + # GIVEN DatadogMetrics is initialized + my_metrics = DatadogMetrics(flush_to_log=True) + my_metrics.add_metric(**metric_datadog) + + # WHEN log_metrics is used to flush metrics from memory + @my_metrics.log_metrics + def lambda_handler(evt, context): + pass + + lambda_handler({}, {}) + + # THEN metric set should be empty after function has been run + assert my_metrics.metric_set == [] + + +def test_metrics_decorator_with_metrics_warning(): + # GIVEN DatadogMetrics is initialized + my_metrics = DatadogMetrics(flush_to_log=True) + + # WHEN using the log_metrics decorator and no metrics have been added + @my_metrics.log_metrics + def lambda_handler(evt, context): + pass + + # THEN it should raise a warning instead of throwing an exception + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("default") + lambda_handler({}, {}) + assert len(w) == 1 + assert str(w[-1].message) == ( + "No application metrics to publish. The cold-start metric may be published if enabled. 
" + "If application metrics should never be empty, consider using 'raise_on_empty_metrics'" + ) + + +def test_metrics_with_default_namespace(capsys, namespace): + # GIVEN DatadogMetrics is initialized with default namespace + metrics = DatadogMetrics(flush_to_log=True) + + LambdaContext = namedtuple("LambdaContext", "function_name") + + # WHEN we add metrics + @metrics.log_metrics + def lambda_handler(event, context): + metrics.add_metric(name="item_sold", value=1, product="latte", order="online") + + lambda_handler({}, LambdaContext("example_fn2")) + logs = capsys.readouterr().out.strip() + + # THEN default namespace must be assumed + assert namespace not in logs + + +def test_datadog_with_non_default_namespace(capsys, namespace): + # GIVEN DatadogMetrics is initialized with a non-default namespace + metrics = DatadogMetrics(namespace=namespace, flush_to_log=True) + + LambdaContext = namedtuple("LambdaContext", "function_name") + + # WHEN log_metrics is used + @metrics.log_metrics + def lambda_handler(event, context): + metrics.add_metric(name="item_sold", value=1, product="latte", order="online") + + lambda_handler({}, LambdaContext("example_fn")) + logs = capsys.readouterr().out.strip() + + # THEN namespace must be present in logs + assert namespace in logs + + +def test_serialize_metrics(metric_datadog): + # GIVEN DatadogMetrics is initialized + my_metrics = DatadogMetrics(flush_to_log=True) + my_metrics.add_metric(**metric_datadog) + + # WHEN we serialize metrics + my_metrics.serialize_metric_set() + + # THEN metric set should be empty after function has been run + assert my_metrics.metric_set[0]["m"] == "single_metric" + + +def test_clear_metrics(metric): + # GIVEN DatadogMetrics is initialized + my_metrics = DatadogMetrics(flush_to_log=True) + my_metrics.add_metric(**metric) + my_metrics.clear_metrics() + + # THEN metric set should be empty after function has been run + assert my_metrics.metric_set == [] + + +def test_persist_default_tags(capsys): + # GIVEN 
DatadogMetrics is initialized and we persist a set of default tags + my_metrics = DatadogMetrics(flush_to_log=True) + my_metrics.set_default_tags(environment="test", log_group="/lambda/test") + + # WHEN we utilize log_metrics to serialize + # and flush metrics and clear all metrics and tags from memory + # at the end of a function execution + @my_metrics.log_metrics + def lambda_handler(evt, ctx): + my_metrics.add_metric(name="item_sold", value=1) + + lambda_handler({}, {}) + first_invocation = capsys.readouterr().out.strip() + + lambda_handler({}, {}) + second_invocation = capsys.readouterr().out.strip() + + # THEN we should have default tags in both outputs + assert "environment" in first_invocation + assert "environment" in second_invocation + + +def test_log_metrics_with_default_tags(capsys): + # GIVEN DatadogMetrics is initialized and we persist a set of default tags + my_metrics = DatadogMetrics(flush_to_log=True) + default_tags = {"environment": "test", "log_group": "/lambda/test"} + + # WHEN we utilize log_metrics with default dimensions to serialize + # and flush metrics and clear all metrics and tags from memory + # at the end of a function execution + @my_metrics.log_metrics(default_tags=default_tags) + def lambda_handler(evt, ctx): + my_metrics.add_metric(name="item_sold", value=1) + + lambda_handler({}, {}) + first_invocation = capsys.readouterr().out.strip() + + lambda_handler({}, {}) + second_invocation = capsys.readouterr().out.strip() + + # THEN we should have default tags in both outputs + assert "environment" in first_invocation + assert "environment" in second_invocation + + +def test_clear_default_tags(): + # GIVEN DatadogMetrics is initialized and we persist a set of default tags + my_metrics = DatadogMetrics() + my_metrics.set_default_tags(environment="test", log_group="/lambda/test") + + # WHEN they are removed via clear_default_tags method + my_metrics.clear_default_tags() + + # THEN there should be no default tags + assert not 
my_metrics.default_tags + + +def test_namespace_var_precedence(monkeypatch, namespace): + # GIVEN we use POWERTOOLS_METRICS_NAMESPACE + monkeypatch.setenv("POWERTOOLS_METRICS_NAMESPACE", "a_namespace") + my_metrics = DatadogMetrics(namespace=namespace, flush_to_log=True) + + # WHEN creating a metric and explicitly set a namespace + my_metrics.add_metric(name="item_sold", value=1) + + output = my_metrics.serialize_metric_set() + + # THEN namespace should match the explicitly passed variable and not the env var + assert output[0]["m"] == f"{namespace}.item_sold" + + +def test_namespace_env_var(monkeypatch): + # GIVEN POWERTOOLS_METRICS_NAMESPACE is set + env_namespace = "a_namespace" + monkeypatch.setenv("POWERTOOLS_METRICS_NAMESPACE", env_namespace) + my_metrics = DatadogMetrics(flush_to_log=True) + + # WHEN creating a metric and explicitly set a namespace + my_metrics.add_metric(name="item_sold", value=1) + + output = my_metrics.serialize_metric_set() + + # THEN namespace should match the explicitly passed variable and not the env var + assert output[0]["m"] == f"{env_namespace}.item_sold" diff --git a/tests/unit/metrics/conftest.py b/tests/unit/metrics/conftest.py new file mode 100644 index 00000000000..8d601e4d13b --- /dev/null +++ b/tests/unit/metrics/conftest.py @@ -0,0 +1,6 @@ +import pytest + + +@pytest.fixture +def namespace() -> str: + return "test_namespace" diff --git a/tests/unit/metrics/test_functions.py b/tests/unit/metrics/test_functions.py new file mode 100644 index 00000000000..f3414720bba --- /dev/null +++ b/tests/unit/metrics/test_functions.py @@ -0,0 +1,63 @@ +import pytest + +from aws_lambda_powertools.metrics.functions import ( + extract_cloudwatch_metric_resolution_value, + extract_cloudwatch_metric_unit_value, +) +from aws_lambda_powertools.metrics.provider.cloudwatch_emf.exceptions import ( + MetricResolutionError, + MetricUnitError, +) +from aws_lambda_powertools.metrics.provider.cloudwatch_emf.metric_properties import MetricResolution, 
MetricUnit + + +def test_extract_invalid_cloudwatch_metric_resolution_value(): + metric_resolutions = [resolution.value for resolution in MetricResolution] + + # GIVEN an invalid EMF resolution value + resolution = 2 + + # WHEN try to extract this value + # THEN must fail with MetricResolutionError + with pytest.raises(MetricResolutionError, match="Invalid metric resolution.*"): + extract_cloudwatch_metric_resolution_value(metric_resolutions, resolution=resolution) + + +def test_extract_valid_cloudwatch_metric_resolution_value(): + metric_resolutions = [resolution.value for resolution in MetricResolution] + + # GIVEN a valid EMF resolution value + resolution = 1 + + # WHEN try to extract this value + extracted_resolution_value = extract_cloudwatch_metric_resolution_value(metric_resolutions, resolution=resolution) + + # THEN value must be extracted + assert extracted_resolution_value == resolution + + +def test_extract_invalid_cloudwatch_metric_unit_value(): + metric_units = [unit.value for unit in MetricUnit] + metric_unit_valid_options = list(MetricUnit.__members__) + + # GIVEN an invalid EMF unit value + unit = "Fake" + + # WHEN try to extract this value + # THEN must fail with MetricUnitError + with pytest.raises(MetricUnitError, match="Invalid metric unit.*"): + extract_cloudwatch_metric_unit_value(metric_units, metric_unit_valid_options, unit=unit) + + +def test_extract_valid_cloudwatch_metric_unit_value(): + metric_units = [unit.value for unit in MetricUnit] + metric_unit_valid_options = list(MetricUnit.__members__) + + # GIVEN an invalid EMF unit value + unit = "Count" + + # WHEN try to extract this value + extracted_unit_value = extract_cloudwatch_metric_unit_value(metric_units, metric_unit_valid_options, unit=unit) + + # THEN value must be extracted + assert extracted_unit_value == unit diff --git a/tests/unit/metrics/test_unit_datadog.py b/tests/unit/metrics/test_unit_datadog.py new file mode 100644 index 00000000000..ab54e9730fe --- /dev/null +++ 
b/tests/unit/metrics/test_unit_datadog.py @@ -0,0 +1,69 @@ +import pytest + +from aws_lambda_powertools.metrics.exceptions import SchemaValidationError +from aws_lambda_powertools.metrics.provider.datadog import DatadogMetrics +from aws_lambda_powertools.metrics.provider.datadog.warnings import DatadogDataValidationWarning + + +def test_get_namespace_property(namespace): + # GIVEN DatadogMetrics is initialized + my_metrics = DatadogMetrics(namespace=namespace) + + # WHEN we try to access the namespace property + # THEN namespace property must be present + assert my_metrics.namespace == namespace + + +def test_set_namespace_property(namespace): + # GIVEN DatadogMetrics is initialized + my_metrics = DatadogMetrics() + + # WHEN we set the namespace property after ther initialization + my_metrics.namespace = namespace + + # THEN namespace property must be present + assert my_metrics.namespace == namespace + + +def test_default_tags_across_instances(): + # GIVEN DatadogMetrics is initialized and we persist a set of default tags + my_metrics = DatadogMetrics() + my_metrics.set_default_tags(environment="test", log_group="/lambda/test") + + # WHEN a new DatadogMetrics instance is created + same_metrics = DatadogMetrics() + + # THEN default tags should also be present in the new instance + assert "environment" in same_metrics.default_tags + + +def test_invalid_datadog_metric_name(): + metrics = DatadogMetrics() + + # GIVEN three metrics names with different invalid names + metric_name_1 = "1_metric" # Metric name must not start with number + metric_name_2 = "metric_รง" # Metric name must not contains unicode characters + metric_name_3 = "".join(["x" for _ in range(201)]) # Metric name must have less than 200 characters + + # WHEN we try to validate those metrics names + # THEN must be False + with pytest.raises(SchemaValidationError, match="Invalid metric name.*"): + metrics.add_metric(name=metric_name_1, value=1) + + with pytest.raises(SchemaValidationError, match="Invalid 
metric name.*"): + metrics.add_metric(name=metric_name_2, value=1) + + with pytest.raises(SchemaValidationError, match="Invalid metric name.*"): + metrics.add_metric(name=metric_name_3, value=1) + + +def test_invalid_datadog_metric_tag(): + metrics = DatadogMetrics() + + # GIVEN three metrics with different invalid tags + metric_tag_1 = "".join(["x" for _ in range(201)]) # Metric tags must have less than 200 characters + + # WHEN we try to validate those metrics tags + # THEN must be False + with pytest.warns(DatadogDataValidationWarning): + metrics.add_metric(name="metric_2", value=1, tag1=metric_tag_1) From bb621bcd3cfd0650f02911f1e4f3587acb041a14 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 14 Aug 2023 21:30:00 +0100 Subject: [PATCH 12/24] chore(deps-dev): bump xenon from 0.9.0 to 0.9.1 (#2955) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index 00655322ec3..b65032a5bb9 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3061,18 +3061,18 @@ files = [ [[package]] name = "xenon" -version = "0.9.0" +version = "0.9.1" description = "Monitor code metrics for Python on your CI server" optional = false python-versions = "*" files = [ - {file = "xenon-0.9.0-py2.py3-none-any.whl", hash = "sha256:994c80c7f1c6d40596b600b93734d85a5739208f31895ef99f1e4d362caf9e35"}, - {file = "xenon-0.9.0.tar.gz", hash = "sha256:d2b9cb6c6260f771a432c1e588e51fddb17858f88f73ef641e7532f7a5f58fb8"}, + {file = "xenon-0.9.1-py2.py3-none-any.whl", hash = "sha256:b2888a5764ebd57a1f9f1624fde86e8303cb30c686e492f19d98867c458f7870"}, + {file = "xenon-0.9.1.tar.gz", hash = "sha256:d6745111c3e258b749a4fd424b1b899d99ea183cea232365ee2f88fe7d80c03b"}, ] [package.dependencies] PyYAML = ">=4.2b1,<7.0" -radon = ">=4,<6" +radon = ">=4,<7" requests = ">=2.0,<3.0" [[package]] 
diff --git a/pyproject.toml b/pyproject.toml index f1bb04abecd..9e572b027ff 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -49,7 +49,7 @@ pdoc3 = "^0.10.0" pytest-asyncio = "^0.21.1" bandit = "^1.7.5" radon = "^5.1.0" -xenon = "^0.9.0" +xenon = "^0.9.1" mkdocs-git-revision-date-plugin = "^0.3.2" mike = "^1.1.2" pytest-xdist = "^3.3.1" From 64566efcdc3e5b18bdcfe04c98ac579ece3d82a8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 14 Aug 2023 22:38:45 +0100 Subject: [PATCH 13/24] chore(deps): bump actions/setup-node from 3.7.0 to 3.8.0 (#2957) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/publish_v2_layer.yml | 2 +- .github/workflows/reusable_deploy_v2_layer_stack.yml | 2 +- .github/workflows/reusable_deploy_v2_sar.yml | 2 +- .github/workflows/run-e2e-tests.yml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/publish_v2_layer.yml b/.github/workflows/publish_v2_layer.yml index 1b5c668d79d..eb6151c9b95 100644 --- a/.github/workflows/publish_v2_layer.yml +++ b/.github/workflows/publish_v2_layer.yml @@ -101,7 +101,7 @@ jobs: - name: Install poetry run: pipx install git+https://github.com/python-poetry/poetry@68b88e5390720a3dd84f02940ec5200bfce39ac6 # v1.5.0 - name: Setup Node.js - uses: actions/setup-node@e33196f7422957bea03ed53f6fbb155025ffc7b8 # v3.7.0 + uses: actions/setup-node@bea5baf987ba7aa777a8a0b4ace377a21c45c381 # v3.8.0 with: node-version: "16.12" - name: Setup python diff --git a/.github/workflows/reusable_deploy_v2_layer_stack.yml b/.github/workflows/reusable_deploy_v2_layer_stack.yml index ef06b82518e..fca8cb13fa6 100644 --- a/.github/workflows/reusable_deploy_v2_layer_stack.yml +++ b/.github/workflows/reusable_deploy_v2_layer_stack.yml @@ -156,7 +156,7 @@ jobs: aws-region: ${{ matrix.region }} role-to-assume: ${{ secrets.AWS_LAYERS_ROLE_ARN }} - name: Setup Node.js - uses: 
actions/setup-node@e33196f7422957bea03ed53f6fbb155025ffc7b8 # v3.7.0 + uses: actions/setup-node@bea5baf987ba7aa777a8a0b4ace377a21c45c381 # v3.8.0 with: node-version: "16.12" - name: Setup python diff --git a/.github/workflows/reusable_deploy_v2_sar.yml b/.github/workflows/reusable_deploy_v2_sar.yml index beab36f24c2..ee84d55ed45 100644 --- a/.github/workflows/reusable_deploy_v2_sar.yml +++ b/.github/workflows/reusable_deploy_v2_sar.yml @@ -111,7 +111,7 @@ jobs: aws-region: ${{ env.AWS_REGION }} role-to-assume: ${{ secrets.AWS_SAR_V2_ROLE_ARN }} - name: Setup Node.js - uses: actions/setup-node@e33196f7422957bea03ed53f6fbb155025ffc7b8 # v3.7.0 + uses: actions/setup-node@bea5baf987ba7aa777a8a0b4ace377a21c45c381 # v3.8.0 with: node-version: ${{ env.NODE_VERSION }} - name: Download artifact diff --git a/.github/workflows/run-e2e-tests.yml b/.github/workflows/run-e2e-tests.yml index 94e5f02e5c7..3285c375cd0 100644 --- a/.github/workflows/run-e2e-tests.yml +++ b/.github/workflows/run-e2e-tests.yml @@ -61,7 +61,7 @@ jobs: architecture: "x64" cache: "poetry" - name: Setup Node.js - uses: actions/setup-node@e33196f7422957bea03ed53f6fbb155025ffc7b8 # v3.7.0 + uses: actions/setup-node@bea5baf987ba7aa777a8a0b4ace377a21c45c381 # v3.8.0 with: node-version: "16.12" - name: Install CDK CLI From 75936e5f7afd277225b155811b92f192b72835b3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 15 Aug 2023 07:52:58 +0100 Subject: [PATCH 14/24] chore(deps-dev): bump cfn-lint from 0.79.6 to 0.79.7 (#2956) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Leandro Damascena --- poetry.lock | 6 +++--- pyproject.toml | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index b65032a5bb9..cfdcf134ffe 100644 --- a/poetry.lock +++ b/poetry.lock @@ -486,13 +486,13 @@ pycparser = "*" [[package]] name = "cfn-lint" -version = "0.79.6" +version = "0.79.7" 
description = "Checks CloudFormation templates for practices and behaviour that could potentially be improved" optional = false python-versions = ">=3.7, <=4.0, !=4.0" files = [ - {file = "cfn-lint-0.79.6.tar.gz", hash = "sha256:09fc9cc497fc6d15e8b822a98fa0628ed6f8e9bcce6c289d95b2fc71d50aa63f"}, - {file = "cfn_lint-0.79.6-py3-none-any.whl", hash = "sha256:b67154460acb466e837590caa7a76bd95ec908961f21ba6de2d985309668ea29"}, + {file = "cfn-lint-0.79.7.tar.gz", hash = "sha256:fcdc195a89810482af93a335b57500fc928111998d8389087f85fd59155fc904"}, + {file = "cfn_lint-0.79.7-py3-none-any.whl", hash = "sha256:05d819e47f74f4d4862b874b0cc911b4ff9773862601ffec8a4f8791df0eab3a"}, ] [package.dependencies] diff --git a/pyproject.toml b/pyproject.toml index 9e572b027ff..6916abe601e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -93,7 +93,7 @@ aws-sdk = ["boto3"] datadog=["datadog-lambda"] [tool.poetry.group.dev.dependencies] -cfn-lint = "0.79.6" +cfn-lint = "0.79.7" mypy = "^1.1.1" types-python-dateutil = "^2.8.19.6" httpx = ">=0.23.3,<0.25.0" From ce9f3f6d70190c13eae2effb6596c0378b796edb Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 15 Aug 2023 09:01:54 +0200 Subject: [PATCH 15/24] chore(ci): changelog rebuild (#2958) Co-authored-by: Powertools for AWS Lambda (Python) bot --- CHANGELOG.md | 39 ++++++++++++++++++++++----------------- 1 file changed, 22 insertions(+), 17 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index caf81f591a6..0bc22aa74fe 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -25,55 +25,60 @@ ## Features +* **event_handler:** allow stripping route prefixes using regexes ([#2521](https://github.com/aws-powertools/powertools-lambda-python/issues/2521)) * **layers:** add new comercial region Israel(Tel Aviv) ([#2907](https://github.com/aws-powertools/powertools-lambda-python/issues/2907)) +* **metrics:** add Datadog observability provider 
([#2906](https://github.com/aws-powertools/powertools-lambda-python/issues/2906)) * **metrics:** support to bring your own metrics provider ([#2194](https://github.com/aws-powertools/powertools-lambda-python/issues/2194)) ## Maintenance * **ci:** group dependabot updates ([#2896](https://github.com/aws-powertools/powertools-lambda-python/issues/2896)) * **ci:** enable protected branch auditing ([#2913](https://github.com/aws-powertools/powertools-lambda-python/issues/2913)) -* **deps:** bump github.com/aws/aws-sdk-go-v2/service/lambda from 1.38.1 to 1.39.0 in /layer/scripts/layer-balancer ([#2890](https://github.com/aws-powertools/powertools-lambda-python/issues/2890)) -* **deps:** bump the layer-balancer group in /layer/scripts/layer-balancer with 3 updates ([#2933](https://github.com/aws-powertools/powertools-lambda-python/issues/2933)) -* **deps:** bump slsa-framework/slsa-github-generator from 1.7.0 to 1.8.0 ([#2927](https://github.com/aws-powertools/powertools-lambda-python/issues/2927)) -* **deps:** bump pydantic from 1.10.11 to 1.10.12 ([#2846](https://github.com/aws-powertools/powertools-lambda-python/issues/2846)) -* **deps:** bump github.com/aws/aws-sdk-go-v2 from 1.19.0 to 1.19.1 in /layer/scripts/layer-balancer ([#2877](https://github.com/aws-powertools/powertools-lambda-python/issues/2877)) * **deps:** bump squidfunk/mkdocs-material from `33e28bd` to `cd3a522` in /docs ([#2859](https://github.com/aws-powertools/powertools-lambda-python/issues/2859)) +* **deps:** bump github.com/aws/aws-sdk-go-v2 from 1.19.0 to 1.19.1 in /layer/scripts/layer-balancer ([#2877](https://github.com/aws-powertools/powertools-lambda-python/issues/2877)) +* **deps:** bump the layer-balancer group in /layer/scripts/layer-balancer with 3 updates ([#2933](https://github.com/aws-powertools/powertools-lambda-python/issues/2933)) * 
**deps:** bump actions/dependency-review-action from 3.0.6 to 3.0.7 ([#2941](https://github.com/aws-powertools/powertools-lambda-python/issues/2941)) -* **deps:** bump the layer-balancer group in /layer/scripts/layer-balancer with 2 updates ([#2904](https://github.com/aws-powertools/powertools-lambda-python/issues/2904)) * **deps:** bump github.com/aws/aws-sdk-go-v2/service/lambda from 1.38.0 to 1.38.1 in /layer/scripts/layer-balancer ([#2876](https://github.com/aws-powertools/powertools-lambda-python/issues/2876)) -* **deps:** bump pypa/gh-action-pypi-publish from 1.8.8 to 1.8.9 ([#2943](https://github.com/aws-powertools/powertools-lambda-python/issues/2943)) +* **deps:** bump slsa-framework/slsa-github-generator from 1.7.0 to 1.8.0 ([#2927](https://github.com/aws-powertools/powertools-lambda-python/issues/2927)) +* **deps:** bump pydantic from 1.10.11 to 1.10.12 ([#2846](https://github.com/aws-powertools/powertools-lambda-python/issues/2846)) * **deps:** bump github.com/aws/aws-sdk-go-v2/config from 1.18.29 to 1.18.30 in /layer/scripts/layer-balancer ([#2875](https://github.com/aws-powertools/powertools-lambda-python/issues/2875)) +* **deps:** bump pypa/gh-action-pypi-publish from 1.8.8 to 1.8.9 ([#2943](https://github.com/aws-powertools/powertools-lambda-python/issues/2943)) * **deps:** bump github.com/aws/aws-sdk-go-v2/config from 1.18.28 to 1.18.29 in /layer/scripts/layer-balancer ([#2844](https://github.com/aws-powertools/powertools-lambda-python/issues/2844)) -* **deps:** bump github.com/aws/aws-sdk-go-v2/service/lambda from 1.37.1 to 1.38.0 in /layer/scripts/layer-balancer ([#2843](https://github.com/aws-powertools/powertools-lambda-python/issues/2843)) +* **deps:** bump the layer-balancer group in /layer/scripts/layer-balancer with 2 updates 
([#2904](https://github.com/aws-powertools/powertools-lambda-python/issues/2904)) * **deps:** bump gitpython from 3.1.31 to 3.1.32 in /docs ([#2948](https://github.com/aws-powertools/powertools-lambda-python/issues/2948)) -* **deps:** bump github.com/aws/aws-sdk-go-v2/config from 1.18.30 to 1.18.31 in /layer/scripts/layer-balancer ([#2889](https://github.com/aws-powertools/powertools-lambda-python/issues/2889)) * **deps:** bump pypa/gh-action-pypi-publish from 1.8.9 to 1.8.10 ([#2946](https://github.com/aws-powertools/powertools-lambda-python/issues/2946)) -* **deps-dev:** bump aws-cdk from 2.88.0 to 2.89.0 ([#2887](https://github.com/aws-powertools/powertools-lambda-python/issues/2887)) -* **deps-dev:** bump mkdocs-material from 9.1.19 to 9.1.21 ([#2894](https://github.com/aws-powertools/powertools-lambda-python/issues/2894)) +* **deps:** bump github.com/aws/aws-sdk-go-v2/service/lambda from 1.37.1 to 1.38.0 in /layer/scripts/layer-balancer ([#2843](https://github.com/aws-powertools/powertools-lambda-python/issues/2843)) +* **deps:** bump actions/setup-node from 3.7.0 to 3.8.0 ([#2957](https://github.com/aws-powertools/powertools-lambda-python/issues/2957)) +* **deps:** bump github.com/aws/aws-sdk-go-v2/service/lambda from 1.38.1 to 1.39.0 in /layer/scripts/layer-balancer ([#2890](https://github.com/aws-powertools/powertools-lambda-python/issues/2890)) +* **deps:** bump github.com/aws/aws-sdk-go-v2/config from 1.18.30 to 1.18.31 in /layer/scripts/layer-balancer ([#2889](https://github.com/aws-powertools/powertools-lambda-python/issues/2889)) * **deps-dev:** bump the boto-typing group with 11 updates ([#2901](https://github.com/aws-powertools/powertools-lambda-python/issues/2901)) +* **deps-dev:** bump aws-cdk from 2.88.0 to 2.89.0 
([#2887](https://github.com/aws-powertools/powertools-lambda-python/issues/2887)) * **deps-dev:** bump cfn-lint from 0.79.5 to 0.79.6 ([#2899](https://github.com/aws-powertools/powertools-lambda-python/issues/2899)) +* **deps-dev:** bump mkdocs-material from 9.1.19 to 9.1.21 ([#2894](https://github.com/aws-powertools/powertools-lambda-python/issues/2894)) * **deps-dev:** bump sentry-sdk from 1.28.1 to 1.29.0 ([#2900](https://github.com/aws-powertools/powertools-lambda-python/issues/2900)) +* **deps-dev:** bump ruff from 0.0.280 to 0.0.281 ([#2891](https://github.com/aws-powertools/powertools-lambda-python/issues/2891)) +* **deps-dev:** bump ruff from 0.0.281 to 0.0.282 ([#2905](https://github.com/aws-powertools/powertools-lambda-python/issues/2905)) * **deps-dev:** bump mypy-boto3-logs from 1.28.1 to 1.28.15 ([#2880](https://github.com/aws-powertools/powertools-lambda-python/issues/2880)) * **deps-dev:** bump mypy-boto3-appconfigdata from 1.28.0 to 1.28.15 ([#2879](https://github.com/aws-powertools/powertools-lambda-python/issues/2879)) * **deps-dev:** bump mypy-boto3-lambda from 1.28.11 to 1.28.15 ([#2878](https://github.com/aws-powertools/powertools-lambda-python/issues/2878)) * **deps-dev:** bump mypy-boto3-xray from 1.28.0 to 1.28.15 ([#2881](https://github.com/aws-powertools/powertools-lambda-python/issues/2881)) -* **deps-dev:** bump ruff from 0.0.280 to 0.0.281 ([#2891](https://github.com/aws-powertools/powertools-lambda-python/issues/2891)) -* **deps-dev:** bump ruff from 0.0.281 to 0.0.282 ([#2905](https://github.com/aws-powertools/powertools-lambda-python/issues/2905)) * **deps-dev:** bump mypy-boto3-dynamodb from 1.28.0 to 1.28.11 ([#2847](https://github.com/aws-powertools/powertools-lambda-python/issues/2847)) -* **deps-dev:** bump cfn-lint from 0.79.4 to 0.79.5 
([#2870](https://github.com/aws-powertools/powertools-lambda-python/issues/2870)) * **deps-dev:** bump the boto-typing group with 4 updates ([#2928](https://github.com/aws-powertools/powertools-lambda-python/issues/2928)) +* **deps-dev:** bump aws-cdk from 2.89.0 to 2.90.0 ([#2932](https://github.com/aws-powertools/powertools-lambda-python/issues/2932)) +* **deps-dev:** bump cfn-lint from 0.79.4 to 0.79.5 ([#2870](https://github.com/aws-powertools/powertools-lambda-python/issues/2870)) +* **deps-dev:** bump cfn-lint from 0.79.6 to 0.79.7 ([#2956](https://github.com/aws-powertools/powertools-lambda-python/issues/2956)) * **deps-dev:** bump mypy-boto3-cloudformation from 1.28.10 to 1.28.12 ([#2864](https://github.com/aws-powertools/powertools-lambda-python/issues/2864)) * **deps-dev:** bump mypy-boto3-cloudwatch from 1.28.0 to 1.28.12 ([#2865](https://github.com/aws-powertools/powertools-lambda-python/issues/2865)) * **deps-dev:** bump cfn-lint from 0.79.3 to 0.79.4 ([#2862](https://github.com/aws-powertools/powertools-lambda-python/issues/2862)) * **deps-dev:** bump mypy-boto3-appconfig from 1.28.0 to 1.28.12 ([#2861](https://github.com/aws-powertools/powertools-lambda-python/issues/2861)) * **deps-dev:** bump mypy-boto3-ssm from 1.28.0 to 1.28.12 ([#2863](https://github.com/aws-powertools/powertools-lambda-python/issues/2863)) -* **deps-dev:** bump aws-cdk from 2.89.0 to 2.90.0 ([#2932](https://github.com/aws-powertools/powertools-lambda-python/issues/2932)) * **deps-dev:** bump ruff from 0.0.282 to 0.0.283 ([#2937](https://github.com/aws-powertools/powertools-lambda-python/issues/2937)) * **deps-dev:** bump ruff from 0.0.283 to 0.0.284 ([#2940](https://github.com/aws-powertools/powertools-lambda-python/issues/2940)) +* **deps-dev:** bump cfn-lint from 0.78.2 to 0.79.3 
([#2854](https://github.com/aws-powertools/powertools-lambda-python/issues/2854)) * **deps-dev:** bump mypy-boto3-lambda from 1.28.0 to 1.28.11 ([#2845](https://github.com/aws-powertools/powertools-lambda-python/issues/2845)) -* **deps-dev:** bump the boto-typing group with 1 update ([#2944](https://github.com/aws-powertools/powertools-lambda-python/issues/2944)) * **deps-dev:** bump aws-cdk from 2.90.0 to 2.91.0 ([#2947](https://github.com/aws-powertools/powertools-lambda-python/issues/2947)) -* **deps-dev:** bump cfn-lint from 0.78.2 to 0.79.3 ([#2854](https://github.com/aws-powertools/powertools-lambda-python/issues/2854)) +* **deps-dev:** bump xenon from 0.9.0 to 0.9.1 ([#2955](https://github.com/aws-powertools/powertools-lambda-python/issues/2955)) +* **deps-dev:** bump the boto-typing group with 1 update ([#2944](https://github.com/aws-powertools/powertools-lambda-python/issues/2944)) * **docs:** disable line length rule using older syntax ([#2920](https://github.com/aws-powertools/powertools-lambda-python/issues/2920)) * **docs:** include the environment variables section in the utilities documentation ([#2925](https://github.com/aws-powertools/powertools-lambda-python/issues/2925)) * **maintenance:** enables publishing docs and changelog, running e2e tests only in the main repository ([#2924](https://github.com/aws-powertools/powertools-lambda-python/issues/2924)) From a9b6cac6bcd2e694044055b964ab68024e98bff7 Mon Sep 17 00:00:00 2001 From: aal80 Date: Tue, 15 Aug 2023 02:51:20 -0500 Subject: [PATCH 16/24] docs(metrics): update Datadog integration diagram (#2954) Co-authored-by: Leandro Damascena --- docs/core/metrics/datadog.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/core/metrics/datadog.md b/docs/core/metrics/datadog.md index fb5927b3a63..eb036fd3270 100644 --- a/docs/core/metrics/datadog.md +++ b/docs/core/metrics/datadog.md 
@@ -20,9 +20,10 @@ stateDiagram-v2 LambdaFn --> LambdaCode LambdaCode --> DatadogSDK DatadogSDK --> DatadogExtension + DatadogExtension --> Datadog: async state LambdaExtension { - DatadogExtension --> Datadog: async + DatadogExtension } ``` From 26d5af5bbb7ffab7ec4b25d59c77cc7e9e409763 Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Tue, 15 Aug 2023 11:07:00 +0200 Subject: [PATCH 17/24] docs(roadmap): add GovCloud and China region item (#2960) --- docs/roadmap.md | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/docs/roadmap.md b/docs/roadmap.md index fc8b4441754..c2de6829fb4 100644 --- a/docs/roadmap.md +++ b/docs/roadmap.md @@ -95,7 +95,7 @@ Since JWT is a close second, this new utility would cover higher level functions We want to make this easier by extending certain utilities to accept a `metrics` instance and metrics configuration (what metrics to create). It would be opt-in due to costs associated with creating metrics. -!!! question "Got ideas for custom metrics? Open up a [feature request](https://github.com/aws-powertools/powertools-lambda-python/issues/new?assignees=&labels=feature-request%2Ctriage&projects=&template=feature_request.yml&title=Feature+request%3A+TITLE)"{target="_blank"} +!!! question "Got ideas for custom metrics? Open up a [feature request](https://github.com/aws-powertools/powertools-lambda-python/issues/new?assignees=&labels=feature-request%2Ctriage&projects=&template=feature_request.yml&title=Feature+request%3A+TITLE)" **Major updates** @@ -104,6 +104,19 @@ We want to make this easier by extending certain utilities to accept a `metrics` - [ ] RFC to outline metrics for Event Handler (_e.g., validation errors_ ) - [ ] RFC to outline metrics for Idempotency (_e.g., cache hit_) +### Lambda Layer in GovCloud and China region + +We want to investigate security and scaling requirements for these special regions, so they're in sync for every release. + +!!! 
note "Help us prioritize it by reaching out to your AWS representatives or [via email](mailto:aws-lambda-powertools-feedback@amazon.com)." + +**Major updates** + +- [ ] Gather agencies and customers name to prioritize it +- [ ] Investigate security requirements for special regions +- [ ] Update CDK Layer construct to include regions +- [ ] Create additional infrastructure for special regions + ### V3 With Python 3.7 reaching [end-of-life in AWS Lambda by the end of the year](https://docs.aws.amazon.com/lambda/latest/dg/lambda-runtimes.html), we want to plan some breaking changes. As always, we plan on having ample notice, a detailed upgrade guide, and keep breaking changes to a minimum to ease transition (e.g., it took ~7 months from v2 to surpass v1 downloads). From 8f671c9906e688ffde9052b44be871dd02e6801f Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 25 Jul 2023 14:39:50 +0200 Subject: [PATCH 18/24] fix(parameters): make cache aware of single vs multiple calls Signed-off-by: heitorlessa --- aws_lambda_powertools/utilities/parameters/base.py | 2 +- aws_lambda_powertools/utilities/parameters/types.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/aws_lambda_powertools/utilities/parameters/base.py b/aws_lambda_powertools/utilities/parameters/base.py index e4be9d33cdc..78bf865faf0 100644 --- a/aws_lambda_powertools/utilities/parameters/base.py +++ b/aws_lambda_powertools/utilities/parameters/base.py @@ -27,7 +27,7 @@ from aws_lambda_powertools.shared import constants, user_agent from aws_lambda_powertools.shared.functions import resolve_max_age -from aws_lambda_powertools.utilities.parameters.types import TransformOptions +from aws_lambda_powertools.utilities.parameters.types import RecursiveOptions, TransformOptions from .exceptions import GetParameterError, TransformParameterError diff --git a/aws_lambda_powertools/utilities/parameters/types.py b/aws_lambda_powertools/utilities/parameters/types.py index 6a15873c496..2dbf1593d72 100644 
--- a/aws_lambda_powertools/utilities/parameters/types.py +++ b/aws_lambda_powertools/utilities/parameters/types.py @@ -1,3 +1,4 @@ from typing_extensions import Literal TransformOptions = Literal["json", "binary", "auto", None] +RecursiveOptions = Literal[True, False] From 78bab3ff85548d9ff61cc59c3b5b2ff60f3bc1d8 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 25 Jul 2023 15:16:51 +0200 Subject: [PATCH 19/24] chore: cleanup, add test for single and nested Signed-off-by: heitorlessa --- aws_lambda_powertools/utilities/parameters/base.py | 2 +- aws_lambda_powertools/utilities/parameters/types.py | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/aws_lambda_powertools/utilities/parameters/base.py b/aws_lambda_powertools/utilities/parameters/base.py index 78bf865faf0..e4be9d33cdc 100644 --- a/aws_lambda_powertools/utilities/parameters/base.py +++ b/aws_lambda_powertools/utilities/parameters/base.py @@ -27,7 +27,7 @@ from aws_lambda_powertools.shared import constants, user_agent from aws_lambda_powertools.shared.functions import resolve_max_age -from aws_lambda_powertools.utilities.parameters.types import RecursiveOptions, TransformOptions +from aws_lambda_powertools.utilities.parameters.types import TransformOptions from .exceptions import GetParameterError, TransformParameterError diff --git a/aws_lambda_powertools/utilities/parameters/types.py b/aws_lambda_powertools/utilities/parameters/types.py index 2dbf1593d72..6a15873c496 100644 --- a/aws_lambda_powertools/utilities/parameters/types.py +++ b/aws_lambda_powertools/utilities/parameters/types.py @@ -1,4 +1,3 @@ from typing_extensions import Literal TransformOptions = Literal["json", "binary", "auto", None] -RecursiveOptions = Literal[True, False] From db96b1a79e870b428f7ae29e8e2e5ca31641b6f8 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 15 Aug 2023 14:36:26 +0200 Subject: [PATCH 20/24] chore(test): remove itsdangerous from perf test Signed-off-by: heitorlessa --- 
tests/performance/test_data_masking.py | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/tests/performance/test_data_masking.py b/tests/performance/test_data_masking.py index 33ebe42733f..9dcf041b81f 100644 --- a/tests/performance/test_data_masking.py +++ b/tests/performance/test_data_masking.py @@ -4,9 +4,6 @@ import pytest from aws_lambda_powertools.utilities.data_masking.base import DataMasking -from aws_lambda_powertools.utilities.data_masking.providers.itsdangerous import ( - ItsDangerousProvider, -) DATA_MASKING_PACKAGE = "aws_lambda_powertools.utilities.data_masking" DATA_MASKING_INIT_SLA: float = 0.002 @@ -58,16 +55,15 @@ def test_data_masking_init(benchmark): pytest.fail(f"High level imports should be below {DATA_MASKING_INIT_SLA}s: {stat}") -def encrypt_json_blob(): - data_masker = DataMasking(provider=ItsDangerousProvider("mykey")) - encrypted = data_masker.encrypt(json_blob, json_blob_fields) - data_masker.decrypt(encrypted, json_blob_fields) +def mask_json_blob(): + data_masker = DataMasking() + data_masker.mask(json_blob, json_blob_fields) @pytest.mark.perf @pytest.mark.benchmark(group="core", disable_gc=True, warmup=False) def test_data_masking_encrypt_with_json_blob(benchmark): - benchmark.pedantic(encrypt_json_blob) + benchmark.pedantic(mask_json_blob) stat = benchmark.stats.stats.max if stat > DATA_MASKING_NESTED_ENCRYPT_SLA: pytest.fail(f"High level imports should be below {DATA_MASKING_NESTED_ENCRYPT_SLA}s: {stat}") From 88c67a585c8ebe927ae6a142f5836903ed86b274 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 15 Aug 2023 14:38:47 +0200 Subject: [PATCH 21/24] chore(deps): remove itsdangerous dependencies --- poetry.lock | 17 ++-------- pyproject.toml | 90 ++++++++++++++++++++++++++++---------------------- 2 files changed, 53 insertions(+), 54 deletions(-) diff --git a/poetry.lock b/poetry.lock index 5a066741ff5..23ec87a652d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1301,17 +1301,6 @@ 
pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib" plugins = ["setuptools"] requirements-deprecated-finder = ["pip-api", "pipreqs"] -[[package]] -name = "itsdangerous" -version = "2.1.2" -description = "Safely pass data to untrusted environments and back." -optional = true -python-versions = ">=3.7" -files = [ - {file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"}, - {file = "itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"}, -] - [[package]] name = "jinja2" version = "3.1.2" @@ -3099,12 +3088,10 @@ docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] [extras] -all = ["aws-encryption-sdk", "aws-xray-sdk", "fastjsonschema", "itsdangerous", "pydantic"] +all = ["aws-encryption-sdk", "aws-xray-sdk", "fastjsonschema", "pydantic"] aws-sdk = ["boto3"] datadog = [] -datamasking-all = ["aws-encryption-sdk", "itsdangerous"] datamasking-aws-sdk = ["aws-encryption-sdk"] -datamasking-itsdangerous = ["itsdangerous"] parser = ["pydantic"] tracer = ["aws-xray-sdk"] validation = ["fastjsonschema"] @@ -3112,4 +3099,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "70c335b6477d58702df0f0f6821ab7ed0cc592553bb92d698359518193570b10" +content-hash = "63e60761e3a4a8b25eb484e0937a3c907b1131a1ba4fdeab897720b87fa09b64" diff --git a/pyproject.toml b/pyproject.toml index 6916abe601e..8b5bb84d0b2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,21 +4,31 @@ version = "2.22.0" description = "Powertools for AWS Lambda (Python) is a developer toolkit to implement Serverless best practices and 
increase developer velocity." authors = ["Amazon Web Services"] include = ["aws_lambda_powertools/py.typed", "THIRD-PARTY-LICENSES"] -classifiers=[ - "Development Status :: 5 - Production/Stable", - "Intended Audience :: Developers", - "License :: OSI Approved :: MIT No Attribution License (MIT-0)", - "Natural Language :: English", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", +classifiers = [ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT No Attribution License (MIT-0)", + "Natural Language :: English", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", ] repository = "https://github.com/aws-powertools/powertools-lambda-python" documentation = "https://docs.powertools.aws.dev/lambda/python/" readme = "README.md" -keywords = ["aws_lambda_powertools", "aws", "tracing", "logging", "lambda", "powertools", "feature_flags", "idempotency", "middleware"] +keywords = [ + "aws_lambda_powertools", + "aws", + "tracing", + "logging", + "lambda", + "powertools", + "feature_flags", + "idempotency", + "middleware", +] # MIT-0 is not recognized as an existing license from poetry. # By using `MIT` as a license value, a `License :: OSI Approved :: MIT License` classifier is added to the classifiers list. 
license = "MIT" @@ -34,11 +44,10 @@ fastjsonschema = { version = "^2.14.5", optional = true } pydantic = { version = "^1.8.2", optional = true } boto3 = { version = "^1.20.32", optional = true } typing-extensions = "^4.6.2" -itsdangerous = {version = "^2.1.2", optional = true} -aws-encryption-sdk = {version = "^3.1.1", optional = true} +aws-encryption-sdk = { version = "^3.1.1", optional = true } [tool.poetry.dev-dependencies] -coverage = {extras = ["toml"], version = "^7.2"} +coverage = { extras = ["toml"], version = "^7.2" } pytest = "^7.4.0" black = "^23.3" boto3 = "^1.18" @@ -75,22 +84,20 @@ filelock = "^3.12.2" checksumdir = "^1.2.0" mypy-boto3-appconfigdata = "^1.28.16" ijson = "^3.2.2" -typed-ast = { version = "^1.5.5", python = "< 3.8"} +typed-ast = { version = "^1.5.5", python = "< 3.8" } hvac = "^1.1.1" aws-requests-auth = "^0.4.3" datadog-lambda = "^4.77.0" [tool.poetry.extras] parser = ["pydantic"] -datamasking-itsdangerous = ["itsdangerous"] -datamasking-aws-sdk= ["aws-encryption-sdk"] -datamasking-all = ["itsdangerous", "aws-encryption-sdk"] +datamasking-aws-sdk = ["aws-encryption-sdk"] validation = ["fastjsonschema"] tracer = ["aws-xray-sdk"] -all = ["pydantic", "aws-xray-sdk", "fastjsonschema", "itsdangerous", "aws-encryption-sdk"] +all = ["pydantic", "aws-xray-sdk", "fastjsonschema", "aws-encryption-sdk"] # allow customers to run code locally without emulators (SAM CLI, etc.) 
aws-sdk = ["boto3"] -datadog=["datadog-lambda"] +datadog = ["datadog-lambda"] [tool.poetry.group.dev.dependencies] cfn-lint = "0.79.7" @@ -103,7 +110,12 @@ retry2 = "^0.9.5" [tool.coverage.run] source = ["aws_lambda_powertools"] -omit = ["tests/*", "aws_lambda_powertools/exceptions/*", "aws_lambda_powertools/utilities/parser/types.py", "aws_lambda_powertools/utilities/jmespath_utils/envelopes.py"] +omit = [ + "tests/*", + "aws_lambda_powertools/exceptions/*", + "aws_lambda_powertools/utilities/parser/types.py", + "aws_lambda_powertools/utilities/jmespath_utils/envelopes.py", +] branch = true [tool.coverage.html] @@ -113,26 +125,26 @@ title = "Powertools for AWS Lambda (Python) Test Coverage" [tool.coverage.report] fail_under = 90 exclude_lines = [ - # Have to re-enable the standard pragma - "pragma: no cover", + # Have to re-enable the standard pragma + "pragma: no cover", - # Don't complain about missing debug-only code: - "def __repr__", - "if self.debug", + # Don't complain about missing debug-only code: + "def __repr__", + "if self.debug", - # Don't complain if tests don't hit defensive assertion code: - "raise AssertionError", - "raise NotImplementedError", + # Don't complain if tests don't hit defensive assertion code: + "raise AssertionError", + "raise NotImplementedError", - # Don't complain if non-runnable code isn't run: - "if 0:", - "if __name__ == .__main__.:", + # Don't complain if non-runnable code isn't run: + "if 0:", + "if __name__ == .__main__.:", - # Ignore runtime type checking - "if TYPE_CHECKING:", + # Ignore runtime type checking + "if TYPE_CHECKING:", - # Ignore type function overload - "@overload", + # Ignore type function overload + "@overload", ] [tool.isort] @@ -165,16 +177,16 @@ minversion = "6.0" addopts = "-ra -vv" testpaths = "./tests" markers = [ - "perf: marks perf tests to be deselected (deselect with '-m \"not perf\"')", + "perf: marks perf tests to be deselected (deselect with '-m \"not perf\"')", ] # MAINTENANCE: Remove these 
lines when drop support to Pydantic v1 -filterwarnings=[ +filterwarnings = [ "ignore:.*The `parse_obj` method is deprecated*:DeprecationWarning", "ignore:.*The `parse_raw` method is deprecated*:DeprecationWarning", "ignore:.*load_str_bytes is deprecated*:DeprecationWarning", "ignore:.*The `dict` method is deprecated; use `model_dump` instead*:DeprecationWarning", - "ignore:.*Pydantic V1 style `@validator` validators are deprecated*:DeprecationWarning" + "ignore:.*Pydantic V1 style `@validator` validators are deprecated*:DeprecationWarning", ] [build-system] From 02219f7bf30d81d70e0458444af2bd4af290c385 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 18 Aug 2023 14:12:42 +0200 Subject: [PATCH 22/24] chore: disable sockets in encryption sdk tests Signed-off-by: heitorlessa --- poetry.lock | 16 +++++++++++++++- pyproject.toml | 1 + tests/functional/data_masking/conftest.py | 6 ++++++ 3 files changed, 22 insertions(+), 1 deletion(-) create mode 100644 tests/functional/data_masking/conftest.py diff --git a/poetry.lock b/poetry.lock index 23ec87a652d..20af7c3f8f4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2344,6 +2344,20 @@ pytest = ">=5.0" [package.extras] dev = ["pre-commit", "pytest-asyncio", "tox"] +[[package]] +name = "pytest-socket" +version = "0.6.0" +description = "Pytest Plugin to disable socket calls during tests" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "pytest_socket-0.6.0-py3-none-any.whl", hash = "sha256:cca72f134ff01e0023c402e78d31b32e68da3efdf3493bf7788f8eba86a6824c"}, + {file = "pytest_socket-0.6.0.tar.gz", hash = "sha256:363c1d67228315d4fc7912f1aabfd570de29d0e3db6217d61db5728adacd7138"}, +] + +[package.dependencies] +pytest = ">=3.6.3" + [[package]] name = "pytest-xdist" version = "3.3.1" @@ -3099,4 +3113,4 @@ validation = ["fastjsonschema"] [metadata] lock-version = "2.0" python-versions = "^3.7.4" -content-hash = "63e60761e3a4a8b25eb484e0937a3c907b1131a1ba4fdeab897720b87fa09b64" +content-hash = 
"85a732715c0b460e59405c42a1d947555aac44adae4f0aaca4162b9fe42174e7" diff --git a/pyproject.toml b/pyproject.toml index 8b5bb84d0b2..b41db978a5e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -107,6 +107,7 @@ httpx = ">=0.23.3,<0.25.0" sentry-sdk = "^1.22.2" ruff = ">=0.0.272,<0.0.285" retry2 = "^0.9.5" +pytest-socket = "^0.6.0" [tool.coverage.run] source = ["aws_lambda_powertools"] diff --git a/tests/functional/data_masking/conftest.py b/tests/functional/data_masking/conftest.py new file mode 100644 index 00000000000..f73ccca4113 --- /dev/null +++ b/tests/functional/data_masking/conftest.py @@ -0,0 +1,6 @@ +from pytest_socket import disable_socket + + +def pytest_runtest_setup(): + """Disable Unix and TCP sockets for Data masking tests""" + disable_socket() From c3995dbced7b29430f2a70e5fa992802ba0f6ad8 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 18 Aug 2023 14:53:08 +0200 Subject: [PATCH 23/24] refactor(tests): use a test double --- tests/functional/data_masking/conftest.py | 17 ++++++++ .../data_masking/test_aws_encryption_sdk.py | 43 +++++++++++-------- 2 files changed, 41 insertions(+), 19 deletions(-) diff --git a/tests/functional/data_masking/conftest.py b/tests/functional/data_masking/conftest.py index f73ccca4113..6127858d6b3 100644 --- a/tests/functional/data_masking/conftest.py +++ b/tests/functional/data_masking/conftest.py @@ -1,6 +1,23 @@ +from __future__ import annotations + +from typing import Tuple + from pytest_socket import disable_socket def pytest_runtest_setup(): """Disable Unix and TCP sockets for Data masking tests""" disable_socket() + + +class FakeEncryptionClient: + ENCRYPTION_HEADER = "test" + + def encrypt(self, source: bytes | str, **kwargs) -> Tuple[bytes, str]: + if isinstance(source, str): + return source.encode(), self.ENCRYPTION_HEADER + + return source, self.ENCRYPTION_HEADER + + def decrypt(self, source: bytes, **kwargs) -> Tuple[bytes, str]: + return source, "dummy_decryption_header" diff --git 
a/tests/functional/data_masking/test_aws_encryption_sdk.py b/tests/functional/data_masking/test_aws_encryption_sdk.py index 82a00744217..d155583fc64 100644 --- a/tests/functional/data_masking/test_aws_encryption_sdk.py +++ b/tests/functional/data_masking/test_aws_encryption_sdk.py @@ -1,18 +1,33 @@ -from unittest.mock import patch +from __future__ import annotations import pytest from aws_lambda_powertools.utilities.data_masking.base import DataMasking -from aws_lambda_powertools.utilities.data_masking.providers.aws_encryption_sdk import AwsEncryptionSdkProvider -from tests.unit.data_masking.setup import * - - -AWS_SDK_KEY = "arn:aws:kms:us-west-2:683517028648:key/269301eb-81eb-4067-ac72-98e8e49bf2b3" +from aws_lambda_powertools.utilities.data_masking.providers.aws_encryption_sdk import ( + AwsEncryptionSdkProvider, +) +from tests.functional.data_masking.conftest import FakeEncryptionClient +from tests.unit.data_masking.setup import ( + aws_encrypted_json_blob, + aws_encrypted_with_fields, + data_types, + data_types_and_masks, + dict_fields, + dictionaries, + fields_to_mask, + json_blob, + json_dict, + masked_with_fields, + python_dict, +) @pytest.fixture -def data_masker(): - return DataMasking(provider=AwsEncryptionSdkProvider(keys=[AWS_SDK_KEY])) +def data_masker() -> DataMasking: + """DataMasking using AWS Encryption SDK Provider with a fake client""" + fake_client = FakeEncryptionClient() + provider = AwsEncryptionSdkProvider(keys=["dummy"], client=fake_client) + return DataMasking(provider=provider) @pytest.mark.parametrize("value, value_masked", data_types_and_masks) @@ -39,7 +54,7 @@ def test_mask_with_fields(data_masker): @pytest.mark.parametrize("value", data_types) -def test_encrypt_decrypt(value, data_masker): +def test_encrypt_decrypt(value, data_masker: DataMasking): # GIVEN an instantiation of DataMasking with the AWS encryption provider # AWS Encryption SDK encrypt method only takes in bytes or strings @@ -67,13 +82,3 @@ def 
test_encrypt_decrypt_with_fields(value, fields, data_masker): assert decrypted_data == aws_encrypted_json_blob else: assert decrypted_data == aws_encrypted_with_fields - - -@patch("aws_encryption_sdk.EncryptionSDKClient") -def test_mock(get_encryption_sdk_client_mock): - get_encryption_sdk_client_mock.return_value = "mock_value" - - d_m = DataMasking(provider=AwsEncryptionSdkProvider(keys=["mock_value"])) - encrypted_data = d_m.encrypt(b"secret_data") - decrypted_data = d_m.decrypt(encrypted_data) - assert decrypted_data == b"secret_data" From 3d3ffecb96f0bf7d9a1192ad64d112c44d31010a Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 18 Aug 2023 15:07:22 +0200 Subject: [PATCH 24/24] chore: address make pr errors Signed-off-by: heitorlessa --- .../utilities/data_masking/base.py | 9 ++-- .../providers/aws_encryption_sdk.py | 6 ++- tests/unit/data_masking/test_data_masking.py | 46 +++++++++---------- 3 files changed, 32 insertions(+), 29 deletions(-) diff --git a/aws_lambda_powertools/utilities/data_masking/base.py b/aws_lambda_powertools/utilities/data_masking/base.py index e67898fb58c..5c59568702b 100644 --- a/aws_lambda_powertools/utilities/data_masking/base.py +++ b/aws_lambda_powertools/utilities/data_masking/base.py @@ -68,13 +68,16 @@ def _apply_action_to_fields(self, data: Union[dict, str], fields, action, **prov else: raise TypeError( "Unsupported data type. The 'data' parameter must be a dictionary or a JSON string " - "representation of a dictionary." 
+ "representation of a dictionary.", ) for field in fields: + # prevent overriding loop variable + current_field = field if not isinstance(field, str): - field = json.dumps(field) - keys = field.split(".") + current_field = json.dumps(field) + + keys = current_field.split(".") curr_dict = my_dict_parsed for key in keys[:-1]: diff --git a/aws_lambda_powertools/utilities/data_masking/providers/aws_encryption_sdk.py b/aws_lambda_powertools/utilities/data_masking/providers/aws_encryption_sdk.py index c57c44de755..df5e7b200f0 100644 --- a/aws_lambda_powertools/utilities/data_masking/providers/aws_encryption_sdk.py +++ b/aws_lambda_powertools/utilities/data_masking/providers/aws_encryption_sdk.py @@ -9,8 +9,8 @@ StrictAwsKmsMasterKeyProvider, ) -from aws_lambda_powertools.utilities.data_masking.provider import Provider from aws_lambda_powertools.shared.user_agent import register_feature_to_botocore_session +from aws_lambda_powertools.utilities.data_masking.provider import Provider class SingletonMeta(type): @@ -55,6 +55,8 @@ def encrypt(self, data: Union[bytes, str], **provider_options) -> str: def decrypt(self, data: str, **provider_options) -> bytes: ciphertext_decoded = base64.b64decode(data) ciphertext, _ = self.client.decrypt( - source=ciphertext_decoded, key_provider=self.key_provider, **provider_options + source=ciphertext_decoded, + key_provider=self.key_provider, + **provider_options, ) return ciphertext diff --git a/tests/unit/data_masking/test_data_masking.py b/tests/unit/data_masking/test_data_masking.py index 3ed1b203d20..7a731800cc9 100644 --- a/tests/unit/data_masking/test_data_masking.py +++ b/tests/unit/data_masking/test_data_masking.py @@ -1,6 +1,8 @@ import json + import pytest from itsdangerous.url_safe import URLSafeSerializer + from aws_lambda_powertools.shared.constants import DATA_MASKING_STRING from aws_lambda_powertools.utilities.data_masking.base import DataMasking from aws_lambda_powertools.utilities.data_masking.provider import Provider @@ 
-26,16 +28,6 @@ def decrypt(self, data: str) -> str: return serialize.loads(data) -# @pytest.fixture -# def data_masker(): -# return DataMasking() - - -# @pytest.fixture -# def custom_data_masker(): -# return DataMasking(provider=MyEncryptionProvider(keys="secret-key")) - - def test_mask_int(): data_masker = DataMasking() @@ -116,7 +108,7 @@ def test_mask_dict(): "a": { "1": {"None": "hello", "four": "world"}, "b": {"3": {"4": "goodbye", "e": "world"}}, - } + }, } # WHEN mask is called with no fields argument @@ -134,7 +126,7 @@ def test_mask_dict_with_fields(): "a": { "1": {"None": "hello", "four": "world"}, "b": {"3": {"4": "goodbye", "e": "world"}}, - } + }, } # WHEN mask is called with a list of fields specified @@ -142,7 +134,10 @@ def test_mask_dict_with_fields(): # THEN the result is only the specified fields are masked assert masked_string == { - "a": {"1": {"None": DATA_MASKING_STRING, "four": "world"}, "b": {"3": {"4": DATA_MASKING_STRING, "e": "world"}}} + "a": { + "1": {"None": DATA_MASKING_STRING, "four": "world"}, + "b": {"3": {"4": DATA_MASKING_STRING, "e": "world"}}, + }, } @@ -155,8 +150,8 @@ def test_mask_json_dict_with_fields(): "a": { "1": {"None": "hello", "four": "world"}, "b": {"3": {"4": "goodbye", "e": "world"}}, - } - } + }, + }, ) # WHEN mask is called with a list of fields specified @@ -164,7 +159,10 @@ def test_mask_json_dict_with_fields(): # THEN the result is only the specified fields are masked assert masked_json_string == { - "a": {"1": {"None": DATA_MASKING_STRING, "four": "world"}, "b": {"3": {"4": DATA_MASKING_STRING, "e": "world"}}} + "a": { + "1": {"None": DATA_MASKING_STRING, "four": "world"}, + "b": {"3": {"4": DATA_MASKING_STRING, "e": "world"}}, + }, } @@ -201,7 +199,7 @@ def test_encrypt_decrypt_bool(): decrypted_data = data_masker.decrypt(encrypted_data) # THEN the result is the original input data - assert decrypted_data == True + assert decrypted_data is True def test_encrypt_decrypt_none(): @@ -213,7 +211,7 @@ def 
test_encrypt_decrypt_none(): decrypted_data = data_masker.decrypt(encrypted_data) # THEN the result is the original input data - assert decrypted_data == None + assert decrypted_data is None def test_encrypt_decrypt_str(): @@ -248,7 +246,7 @@ def test_dict_encryption_with_fields(): "a": { "1": {"None": "hello", "four": "world"}, "b": {"3": {"4": "goodbye", "e": "world"}}, - } + }, } # WHEN encrypting and decrypting the data with a list of fields @@ -268,8 +266,8 @@ def test_json_encryption_with_fields(): "a": { "1": {"None": "hello", "four": "world"}, "b": {"3": {"4": "goodbye", "e": "world"}}, - } - } + }, + }, ) # WHEN encrypting and decrypting a json representation of a dictionary with a list of fields @@ -360,7 +358,7 @@ def test_parsing_nonexistent_fields(): "3": { "1": {"None": "hello", "four": "world"}, "4": {"33": {"5": "goodbye", "e": "world"}}, - } + }, } # WHEN attempting to pass in fields that do not exist in the input data @@ -377,7 +375,7 @@ def test_parsing_nonstring_fields(): "3": { "1": {"None": "hello", "four": "world"}, "4": {"33": {"5": "goodbye", "e": "world"}}, - } + }, } # WHEN attempting to pass in a list of fields that are not strings @@ -396,7 +394,7 @@ def test_parsing_nonstring_keys_and_fields(): 3: { "1": {"None": "hello", "four": "world"}, 4: {"33": {"5": "goodbye", "e": "world"}}, - } + }, } masked = data_masker.mask(data, fields=[3.4])