From 610688c01948a154e32f398a84b2a45fa51d2bc9 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Wed, 31 Aug 2022 14:48:00 +0200 Subject: [PATCH 01/33] chore(ci): add cdk environment; failing still --- tests/e2e/tracer/conftest.py | 8 ++++++-- tests/e2e/tracer/infrastructure.py | 25 +++++++++++++++++++++++-- tests/e2e/tracer/test_tracer.py | 5 +++++ tests/e2e/utils/asset.py | 16 +++++++--------- tests/e2e/utils/infrastructure.py | 30 +++++++++++++++++++++++------- 5 files changed, 64 insertions(+), 20 deletions(-) diff --git a/tests/e2e/tracer/conftest.py b/tests/e2e/tracer/conftest.py index 3b724bf1247..710c75dd403 100644 --- a/tests/e2e/tracer/conftest.py +++ b/tests/e2e/tracer/conftest.py @@ -6,7 +6,9 @@ @pytest.fixture(autouse=True, scope="module") -def infrastructure(request: pytest.FixtureRequest, lambda_layer_arn: str): +# NOTE: Commented out for faster debug as we don't need a Layer yet +# def infrastructure(request: pytest.FixtureRequest, lambda_layer_arn: str): +def infrastructure(request: pytest.FixtureRequest): """Setup and teardown logic for E2E test infrastructure Parameters @@ -21,7 +23,9 @@ def infrastructure(request: pytest.FixtureRequest, lambda_layer_arn: str): Dict[str, str] CloudFormation Outputs from deployed infrastructure """ - stack = TracerStack(handlers_dir=Path(f"{request.path.parent}/handlers"), layer_arn=lambda_layer_arn) + # NOTE: Commented out for faster debug as we don't need a Layer yet + # stack = TracerStack(handlers_dir=Path(f"{request.path.parent}/handlers"), layer_arn=lambda_layer_arn) + stack = TracerStack(handlers_dir=Path(f"{request.path.parent}/handlers"), layer_arn="") try: yield stack.deploy() finally: diff --git a/tests/e2e/tracer/infrastructure.py b/tests/e2e/tracer/infrastructure.py index 9b388558c0b..d937627455c 100644 --- a/tests/e2e/tracer/infrastructure.py +++ b/tests/e2e/tracer/infrastructure.py @@ -1,5 +1,8 @@ from pathlib import Path +from aws_cdk import aws_ec2, aws_ssm +from aws_cdk import 
aws_elasticloadbalancingv2 as elbv2 + from tests.e2e.utils.data_builder import build_service_name from tests.e2e.utils.infrastructure import BaseInfrastructure @@ -14,5 +17,23 @@ def __init__(self, handlers_dir: Path, feature_name: str = FEATURE_NAME, layer_a super().__init__(feature_name, handlers_dir, layer_arn) def create_resources(self) -> None: - env_vars = {"POWERTOOLS_SERVICE_NAME": self.SERVICE_NAME} - self.create_lambda_functions(function_props={"environment": env_vars}) + # NOTE: Commented out Lambda fns as we don't need them now + # env_vars = {"POWERTOOLS_SERVICE_NAME": self.SERVICE_NAME} + # self.create_lambda_functions(function_props={"environment": env_vars}) + + # NOTE: Test VPC can be looked up + vpc = aws_ec2.Vpc.from_lookup( + self.stack, + "VPC", + is_default=True, + region="eu-west-1" + # vpc_id="vpc-4d79432b", # NOTE: hardcode didn't work either + ) + + # NOTE: Same issue with any other lookup. + # # string_value = aws_ssm.StringParameter.from_string_parameter_attributes( + # # self.stack, "MyValue", parameter_name="/db/proxy_arn" + # # ).string_value + + alb = elbv2.ApplicationLoadBalancer(self.stack, "pqp", vpc=vpc, internet_facing=True) + self.add_cfn_output(name="ALB", value=alb.load_balancer_dns_name, arn=alb.load_balancer_arn) diff --git a/tests/e2e/tracer/test_tracer.py b/tests/e2e/tracer/test_tracer.py index de25bc02ebf..62a376e869e 100644 --- a/tests/e2e/tracer/test_tracer.py +++ b/tests/e2e/tracer/test_tracer.py @@ -35,6 +35,11 @@ def async_fn(infrastructure: dict) -> str: return infrastructure.get("AsyncCapture", "") +# NOTE: Quick test to confirm VPC can be resolved +def test_vpc(): + pass + + def test_lambda_handler_trace_is_visible(basic_handler_fn_arn: str, basic_handler_fn: str): # GIVEN handler_name = basic_handler.lambda_handler.__name__ diff --git a/tests/e2e/utils/asset.py b/tests/e2e/utils/asset.py index db9e7299d1a..2de136c3d67 100644 --- a/tests/e2e/utils/asset.py +++ b/tests/e2e/utils/asset.py @@ -24,15 +24,9 @@ class 
AssetTemplateConfigDestinationsAccount(BaseModel): assume_role_arn: str = Field(str, alias="assumeRoleArn") -class AssetTemplateConfigDestinations(BaseModel): - current_account_current_region: AssetTemplateConfigDestinationsAccount = Field( - AssetTemplateConfigDestinationsAccount, alias="current_account-current_region" - ) - - class AssetTemplateConfig(BaseModel): source: AssetManifest - destinations: AssetTemplateConfigDestinations + destinations: Dict[str, AssetTemplateConfigDestinationsAccount] class TemplateAssembly(BaseModel): @@ -63,8 +57,12 @@ def __init__( self.region = region self.asset_path = config.source.path self.asset_packaging = config.source.packaging - self.object_key = config.destinations.current_account_current_region.object_key - self._bucket = config.destinations.current_account_current_region.bucket_name + + # NOTE: When using cdk.Environment in a Stack instance + # CDK changes its destination asset to a dynamic key using - + self.asset_env_mapping = f"{self.account_id}-{self.region}" + self.object_key = config.destinations.get(self.asset_env_mapping).object_key # type: ignore[union-attr] + self._bucket = config.destinations.get(self.asset_env_mapping).bucket_name # type: ignore[union-attr] self.bucket_name = self._resolve_bucket_name() @property diff --git a/tests/e2e/utils/infrastructure.py b/tests/e2e/utils/infrastructure.py index 6a1aa0b86ce..a68c7c481c5 100644 --- a/tests/e2e/utils/infrastructure.py +++ b/tests/e2e/utils/infrastructure.py @@ -10,7 +10,18 @@ import boto3 import pytest import yaml -from aws_cdk import App, AssetStaging, BundlingOptions, CfnOutput, DockerImage, RemovalPolicy, Stack, aws_logs +from aws_cdk import ( + App, + AssetStaging, + BundlingOptions, + CfnOutput, + DockerImage, + Environment, + RemovalPolicy, + Stack, + aws_ec2, + aws_logs, +) from aws_cdk.aws_lambda import Code, Function, LayerVersion, Runtime, Tracing from filelock import FileLock from mypy_boto3_cloudformation import CloudFormationClient @@ -48,17 
+59,22 @@ def __init__(self, feature_name: str, handlers_dir: Path, layer_arn: str = "") - self.layer_arn = layer_arn self.stack_outputs: Dict[str, str] = {} - # NOTE: Investigate why cdk.Environment in Stack - # changes synthesized asset (no object_key in asset manifest) - self.app = App(outdir=str(SOURCE_CODE_ROOT_PATH / ".cdk")) - self.stack = Stack(self.app, self.stack_name) + # NOTE: CDK stack account and region are tokens, we need to resolve earlier self.session = boto3.Session() self.cfn: CloudFormationClient = self.session.client("cloudformation") - - # NOTE: CDK stack account and region are tokens, we need to resolve earlier self.account_id = self.session.client("sts").get_caller_identity()["Account"] self.region = self.session.region_name + self.app = App(outdir=str(SOURCE_CODE_ROOT_PATH / ".cdk")) + self.stack = Stack(self.app, self.stack_name, env=Environment(account=self.account_id, region=self.region)) + + # NOTE: Lookup from base infra didn't work either + self.default_vpc = aws_ec2.Vpc.from_lookup( + self.stack, + "DefaultVPC", + is_default=True, + ) + def create_lambda_functions(self, function_props: Optional[Dict] = None) -> Dict[str, Function]: """Create Lambda functions available under handlers_dir From c1d103f476a2de553f4fa629190e2863c277ba79 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Thu, 1 Sep 2022 09:38:13 +0200 Subject: [PATCH 02/33] chore: remove vpc as from_attrs isnt reliable --- tests/e2e/utils/infrastructure.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/tests/e2e/utils/infrastructure.py b/tests/e2e/utils/infrastructure.py index a68c7c481c5..0fff88c0ad4 100644 --- a/tests/e2e/utils/infrastructure.py +++ b/tests/e2e/utils/infrastructure.py @@ -19,7 +19,6 @@ Environment, RemovalPolicy, Stack, - aws_ec2, aws_logs, ) from aws_cdk.aws_lambda import Code, Function, LayerVersion, Runtime, Tracing @@ -68,13 +67,6 @@ def __init__(self, feature_name: str, handlers_dir: Path, layer_arn: str = "") - self.app = 
App(outdir=str(SOURCE_CODE_ROOT_PATH / ".cdk")) self.stack = Stack(self.app, self.stack_name, env=Environment(account=self.account_id, region=self.region)) - # NOTE: Lookup from base infra didn't work either - self.default_vpc = aws_ec2.Vpc.from_lookup( - self.stack, - "DefaultVPC", - is_default=True, - ) - def create_lambda_functions(self, function_props: Optional[Dict] = None) -> Dict[str, Function]: """Create Lambda functions available under handlers_dir From cfde54bb73936be75a0a2b47bd7f3b6cb2383d75 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Thu, 1 Sep 2022 11:02:31 +0200 Subject: [PATCH 03/33] chore: ignore cdk.out Signed-off-by: heitorlessa --- .gitignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index cc01240a405..8947c546ec2 100644 --- a/.gitignore +++ b/.gitignore @@ -310,3 +310,5 @@ site/ !.github/workflows/lib examples/**/sam/.aws-sam + +cdk.out From ea7fe483a48cbf213c2343a18539dbe3bcb7e25b Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Thu, 1 Sep 2022 19:50:19 +0200 Subject: [PATCH 04/33] chore: migrate to CDK CLI stage 1 --- tests/e2e/tracer/conftest.py | 6 +- tests/e2e/tracer/infrastructure.py | 22 +++-- tests/e2e/tracer/test_tracer.py | 3 +- tests/e2e/utils/infrastructure.py | 127 +++++++++++++++++++++-------- 4 files changed, 110 insertions(+), 48 deletions(-) diff --git a/tests/e2e/tracer/conftest.py b/tests/e2e/tracer/conftest.py index 710c75dd403..a1d8c9667c5 100644 --- a/tests/e2e/tracer/conftest.py +++ b/tests/e2e/tracer/conftest.py @@ -1,13 +1,15 @@ +import json +import os from pathlib import Path import pytest from tests.e2e.tracer.infrastructure import TracerStack +PWD = Path(__file__).parent -@pytest.fixture(autouse=True, scope="module") -# NOTE: Commented out for faster debug as we don't need a Layer yet # def infrastructure(request: pytest.FixtureRequest, lambda_layer_arn: str): +@pytest.fixture(autouse=True, scope="module") def infrastructure(request: pytest.FixtureRequest): """Setup and teardown 
logic for E2E test infrastructure diff --git a/tests/e2e/tracer/infrastructure.py b/tests/e2e/tracer/infrastructure.py index d937627455c..3d64a39168b 100644 --- a/tests/e2e/tracer/infrastructure.py +++ b/tests/e2e/tracer/infrastructure.py @@ -1,11 +1,14 @@ from pathlib import Path -from aws_cdk import aws_ec2, aws_ssm +from aws_cdk import aws_ec2 from aws_cdk import aws_elasticloadbalancingv2 as elbv2 +from aws_cdk import aws_ssm from tests.e2e.utils.data_builder import build_service_name from tests.e2e.utils.infrastructure import BaseInfrastructure +PWD = Path(__file__).parent + class TracerStack(BaseInfrastructure): # Maintenance: Tracer doesn't support dynamic service injection (tracer.py L310) @@ -26,14 +29,15 @@ def create_resources(self) -> None: self.stack, "VPC", is_default=True, - region="eu-west-1" - # vpc_id="vpc-4d79432b", # NOTE: hardcode didn't work either + region=self.region, ) - # NOTE: Same issue with any other lookup. - # # string_value = aws_ssm.StringParameter.from_string_parameter_attributes( - # # self.stack, "MyValue", parameter_name="/db/proxy_arn" - # # ).string_value + aws_ssm.StringParameter(self.stack, "MyParam", string_value="blah", parameter_name="/dummy/cdk/param") + + # NOTE: Tokens work, but `lookup` doesn't due to context being populated by the CLI + latest_string_token = aws_ssm.StringParameter.value_for_string_parameter(self.stack, "/db/proxy_arn") - alb = elbv2.ApplicationLoadBalancer(self.stack, "pqp", vpc=vpc, internet_facing=True) - self.add_cfn_output(name="ALB", value=alb.load_balancer_dns_name, arn=alb.load_balancer_arn) + # alb = elbv2.ApplicationLoadBalancer(self.stack, "pqp", vpc=vpc, internet_facing=True) + # self.add_cfn_output(name="ALB", value=alb.load_balancer_dns_name, arn=alb.load_balancer_arn) + self.add_cfn_output(name="ProxyArn", value=latest_string_token) + self.add_cfn_output(name="LookupVPC", value=vpc.vpc_arn) diff --git a/tests/e2e/tracer/test_tracer.py b/tests/e2e/tracer/test_tracer.py index 
62a376e869e..a5836a336de 100644 --- a/tests/e2e/tracer/test_tracer.py +++ b/tests/e2e/tracer/test_tracer.py @@ -36,7 +36,8 @@ def async_fn(infrastructure: dict) -> str: # NOTE: Quick test to confirm VPC can be resolved -def test_vpc(): +# def test_vpc(layer_cdk_cli): +def test_vpc(infrastructure): pass diff --git a/tests/e2e/utils/infrastructure.py b/tests/e2e/utils/infrastructure.py index 0fff88c0ad4..4e3207705ed 100644 --- a/tests/e2e/utils/infrastructure.py +++ b/tests/e2e/utils/infrastructure.py @@ -1,6 +1,9 @@ import json import logging +import os +import subprocess import sys +import textwrap from abc import ABC, abstractmethod from enum import Enum from pathlib import Path @@ -9,7 +12,6 @@ import boto3 import pytest -import yaml from aws_cdk import ( App, AssetStaging, @@ -26,10 +28,10 @@ from mypy_boto3_cloudformation import CloudFormationClient from aws_lambda_powertools import PACKAGE_PATH -from tests.e2e.utils.asset import Assets PYTHON_RUNTIME_VERSION = f"V{''.join(map(str, sys.version_info[:2]))}" SOURCE_CODE_ROOT_PATH = PACKAGE_PATH.parent +CDK_OUT_PATH = SOURCE_CODE_ROOT_PATH / "cdk.out" logger = logging.getLogger(__name__) @@ -51,12 +53,15 @@ class PythonVersion(Enum): class BaseInfrastructure(ABC): + RANDOM_STACK_VALUE: str = f"{uuid4()}" + def __init__(self, feature_name: str, handlers_dir: Path, layer_arn: str = "") -> None: self.feature_name = feature_name - self.stack_name = f"test{PYTHON_RUNTIME_VERSION}-{feature_name}-{uuid4()}" + self.stack_name = f"test{PYTHON_RUNTIME_VERSION}-{feature_name}-{self.RANDOM_STACK_VALUE}" self.handlers_dir = handlers_dir self.layer_arn = layer_arn self.stack_outputs: Dict[str, str] = {} + self.stack_outputs_file = f"{CDK_OUT_PATH / self.feature_name}_stack_outputs.json" # tracer_stack_outputs.json # NOTE: CDK stack account and region are tokens, we need to resolve earlier self.session = boto3.Session() @@ -64,7 +69,7 @@ def __init__(self, feature_name: str, handlers_dir: Path, layer_arn: str = "") - 
self.account_id = self.session.client("sts").get_caller_identity()["Account"] self.region = self.session.region_name - self.app = App(outdir=str(SOURCE_CODE_ROOT_PATH / ".cdk")) + self.app = App() self.stack = Stack(self.app, self.stack_name, env=Environment(account=self.account_id, region=self.region)) def create_lambda_functions(self, function_props: Optional[Dict] = None) -> Dict[str, Function]: @@ -146,17 +151,17 @@ def create_lambda_functions(self, function_props: Optional[Dict] = None) -> Dict return output def deploy(self) -> Dict[str, str]: - """Creates CloudFormation Stack and return stack outputs as dict + """Synthesize and deploy a CDK app, and return its stack outputs + + NOTE: It auto-generates a temporary CDK app to benefit from CDK CLI lookup features Returns ------- Dict[str, str] CloudFormation Stack Outputs with output key and value """ - template, asset_manifest_file = self._synthesize() - assets = Assets(asset_manifest=asset_manifest_file, account_id=self.account_id, region=self.region) - assets.upload() - self.stack_outputs = self._deploy_stack(self.stack_name, template) + cdk_app_file = self._create_temp_cdk_app() + self.stack_outputs = self._deploy_stack(cdk_app_file) return self.stack_outputs def delete(self) -> None: @@ -164,6 +169,75 @@ def delete(self) -> None: logger.debug(f"Deleting stack: {self.stack_name}") self.cfn.delete_stack(StackName=self.stack_name) + def _deploy_stack(self, cdk_app_file: str) -> Dict: + """Deploys CDK App auto-generated using CDK CLI + + Parameters + ---------- + cdk_app_file : str + Path to temporary CDK App + + Returns + ------- + Dict + Stack Output values as dict + """ + stack_file = self._create_temp_cdk_app() + command = f"cdk deploy --app 'python {stack_file}' -O {self.stack_outputs_file}" + + # CDK launches a background task, so we must wait + subprocess.check_output(command, shell=True) + return self._read_stack_output() + + def _sync_stack_name(self, stack_output: Dict): + """Synchronize initial stack 
name with CDK's final stack name + + Parameters + ---------- + stack_output : Dict + CDK CloudFormation Outputs, where the key is the stack name + """ + self.stack_name = list(stack_output.keys())[0] + + def _read_stack_output(self): + content = Path(self.stack_outputs_file).read_text() + outputs: Dict = json.loads(content) + + self._sync_stack_name(stack_output=outputs) + return dict(outputs.values()) + + def _create_temp_cdk_app(self): + """Autogenerate a CDK App with our Stack so that CDK CLI can deploy it + + This allows us to keep our BaseInfrastructure while supporting context lookups. + """ + # tests/e2e/tracer + stack_module_path = self.handlers_dir.relative_to(SOURCE_CODE_ROOT_PATH).parent + + # tests.e2e.tracer.infrastructure + stack_infrastructure_module = str(stack_module_path / "infrastructure").replace(os.sep, ".") + + # TracerStack + stack_infrastructure_name = self.__class__.__name__ + + code = f""" + from {stack_infrastructure_module} import {stack_infrastructure_name} + stack = {stack_infrastructure_name}(handlers_dir="{self.handlers_dir}") + stack.create_resources() + stack.app.synth() + """ + + if not CDK_OUT_PATH.is_dir(): + CDK_OUT_PATH.mkdir() + + temp_file = CDK_OUT_PATH / f"{self.stack_name}_cdk_app.py" + with temp_file.open("w") as fd: + fd.write(textwrap.dedent(code)) + + # allow CDK to read/execute file for stack deployment + temp_file.chmod(0o755) + return temp_file + @abstractmethod def create_resources(self) -> None: """Create any necessary CDK resources. It'll be called before deploy @@ -190,33 +264,6 @@ def created_resources(self): """ ... 
- def _synthesize(self) -> Tuple[Dict, Path]: - logger.debug("Creating CDK Stack resources") - self.create_resources() - logger.debug("Synthesizing CDK Stack into raw CloudFormation template") - cloud_assembly = self.app.synth() - cf_template: Dict = cloud_assembly.get_stack_by_name(self.stack_name).template - cloud_assembly_assets_manifest_path: str = ( - cloud_assembly.get_stack_by_name(self.stack_name).dependencies[0].file # type: ignore[attr-defined] - ) - return cf_template, Path(cloud_assembly_assets_manifest_path) - - def _deploy_stack(self, stack_name: str, template: Dict) -> Dict[str, str]: - logger.debug(f"Creating CloudFormation Stack: {stack_name}") - self.cfn.create_stack( - StackName=stack_name, - TemplateBody=yaml.dump(template), - TimeoutInMinutes=10, - OnFailure="ROLLBACK", - Capabilities=["CAPABILITY_IAM"], - ) - waiter = self.cfn.get_waiter("stack_create_complete") - waiter.wait(StackName=stack_name, WaiterConfig={"Delay": 10, "MaxAttempts": 50}) - - stack_details = self.cfn.describe_stacks(StackName=stack_name) - stack_outputs = stack_details["Stacks"][0]["Outputs"] - return {output["OutputKey"]: output["OutputValue"] for output in stack_outputs if output["OutputKey"]} - def add_cfn_output(self, name: str, value: str, arn: str = ""): """Create {Name} and optionally {Name}Arn CloudFormation Outputs. 
@@ -319,3 +366,11 @@ def _create_layer(self) -> str: ), ) return layer.layer_version_arn + + +if __name__ == "__main__": + layer = LambdaLayerStack(handlers_dir="") + layer.create_resources() + + # Required for CDK CLI deploy + layer.app.synth() From 72b2237557979a1df005277c3fc7f20781beb280 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 2 Sep 2022 09:53:24 +0200 Subject: [PATCH 05/33] chore: migrate to CDK CLI stage 2 - Layers work --- tests/e2e/conftest.py | 5 +- tests/e2e/{utils => lambda_layer}/Dockerfile | 0 tests/e2e/lambda_layer/__init__.py | 0 tests/e2e/lambda_layer/infrastructure.py | 47 ++++++ tests/e2e/tracer/conftest.py | 11 +- tests/e2e/utils/asset.py | 145 ------------------- tests/e2e/utils/infrastructure.py | 82 ++--------- 7 files changed, 68 insertions(+), 222 deletions(-) rename tests/e2e/{utils => lambda_layer}/Dockerfile (100%) create mode 100644 tests/e2e/lambda_layer/__init__.py create mode 100644 tests/e2e/lambda_layer/infrastructure.py delete mode 100644 tests/e2e/utils/asset.py diff --git a/tests/e2e/conftest.py b/tests/e2e/conftest.py index ac55d373e63..2fd03f822f3 100644 --- a/tests/e2e/conftest.py +++ b/tests/e2e/conftest.py @@ -1,10 +1,11 @@ import pytest -from tests.e2e.utils.infrastructure import LambdaLayerStack, deploy_once +from tests.e2e.lambda_layer.infrastructure import LambdaLayerStack +from tests.e2e.utils.infrastructure import deploy_once @pytest.fixture(scope="session") -def lambda_layer_arn(lambda_layer_deployment): +def lambda_layer_arn(lambda_layer_deployment: dict): yield lambda_layer_deployment.get("LayerArn") diff --git a/tests/e2e/utils/Dockerfile b/tests/e2e/lambda_layer/Dockerfile similarity index 100% rename from tests/e2e/utils/Dockerfile rename to tests/e2e/lambda_layer/Dockerfile diff --git a/tests/e2e/lambda_layer/__init__.py b/tests/e2e/lambda_layer/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/e2e/lambda_layer/infrastructure.py 
b/tests/e2e/lambda_layer/infrastructure.py new file mode 100644 index 00000000000..106511fdac8 --- /dev/null +++ b/tests/e2e/lambda_layer/infrastructure.py @@ -0,0 +1,47 @@ +from pathlib import Path + +from aws_cdk import AssetStaging, BundlingOptions, CfnOutput, DockerImage +from aws_cdk.aws_lambda import Code, LayerVersion + +from tests.e2e.utils.infrastructure import ( + PYTHON_RUNTIME_VERSION, + SOURCE_CODE_ROOT_PATH, + BaseInfrastructure, + PythonVersion, + logger, +) + + +class LambdaLayerStack(BaseInfrastructure): + FEATURE_NAME = "lambda-layer" + + def __init__(self, handlers_dir: Path, feature_name: str = FEATURE_NAME, layer_arn: str = "") -> None: + super().__init__(feature_name, handlers_dir, layer_arn) + + def create_resources(self): + layer = self._create_layer() + CfnOutput(self.stack, "LayerArn", value=layer) + + def _create_layer(self) -> str: + logger.debug("Creating Lambda Layer with latest source code available") + output_dir = Path(str(AssetStaging.BUNDLING_OUTPUT_DIR), "python") + input_dir = Path(str(AssetStaging.BUNDLING_INPUT_DIR), "aws_lambda_powertools") + + build_commands = [f"pip install .[pydantic] -t {output_dir}", f"cp -R {input_dir} {output_dir}"] + layer = LayerVersion( + self.stack, + "aws-lambda-powertools-e2e-test", + layer_version_name="aws-lambda-powertools-e2e-test", + compatible_runtimes=[PythonVersion[PYTHON_RUNTIME_VERSION].value["runtime"]], + code=Code.from_asset( + path=str(SOURCE_CODE_ROOT_PATH), + bundling=BundlingOptions( + image=DockerImage.from_build( + str(Path(__file__).parent), + build_args={"IMAGE": PythonVersion[PYTHON_RUNTIME_VERSION].value["image"]}, + ), + command=["bash", "-c", " && ".join(build_commands)], + ), + ), + ) + return layer.layer_version_arn diff --git a/tests/e2e/tracer/conftest.py b/tests/e2e/tracer/conftest.py index a1d8c9667c5..27005e9fe69 100644 --- a/tests/e2e/tracer/conftest.py +++ b/tests/e2e/tracer/conftest.py @@ -1,16 +1,13 @@ -import json -import os from pathlib import Path import 
pytest from tests.e2e.tracer.infrastructure import TracerStack -PWD = Path(__file__).parent -# def infrastructure(request: pytest.FixtureRequest, lambda_layer_arn: str): @pytest.fixture(autouse=True, scope="module") -def infrastructure(request: pytest.FixtureRequest): +def infrastructure(request: pytest.FixtureRequest, lambda_layer_arn: str): + # # def infrastructure(request: pytest.FixtureRequest): """Setup and teardown logic for E2E test infrastructure Parameters @@ -25,9 +22,7 @@ def infrastructure(request: pytest.FixtureRequest): Dict[str, str] CloudFormation Outputs from deployed infrastructure """ - # NOTE: Commented out for faster debug as we don't need a Layer yet - # stack = TracerStack(handlers_dir=Path(f"{request.path.parent}/handlers"), layer_arn=lambda_layer_arn) - stack = TracerStack(handlers_dir=Path(f"{request.path.parent}/handlers"), layer_arn="") + stack = TracerStack(handlers_dir=Path(f"{request.path.parent}/handlers"), layer_arn=lambda_layer_arn) try: yield stack.deploy() finally: diff --git a/tests/e2e/utils/asset.py b/tests/e2e/utils/asset.py deleted file mode 100644 index 2de136c3d67..00000000000 --- a/tests/e2e/utils/asset.py +++ /dev/null @@ -1,145 +0,0 @@ -import io -import json -import logging -import zipfile -from pathlib import Path -from typing import Dict, List, Optional - -import boto3 -import botocore.exceptions -from mypy_boto3_s3 import S3Client -from pydantic import BaseModel, Field - -logger = logging.getLogger(__name__) - - -class AssetManifest(BaseModel): - path: str - packaging: str - - -class AssetTemplateConfigDestinationsAccount(BaseModel): - bucket_name: str = Field(str, alias="bucketName") - object_key: str = Field(str, alias="objectKey") - assume_role_arn: str = Field(str, alias="assumeRoleArn") - - -class AssetTemplateConfig(BaseModel): - source: AssetManifest - destinations: Dict[str, AssetTemplateConfigDestinationsAccount] - - -class TemplateAssembly(BaseModel): - version: str - files: Dict[str, AssetTemplateConfig] 
- - -class Asset: - def __init__( - self, config: AssetTemplateConfig, account_id: str, region: str, boto3_client: Optional[S3Client] = None - ) -> None: - """CDK Asset logic to verify existence and resolve deeply nested configuration - - Parameters - ---------- - config : AssetTemplateConfig - CDK Asset configuration found in synthesized template - account_id : str - AWS Account ID - region : str - AWS Region - boto3_client : Optional["S3Client"], optional - S3 client instance for asset operations, by default None - """ - self.config = config - self.s3 = boto3_client or boto3.client("s3") - self.account_id = account_id - self.region = region - self.asset_path = config.source.path - self.asset_packaging = config.source.packaging - - # NOTE: When using cdk.Environment in a Stack instance - # CDK changes its destination asset to a dynamic key using - - self.asset_env_mapping = f"{self.account_id}-{self.region}" - self.object_key = config.destinations.get(self.asset_env_mapping).object_key # type: ignore[union-attr] - self._bucket = config.destinations.get(self.asset_env_mapping).bucket_name # type: ignore[union-attr] - self.bucket_name = self._resolve_bucket_name() - - @property - def is_zip(self): - return self.asset_packaging == "zip" - - def exists_in_s3(self, key: str) -> bool: - try: - return self.s3.head_object(Bucket=self.bucket_name, Key=key) is not None - except botocore.exceptions.ClientError: - return False - - def _resolve_bucket_name(self) -> str: - return self._bucket.replace("${AWS::AccountId}", self.account_id).replace("${AWS::Region}", self.region) - - -class Assets: - def __init__( - self, asset_manifest: Path, account_id: str, region: str, boto3_client: Optional[S3Client] = None - ) -> None: - """CDK Assets logic to find each asset, compress, and upload - - Parameters - ---------- - asset_manifest : Path - Asset manifest JSON file (self.__synthesize) - account_id : str - AWS Account ID - region : str - AWS Region - boto3_client : 
Optional[S3Client], optional - S3 client instance for asset operations, by default None - """ - self.asset_manifest = asset_manifest - self.account_id = account_id - self.region = region - self.s3 = boto3_client or boto3.client("s3") - self.assets = self._find_assets_from_template() - self.assets_location = str(self.asset_manifest.parent) - - def upload(self): - """Drop-in replacement for cdk-assets package s3 upload part. - https://www.npmjs.com/package/cdk-assets. - We use custom solution to avoid dependencies from nodejs ecosystem. - We follow the same design cdk-assets: - https://github.com/aws/aws-cdk-rfcs/blob/master/text/0092-asset-publishing.md. - """ - logger.debug(f"Upload {len(self.assets)} assets") - for asset in self.assets: - if not asset.is_zip: - logger.debug(f"Asset '{asset.object_key}' is not zip. Skipping upload.") - continue - - if asset.exists_in_s3(key=asset.object_key): - logger.debug(f"Asset '{asset.object_key}' already exists in S3. Skipping upload.") - continue - - archive = self._compress_assets(asset) - logger.debug("Uploading archive to S3") - self.s3.upload_fileobj(Fileobj=archive, Bucket=asset.bucket_name, Key=asset.object_key) - logger.debug("Successfully uploaded") - - def _find_assets_from_template(self) -> List[Asset]: - data = json.loads(self.asset_manifest.read_text()) - template = TemplateAssembly(**data) - return [ - Asset(config=asset_config, account_id=self.account_id, region=self.region) - for asset_config in template.files.values() - ] - - def _compress_assets(self, asset: Asset) -> io.BytesIO: - buf = io.BytesIO() - asset_dir = f"{self.assets_location}/{asset.asset_path}" - asset_files = list(Path(asset_dir).rglob("*")) - with zipfile.ZipFile(buf, "w", compression=zipfile.ZIP_DEFLATED) as archive: - for asset_file in asset_files: - logger.debug(f"Adding file '{asset_file}' to the archive.") - archive.write(asset_file, arcname=asset_file.relative_to(asset_dir)) - buf.seek(0) - return buf diff --git 
a/tests/e2e/utils/infrastructure.py b/tests/e2e/utils/infrastructure.py index 4e3207705ed..94de77e04ff 100644 --- a/tests/e2e/utils/infrastructure.py +++ b/tests/e2e/utils/infrastructure.py @@ -12,17 +12,7 @@ import boto3 import pytest -from aws_cdk import ( - App, - AssetStaging, - BundlingOptions, - CfnOutput, - DockerImage, - Environment, - RemovalPolicy, - Stack, - aws_logs, -) +from aws_cdk import App, CfnOutput, Environment, RemovalPolicy, Stack, aws_logs from aws_cdk.aws_lambda import Code, Function, LayerVersion, Runtime, Tracing from filelock import FileLock from mypy_boto3_cloudformation import CloudFormationClient @@ -72,6 +62,11 @@ def __init__(self, feature_name: str, handlers_dir: Path, layer_arn: str = "") - self.app = App() self.stack = Stack(self.app, self.stack_name, env=Environment(account=self.account_id, region=self.region)) + # NOTE: Inspect subclass path to generate CDK App (_create_temp_cdk_app method) + self._feature_path = Path(sys.modules[self.__class__.__module__].__file__).parent + self._feature_infra_class_name = self.__class__.__name__ + self._feature_infra_module_path = self._feature_path / "infrastructure" + def create_lambda_functions(self, function_props: Optional[Dict] = None) -> Dict[str, Function]: """Create Lambda functions available under handlers_dir @@ -161,8 +156,7 @@ def deploy(self) -> Dict[str, str]: CloudFormation Stack Outputs with output key and value """ cdk_app_file = self._create_temp_cdk_app() - self.stack_outputs = self._deploy_stack(cdk_app_file) - return self.stack_outputs + return self._deploy_stack(cdk_app_file) def delete(self) -> None: """Delete CloudFormation Stack""" @@ -202,27 +196,24 @@ def _sync_stack_name(self, stack_output: Dict): def _read_stack_output(self): content = Path(self.stack_outputs_file).read_text() outputs: Dict = json.loads(content) - self._sync_stack_name(stack_output=outputs) - return dict(outputs.values()) + + # discard stack_name and get outputs as dict + self.stack_outputs = 
list(outputs.values())[0] + return self.stack_outputs def _create_temp_cdk_app(self): """Autogenerate a CDK App with our Stack so that CDK CLI can deploy it This allows us to keep our BaseInfrastructure while supporting context lookups. """ - # tests/e2e/tracer - stack_module_path = self.handlers_dir.relative_to(SOURCE_CODE_ROOT_PATH).parent - + # NOTE: Confirm infrastructure module exists before proceeding. # tests.e2e.tracer.infrastructure - stack_infrastructure_module = str(stack_module_path / "infrastructure").replace(os.sep, ".") - - # TracerStack - stack_infrastructure_name = self.__class__.__name__ + infra_module = str(self._feature_infra_module_path.relative_to(SOURCE_CODE_ROOT_PATH)).replace(os.sep, ".") code = f""" - from {stack_infrastructure_module} import {stack_infrastructure_name} - stack = {stack_infrastructure_name}(handlers_dir="{self.handlers_dir}") + from {infra_module} import {self._feature_infra_class_name} + stack = {self._feature_infra_class_name}(handlers_dir="{self.handlers_dir}") stack.create_resources() stack.app.synth() """ @@ -331,46 +322,3 @@ def deploy_once( yield stack_outputs finally: stack.delete() - - -class LambdaLayerStack(BaseInfrastructure): - FEATURE_NAME = "lambda-layer" - - def __init__(self, handlers_dir: Path, feature_name: str = FEATURE_NAME, layer_arn: str = "") -> None: - super().__init__(feature_name, handlers_dir, layer_arn) - - def create_resources(self): - layer = self._create_layer() - CfnOutput(self.stack, "LayerArn", value=layer) - - def _create_layer(self) -> str: - logger.debug("Creating Lambda Layer with latest source code available") - output_dir = Path(str(AssetStaging.BUNDLING_OUTPUT_DIR), "python") - input_dir = Path(str(AssetStaging.BUNDLING_INPUT_DIR), "aws_lambda_powertools") - - build_commands = [f"pip install .[pydantic] -t {output_dir}", f"cp -R {input_dir} {output_dir}"] - layer = LayerVersion( - self.stack, - "aws-lambda-powertools-e2e-test", - layer_version_name="aws-lambda-powertools-e2e-test", 
- compatible_runtimes=[PythonVersion[PYTHON_RUNTIME_VERSION].value["runtime"]], - code=Code.from_asset( - path=str(SOURCE_CODE_ROOT_PATH), - bundling=BundlingOptions( - image=DockerImage.from_build( - str(Path(__file__).parent), - build_args={"IMAGE": PythonVersion[PYTHON_RUNTIME_VERSION].value["image"]}, - ), - command=["bash", "-c", " && ".join(build_commands)], - ), - ), - ) - return layer.layer_version_arn - - -if __name__ == "__main__": - layer = LambdaLayerStack(handlers_dir="") - layer.create_resources() - - # Required for CDK CLI deploy - layer.app.synth() From 91b3bab8ccc17b956a906bebb493b15ef2fdd471 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sat, 3 Sep 2022 21:58:52 +0200 Subject: [PATCH 06/33] chore: migrate to CDK CLI stage 3 - off Docker --- tests/e2e/conftest.py | 54 +++++------ tests/e2e/event_handler/conftest.py | 13 +-- tests/e2e/event_handler/infrastructure.py | 11 ++- tests/e2e/lambda_layer/infrastructure.py | 53 +++------- tests/e2e/logger/conftest.py | 13 +-- tests/e2e/logger/infrastructure.py | 6 +- tests/e2e/metrics/conftest.py | 13 +-- tests/e2e/metrics/infrastructure.py | 6 +- .../tracer/{conftest.py => bkp_conftest.py} | 8 +- .../tracer/{test_tracer.py => bkp_tracer.py} | 6 -- tests/e2e/tracer/infrastructure.py | 30 +----- tests/e2e/utils/constants.py | 8 ++ tests/e2e/utils/infrastructure.py | 96 +++++++++---------- tests/e2e/utils/lambda_layer.py | 35 +++++++ 14 files changed, 150 insertions(+), 202 deletions(-) rename tests/e2e/tracer/{conftest.py => bkp_conftest.py} (60%) rename tests/e2e/tracer/{test_tracer.py => bkp_tracer.py} (97%) create mode 100644 tests/e2e/utils/constants.py create mode 100644 tests/e2e/utils/lambda_layer.py diff --git a/tests/e2e/conftest.py b/tests/e2e/conftest.py index 2fd03f822f3..418b4a742b2 100644 --- a/tests/e2e/conftest.py +++ b/tests/e2e/conftest.py @@ -1,36 +1,32 @@ -import pytest +# import pytest -from tests.e2e.lambda_layer.infrastructure import LambdaLayerStack -from 
tests.e2e.utils.infrastructure import deploy_once +# from tests.e2e.lambda_layer.infrastructure import build_layer +# from tests.e2e.utils.infrastructure import call_once -@pytest.fixture(scope="session") -def lambda_layer_arn(lambda_layer_deployment: dict): - yield lambda_layer_deployment.get("LayerArn") +# # @pytest.fixture(scope="session") +# # def lambda_layer_arn(lambda_layer_deployment: dict): +# # yield lambda_layer_deployment.get("LayerArn") -@pytest.fixture(scope="session") -def lambda_layer_deployment(request: pytest.FixtureRequest, tmp_path_factory: pytest.TempPathFactory, worker_id: str): - """Setup and teardown logic for E2E test infrastructure +# @pytest.fixture(scope="session", autouse=True) +# def lambda_layer_deployment(tmp_path_factory: pytest.TempPathFactory, worker_id: str): +# """Setup and teardown logic for E2E test infrastructure - Parameters - ---------- - request : pytest.FixtureRequest - pytest request fixture to introspect absolute path to test being executed - tmp_path_factory : pytest.TempPathFactory - pytest temporary path factory to discover shared tmp when multiple CPU processes are spun up - worker_id : str - pytest-xdist worker identification to detect whether parallelization is enabled +# Parameters +# ---------- +# tmp_path_factory : pytest.TempPathFactory +# pytest temporary path factory to discover shared tmp when multiple CPU processes are spun up +# worker_id : str +# pytest-xdist worker identification to detect whether parallelization is enabled - Yields - ------ - Dict[str, str] - CloudFormation Outputs from deployed infrastructure - """ - yield from deploy_once( - stack=LambdaLayerStack, - request=request, - tmp_path_factory=tmp_path_factory, - worker_id=worker_id, - layer_arn="", - ) +# Yields +# ------ +# Dict[str, str] +# CloudFormation Outputs from deployed infrastructure +# """ +# yield from call_once( +# callable=build_layer, +# tmp_path_factory=tmp_path_factory, +# worker_id=worker_id, +# ) diff --git 
a/tests/e2e/event_handler/conftest.py b/tests/e2e/event_handler/conftest.py index 207ec443456..43941946ac7 100644 --- a/tests/e2e/event_handler/conftest.py +++ b/tests/e2e/event_handler/conftest.py @@ -1,27 +1,18 @@ -from pathlib import Path - import pytest from tests.e2e.event_handler.infrastructure import EventHandlerStack @pytest.fixture(autouse=True, scope="module") -def infrastructure(request: pytest.FixtureRequest, lambda_layer_arn: str): +def infrastructure(): """Setup and teardown logic for E2E test infrastructure - Parameters - ---------- - request : pytest.FixtureRequest - pytest request fixture to introspect absolute path to test being executed - lambda_layer_arn : str - Lambda Layer ARN - Yields ------ Dict[str, str] CloudFormation Outputs from deployed infrastructure """ - stack = EventHandlerStack(handlers_dir=Path(f"{request.path.parent}/handlers"), layer_arn=lambda_layer_arn) + stack = EventHandlerStack() try: yield stack.deploy() finally: diff --git a/tests/e2e/event_handler/infrastructure.py b/tests/e2e/event_handler/infrastructure.py index 735261138f3..bddf8c4d548 100644 --- a/tests/e2e/event_handler/infrastructure.py +++ b/tests/e2e/event_handler/infrastructure.py @@ -16,8 +16,8 @@ class EventHandlerStack(BaseInfrastructure): FEATURE_NAME = "event-handlers" - def __init__(self, handlers_dir: Path, feature_name: str = FEATURE_NAME, layer_arn: str = "") -> None: - super().__init__(feature_name, handlers_dir, layer_arn) + def __init__(self, feature_name: str = FEATURE_NAME) -> None: + super().__init__(feature_name) def create_resources(self): functions = self.create_lambda_functions() @@ -28,7 +28,12 @@ def create_resources(self): self._create_lambda_function_url(function=functions["LambdaFunctionUrlHandler"]) def _create_alb(self, function: Function): - vpc = ec2.Vpc(self.stack, "EventHandlerVPC", max_azs=2) + vpc = ec2.Vpc.from_lookup( + self.stack, + "VPC", + is_default=True, + region=self.region, + ) alb = 
elbv2.ApplicationLoadBalancer(self.stack, "ALB", vpc=vpc, internet_facing=True) CfnOutput(self.stack, "ALBDnsName", value=alb.load_balancer_dns_name) diff --git a/tests/e2e/lambda_layer/infrastructure.py b/tests/e2e/lambda_layer/infrastructure.py index 106511fdac8..b1ade8f4626 100644 --- a/tests/e2e/lambda_layer/infrastructure.py +++ b/tests/e2e/lambda_layer/infrastructure.py @@ -1,47 +1,18 @@ -from pathlib import Path +import logging +import subprocess -from aws_cdk import AssetStaging, BundlingOptions, CfnOutput, DockerImage -from aws_cdk.aws_lambda import Code, LayerVersion +from tests.e2e.utils.infrastructure import CDK_OUT_PATH, SOURCE_CODE_ROOT_PATH -from tests.e2e.utils.infrastructure import ( - PYTHON_RUNTIME_VERSION, - SOURCE_CODE_ROOT_PATH, - BaseInfrastructure, - PythonVersion, - logger, -) +logger = logging.getLogger(__name__) -class LambdaLayerStack(BaseInfrastructure): - FEATURE_NAME = "lambda-layer" +def build_layer(feature_name: str = "") -> str: + LAYER_BUILD_PATH = CDK_OUT_PATH / f"layer_build_{feature_name}" - def __init__(self, handlers_dir: Path, feature_name: str = FEATURE_NAME, layer_arn: str = "") -> None: - super().__init__(feature_name, handlers_dir, layer_arn) + # TODO: Check if source code hasn't changed (dirsum) + package = f"{SOURCE_CODE_ROOT_PATH}\[pydantic\]" + build_args = "--platform manylinux1_x86_64 --only-binary=:all: --upgrade" + build_command = f"pip install {package} {build_args} --target {LAYER_BUILD_PATH}/python" + subprocess.run(build_command, shell=True) - def create_resources(self): - layer = self._create_layer() - CfnOutput(self.stack, "LayerArn", value=layer) - - def _create_layer(self) -> str: - logger.debug("Creating Lambda Layer with latest source code available") - output_dir = Path(str(AssetStaging.BUNDLING_OUTPUT_DIR), "python") - input_dir = Path(str(AssetStaging.BUNDLING_INPUT_DIR), "aws_lambda_powertools") - - build_commands = [f"pip install .[pydantic] -t {output_dir}", f"cp -R {input_dir} {output_dir}"] - 
layer = LayerVersion( - self.stack, - "aws-lambda-powertools-e2e-test", - layer_version_name="aws-lambda-powertools-e2e-test", - compatible_runtimes=[PythonVersion[PYTHON_RUNTIME_VERSION].value["runtime"]], - code=Code.from_asset( - path=str(SOURCE_CODE_ROOT_PATH), - bundling=BundlingOptions( - image=DockerImage.from_build( - str(Path(__file__).parent), - build_args={"IMAGE": PythonVersion[PYTHON_RUNTIME_VERSION].value["image"]}, - ), - command=["bash", "-c", " && ".join(build_commands)], - ), - ), - ) - return layer.layer_version_arn + return str(LAYER_BUILD_PATH) diff --git a/tests/e2e/logger/conftest.py b/tests/e2e/logger/conftest.py index 82a89314258..c02c63515d4 100644 --- a/tests/e2e/logger/conftest.py +++ b/tests/e2e/logger/conftest.py @@ -1,27 +1,18 @@ -from pathlib import Path - import pytest from tests.e2e.logger.infrastructure import LoggerStack @pytest.fixture(autouse=True, scope="module") -def infrastructure(request: pytest.FixtureRequest, lambda_layer_arn: str): +def infrastructure(): """Setup and teardown logic for E2E test infrastructure - Parameters - ---------- - request : pytest.FixtureRequest - pytest request fixture to introspect absolute path to test being executed - lambda_layer_arn : str - Lambda Layer ARN - Yields ------ Dict[str, str] CloudFormation Outputs from deployed infrastructure """ - stack = LoggerStack(handlers_dir=Path(f"{request.path.parent}/handlers"), layer_arn=lambda_layer_arn) + stack = LoggerStack() try: yield stack.deploy() finally: diff --git a/tests/e2e/logger/infrastructure.py b/tests/e2e/logger/infrastructure.py index 68aaa8eb38a..fa8d20af534 100644 --- a/tests/e2e/logger/infrastructure.py +++ b/tests/e2e/logger/infrastructure.py @@ -1,13 +1,11 @@ -from pathlib import Path - from tests.e2e.utils.infrastructure import BaseInfrastructure class LoggerStack(BaseInfrastructure): FEATURE_NAME = "logger" - def __init__(self, handlers_dir: Path, feature_name: str = FEATURE_NAME, layer_arn: str = "") -> None: - 
super().__init__(feature_name, handlers_dir, layer_arn) + def __init__(self, feature_name: str = FEATURE_NAME) -> None: + super().__init__(feature_name) def create_resources(self): self.create_lambda_functions() diff --git a/tests/e2e/metrics/conftest.py b/tests/e2e/metrics/conftest.py index 663c8845be4..bb15a12f40e 100644 --- a/tests/e2e/metrics/conftest.py +++ b/tests/e2e/metrics/conftest.py @@ -1,27 +1,18 @@ -from pathlib import Path - import pytest from tests.e2e.metrics.infrastructure import MetricsStack @pytest.fixture(autouse=True, scope="module") -def infrastructure(request: pytest.FixtureRequest, lambda_layer_arn: str): +def infrastructure(): """Setup and teardown logic for E2E test infrastructure - Parameters - ---------- - request : pytest.FixtureRequest - pytest request fixture to introspect absolute path to test being executed - lambda_layer_arn : str - Lambda Layer ARN - Yields ------ Dict[str, str] CloudFormation Outputs from deployed infrastructure """ - stack = MetricsStack(handlers_dir=Path(f"{request.path.parent}/handlers"), layer_arn=lambda_layer_arn) + stack = MetricsStack() try: yield stack.deploy() finally: diff --git a/tests/e2e/metrics/infrastructure.py b/tests/e2e/metrics/infrastructure.py index 9afa59bb5cd..76aab7fd7c3 100644 --- a/tests/e2e/metrics/infrastructure.py +++ b/tests/e2e/metrics/infrastructure.py @@ -1,13 +1,11 @@ -from pathlib import Path - from tests.e2e.utils.infrastructure import BaseInfrastructure class MetricsStack(BaseInfrastructure): FEATURE_NAME = "metrics" - def __init__(self, handlers_dir: Path, feature_name: str = FEATURE_NAME, layer_arn: str = "") -> None: - super().__init__(feature_name, handlers_dir, layer_arn) + def __init__(self, feature_name: str = FEATURE_NAME) -> None: + super().__init__(feature_name) def create_resources(self): self.create_lambda_functions() diff --git a/tests/e2e/tracer/conftest.py b/tests/e2e/tracer/bkp_conftest.py similarity index 60% rename from tests/e2e/tracer/conftest.py rename to 
tests/e2e/tracer/bkp_conftest.py index 27005e9fe69..593ca614f54 100644 --- a/tests/e2e/tracer/conftest.py +++ b/tests/e2e/tracer/bkp_conftest.py @@ -1,19 +1,15 @@ -from pathlib import Path - import pytest from tests.e2e.tracer.infrastructure import TracerStack @pytest.fixture(autouse=True, scope="module") -def infrastructure(request: pytest.FixtureRequest, lambda_layer_arn: str): +def infrastructure(lambda_layer_arn: str): # # def infrastructure(request: pytest.FixtureRequest): """Setup and teardown logic for E2E test infrastructure Parameters ---------- - request : pytest.FixtureRequest - pytest request fixture to introspect absolute path to test being executed lambda_layer_arn : str Lambda Layer ARN @@ -22,7 +18,7 @@ def infrastructure(request: pytest.FixtureRequest, lambda_layer_arn: str): Dict[str, str] CloudFormation Outputs from deployed infrastructure """ - stack = TracerStack(handlers_dir=Path(f"{request.path.parent}/handlers"), layer_arn=lambda_layer_arn) + stack = TracerStack(layer_arn=lambda_layer_arn) try: yield stack.deploy() finally: diff --git a/tests/e2e/tracer/test_tracer.py b/tests/e2e/tracer/bkp_tracer.py similarity index 97% rename from tests/e2e/tracer/test_tracer.py rename to tests/e2e/tracer/bkp_tracer.py index a5836a336de..de25bc02ebf 100644 --- a/tests/e2e/tracer/test_tracer.py +++ b/tests/e2e/tracer/bkp_tracer.py @@ -35,12 +35,6 @@ def async_fn(infrastructure: dict) -> str: return infrastructure.get("AsyncCapture", "") -# NOTE: Quick test to confirm VPC can be resolved -# def test_vpc(layer_cdk_cli): -def test_vpc(infrastructure): - pass - - def test_lambda_handler_trace_is_visible(basic_handler_fn_arn: str, basic_handler_fn: str): # GIVEN handler_name = basic_handler.lambda_handler.__name__ diff --git a/tests/e2e/tracer/infrastructure.py b/tests/e2e/tracer/infrastructure.py index 3d64a39168b..7fa6b3fe970 100644 --- a/tests/e2e/tracer/infrastructure.py +++ b/tests/e2e/tracer/infrastructure.py @@ -1,9 +1,5 @@ from pathlib import Path -from 
aws_cdk import aws_ec2 -from aws_cdk import aws_elasticloadbalancingv2 as elbv2 -from aws_cdk import aws_ssm - from tests.e2e.utils.data_builder import build_service_name from tests.e2e.utils.infrastructure import BaseInfrastructure @@ -16,28 +12,10 @@ class TracerStack(BaseInfrastructure): SERVICE_NAME: str = build_service_name() FEATURE_NAME = "tracer" - def __init__(self, handlers_dir: Path, feature_name: str = FEATURE_NAME, layer_arn: str = "") -> None: - super().__init__(feature_name, handlers_dir, layer_arn) + def __init__(self, feature_name: str = FEATURE_NAME) -> None: + super().__init__(feature_name) def create_resources(self) -> None: # NOTE: Commented out Lambda fns as we don't need them now - # env_vars = {"POWERTOOLS_SERVICE_NAME": self.SERVICE_NAME} - # self.create_lambda_functions(function_props={"environment": env_vars}) - - # NOTE: Test VPC can be looked up - vpc = aws_ec2.Vpc.from_lookup( - self.stack, - "VPC", - is_default=True, - region=self.region, - ) - - aws_ssm.StringParameter(self.stack, "MyParam", string_value="blah", parameter_name="/dummy/cdk/param") - - # NOTE: Tokens work, but `lookup` doesn't due to context being populated by the CLI - latest_string_token = aws_ssm.StringParameter.value_for_string_parameter(self.stack, "/db/proxy_arn") - - # alb = elbv2.ApplicationLoadBalancer(self.stack, "pqp", vpc=vpc, internet_facing=True) - # self.add_cfn_output(name="ALB", value=alb.load_balancer_dns_name, arn=alb.load_balancer_arn) - self.add_cfn_output(name="ProxyArn", value=latest_string_token) - self.add_cfn_output(name="LookupVPC", value=vpc.vpc_arn) + env_vars = {"POWERTOOLS_SERVICE_NAME": self.SERVICE_NAME} + self.create_lambda_functions(function_props={"environment": env_vars}) diff --git a/tests/e2e/utils/constants.py b/tests/e2e/utils/constants.py new file mode 100644 index 00000000000..445c9f00113 --- /dev/null +++ b/tests/e2e/utils/constants.py @@ -0,0 +1,8 @@ +import sys + +from aws_lambda_powertools import PACKAGE_PATH + 
+PYTHON_RUNTIME_VERSION = f"V{''.join(map(str, sys.version_info[:2]))}" +SOURCE_CODE_ROOT_PATH = PACKAGE_PATH.parent +CDK_OUT_PATH = SOURCE_CODE_ROOT_PATH / "cdk.out" +LAYER_BUILD_PATH = CDK_OUT_PATH / "layer_build" diff --git a/tests/e2e/utils/infrastructure.py b/tests/e2e/utils/infrastructure.py index 94de77e04ff..b704b3a5f95 100644 --- a/tests/e2e/utils/infrastructure.py +++ b/tests/e2e/utils/infrastructure.py @@ -5,9 +5,8 @@ import sys import textwrap from abc import ABC, abstractmethod -from enum import Enum from pathlib import Path -from typing import Dict, Generator, Optional, Tuple, Type +from typing import Callable, Dict, Generator, Optional, Tuple from uuid import uuid4 import boto3 @@ -17,11 +16,8 @@ from filelock import FileLock from mypy_boto3_cloudformation import CloudFormationClient -from aws_lambda_powertools import PACKAGE_PATH - -PYTHON_RUNTIME_VERSION = f"V{''.join(map(str, sys.version_info[:2]))}" -SOURCE_CODE_ROOT_PATH = PACKAGE_PATH.parent -CDK_OUT_PATH = SOURCE_CODE_ROOT_PATH / "cdk.out" +from tests.e2e.lambda_layer.infrastructure import build_layer +from tests.e2e.utils.constants import CDK_OUT_PATH, PYTHON_RUNTIME_VERSION, SOURCE_CODE_ROOT_PATH logger = logging.getLogger(__name__) @@ -36,20 +32,12 @@ def __call__(self) -> Tuple[dict, str]: ... 
-class PythonVersion(Enum): - V37 = {"runtime": Runtime.PYTHON_3_7, "image": Runtime.PYTHON_3_7.bundling_image.image} - V38 = {"runtime": Runtime.PYTHON_3_8, "image": Runtime.PYTHON_3_8.bundling_image.image} - V39 = {"runtime": Runtime.PYTHON_3_9, "image": Runtime.PYTHON_3_9.bundling_image.image} - - class BaseInfrastructure(ABC): RANDOM_STACK_VALUE: str = f"{uuid4()}" - def __init__(self, feature_name: str, handlers_dir: Path, layer_arn: str = "") -> None: + def __init__(self, feature_name: str) -> None: self.feature_name = feature_name self.stack_name = f"test{PYTHON_RUNTIME_VERSION}-{feature_name}-{self.RANDOM_STACK_VALUE}" - self.handlers_dir = handlers_dir - self.layer_arn = layer_arn self.stack_outputs: Dict[str, str] = {} self.stack_outputs_file = f"{CDK_OUT_PATH / self.feature_name}_stack_outputs.json" # tracer_stack_outputs.json @@ -66,6 +54,7 @@ def __init__(self, feature_name: str, handlers_dir: Path, layer_arn: str = "") - self._feature_path = Path(sys.modules[self.__class__.__module__].__file__).parent self._feature_infra_class_name = self.__class__.__name__ self._feature_infra_module_path = self._feature_path / "infrastructure" + self._handlers_dir = self._feature_path / "handlers" def create_lambda_functions(self, function_props: Optional[Dict] = None) -> Dict[str, Function]: """Create Lambda functions available under handlers_dir @@ -101,16 +90,26 @@ def create_lambda_functions(self, function_props: Optional[Dict] = None) -> Dict self.create_lambda_functions(function_props={"runtime": Runtime.PYTHON_3_7) ``` """ - handlers = list(self.handlers_dir.rglob("*.py")) - source = Code.from_asset(f"{self.handlers_dir}") + if not self._handlers_dir.exists(): + raise RuntimeError(f"Handlers dir '{self._handlers_dir}' must exist for functions to be created.") + + layer = LayerVersion( + self.stack, + "aws-lambda-powertools-e2e-test", + layer_version_name="aws-lambda-powertools-e2e-test", + compatible_runtimes=[ + Runtime.PYTHON_3_7, + Runtime.PYTHON_3_8, + 
Runtime.PYTHON_3_9, + ], + code=Code.from_asset(path=build_layer(self.feature_name)), + # code=Code.from_asset(path=f"{LAYER_BUILD_PATH}"), + ) + + handlers = list(self._handlers_dir.rglob("*.py")) + source = Code.from_asset(f"{self._handlers_dir}") logger.debug(f"Creating functions for handlers: {handlers}") - if not self.layer_arn: - raise ValueError( - """Lambda Layer ARN cannot be empty when creating Lambda functions. - Make sure to inject `lambda_layer_arn` fixture and pass at the constructor level""" - ) - layer = LayerVersion.from_layer_version_arn(self.stack, "layer-arn", layer_version_arn=self.layer_arn) function_settings_override = function_props or {} output: Dict[str, Function] = {} @@ -177,7 +176,7 @@ def _deploy_stack(self, cdk_app_file: str) -> Dict: Stack Output values as dict """ stack_file = self._create_temp_cdk_app() - command = f"cdk deploy --app 'python {stack_file}' -O {self.stack_outputs_file}" + command = f"npx cdk deploy --app 'python {stack_file}' -O {self.stack_outputs_file} --require-approval=never" # CDK launches a background task, so we must wait subprocess.check_output(command, shell=True) @@ -213,7 +212,7 @@ def _create_temp_cdk_app(self): code = f""" from {infra_module} import {self._feature_infra_class_name} - stack = {self._feature_infra_class_name}(handlers_dir="{self.handlers_dir}") + stack = {self._feature_infra_class_name}() stack.create_resources() stack.app.synth() """ @@ -272,53 +271,50 @@ def add_cfn_output(self, name: str, value: str, arn: str = ""): CfnOutput(self.stack, f"{name}Arn", value=arn) -def deploy_once( - stack: Type[BaseInfrastructure], - request: pytest.FixtureRequest, +def call_once( + callable: Callable, tmp_path_factory: pytest.TempPathFactory, worker_id: str, - layer_arn: str, -) -> Generator[Dict[str, str], None, None]: - """Deploys provided stack once whether CPU parallelization is enabled or not + callback: Optional[Callable] = None, +) -> Generator[object, None, None]: + """Call function and 
serialize results once whether CPU parallelization is enabled or not Parameters ---------- - stack : Type[BaseInfrastructure] - stack class to instantiate and deploy, for example MetricStack. - Not to be confused with class instance (MetricStack()). - request : pytest.FixtureRequest - pytest request fixture to introspect absolute path to test being executed + callable : Callable + Function to call once and JSON serialize result whether parallel test is enabled or not. tmp_path_factory : pytest.TempPathFactory pytest temporary path factory to discover shared tmp when multiple CPU processes are spun up worker_id : str pytest-xdist worker identification to detect whether parallelization is enabled + callback : Callable + Function to call when job is complete. Yields ------ - Generator[Dict[str, str], None, None] - stack CloudFormation outputs + Generator[object, None, None] + Callable output when called """ - handlers_dir = f"{request.node.path.parent}/handlers" - stack = stack(handlers_dir=Path(handlers_dir), layer_arn=layer_arn) try: if worker_id == "master": - # no parallelization, deploy stack and let fixture be cached - yield stack.deploy() + # no parallelization, call and return + yield callable() else: # tmp dir shared by all workers root_tmp_dir = tmp_path_factory.getbasetemp().parent cache = root_tmp_dir / f"{PYTHON_RUNTIME_VERSION}_cache.json" with FileLock(f"{cache}.lock"): - # If cache exists, return stack outputs back + # If cache exists, return callable outputs back # otherwise it's the first run by the main worker - # deploy and return stack outputs so subsequent workers can reuse + # run and return callable outputs for subsequent workers reuse if cache.is_file(): - stack_outputs = json.loads(cache.read_text()) + callable_result = json.loads(cache.read_text()) else: - stack_outputs: Dict = stack.deploy() - cache.write_text(json.dumps(stack_outputs)) - yield stack_outputs + callable_result: Dict = callable() + 
cache.write_text(json.dumps(callable_result)) + yield callable_result finally: - stack.delete() + if callback is not None: + callback() diff --git a/tests/e2e/utils/lambda_layer.py b/tests/e2e/utils/lambda_layer.py new file mode 100644 index 00000000000..6f6ea4e362d --- /dev/null +++ b/tests/e2e/utils/lambda_layer.py @@ -0,0 +1,35 @@ +import subprocess +from pathlib import Path + +from tests.e2e.utils.constants import LAYER_BUILD_PATH, SOURCE_CODE_ROOT_PATH + + +# TODO: Move to BaseInfra to ensure outdir includes feature_name +def build_layer() -> str: + # TODO: Check if source code hasn't changed (dirsum) + package = f"{SOURCE_CODE_ROOT_PATH}\[pydantic\]" + build_args = "--platform manylinux1_x86_64 --only-binary=:all: --upgrade" + build_command = f"pip install {package} {build_args} --target {LAYER_BUILD_PATH}/python" + subprocess.run(build_command, shell=True) + + return str(LAYER_BUILD_PATH) + + +# NOTE: For later +class LambdaLayer: + def __init__(self, output_dir: Path, package_name: str, build_command: str): + self.output_dir = output_dir + self.package_name = package_name + self.build_command = build_command + + def build(self): + if not self.output_dir.exists(): + self.output_dir.mkdir() + + subprocess.run(self.build_command, shell=True) + + def before(self): + ... + + def after(self): + ... 
From c0ed5e5591d4d9ba1aef4b37cdc7ec8c241b41d5 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sun, 4 Sep 2022 08:54:07 +0200 Subject: [PATCH 07/33] chore: migrate to CDK CLI stage 4 - Parallel deployment works --- parallel_run_e2e.py | 2 +- tests/e2e/logger/conftest.py | 2 +- tests/e2e/metrics/conftest.py | 2 +- tests/e2e/utils/infrastructure.py | 26 +++++++++++++++++++++----- tests/e2e/utils/lambda_layer.py | 14 -------------- 5 files changed, 24 insertions(+), 22 deletions(-) diff --git a/parallel_run_e2e.py b/parallel_run_e2e.py index b9603701e5e..4cba3a11c88 100755 --- a/parallel_run_e2e.py +++ b/parallel_run_e2e.py @@ -5,7 +5,7 @@ def main(): features = Path("tests/e2e").rglob("infrastructure.py") - workers = len(list(features)) - 1 + workers = len(list(features)) - 2 # NOTE: Return to 1 once Lambda Layer infra is removed command = f"poetry run pytest -n {workers} --dist loadfile -o log_cli=true tests/e2e" print(f"Running E2E tests with: {command}") diff --git a/tests/e2e/logger/conftest.py b/tests/e2e/logger/conftest.py index c02c63515d4..a31be77031b 100644 --- a/tests/e2e/logger/conftest.py +++ b/tests/e2e/logger/conftest.py @@ -4,7 +4,7 @@ @pytest.fixture(autouse=True, scope="module") -def infrastructure(): +def infrastructure(tmp_path_factory, worker_id): """Setup and teardown logic for E2E test infrastructure Yields diff --git a/tests/e2e/metrics/conftest.py b/tests/e2e/metrics/conftest.py index bb15a12f40e..2f72e7950be 100644 --- a/tests/e2e/metrics/conftest.py +++ b/tests/e2e/metrics/conftest.py @@ -4,7 +4,7 @@ @pytest.fixture(autouse=True, scope="module") -def infrastructure(): +def infrastructure(tmp_path_factory, worker_id): """Setup and teardown logic for E2E test infrastructure Yields diff --git a/tests/e2e/utils/infrastructure.py b/tests/e2e/utils/infrastructure.py index b704b3a5f95..af8ea7d5f8f 100644 --- a/tests/e2e/utils/infrastructure.py +++ b/tests/e2e/utils/infrastructure.py @@ -16,7 +16,7 @@ from filelock import FileLock from 
mypy_boto3_cloudformation import CloudFormationClient -from tests.e2e.lambda_layer.infrastructure import build_layer +# from tests.e2e.lambda_layer.infrastructure import build_layer from tests.e2e.utils.constants import CDK_OUT_PATH, PYTHON_RUNTIME_VERSION, SOURCE_CODE_ROOT_PATH logger = logging.getLogger(__name__) @@ -32,6 +32,18 @@ def __call__(self) -> Tuple[dict, str]: ... +def build_layer(out_dir: Path, feature_name: str = "") -> str: + LAYER_BUILD_PATH = out_dir / f"layer_build_{feature_name}" + + # TODO: Check if source code hasn't changed (dirsum) + package = f"{SOURCE_CODE_ROOT_PATH}\[pydantic\]" + build_args = "--platform manylinux1_x86_64 --only-binary=:all: --upgrade" + build_command = f"pip install {package} {build_args} --target {LAYER_BUILD_PATH}/python" + subprocess.run(build_command, shell=True) + + return str(LAYER_BUILD_PATH) + + class BaseInfrastructure(ABC): RANDOM_STACK_VALUE: str = f"{uuid4()}" @@ -55,6 +67,7 @@ def __init__(self, feature_name: str) -> None: self._feature_infra_class_name = self.__class__.__name__ self._feature_infra_module_path = self._feature_path / "infrastructure" self._handlers_dir = self._feature_path / "handlers" + self._cdk_out_dir = CDK_OUT_PATH / self.feature_name def create_lambda_functions(self, function_props: Optional[Dict] = None) -> Dict[str, Function]: """Create Lambda functions available under handlers_dir @@ -102,8 +115,7 @@ def create_lambda_functions(self, function_props: Optional[Dict] = None) -> Dict Runtime.PYTHON_3_8, Runtime.PYTHON_3_9, ], - code=Code.from_asset(path=build_layer(self.feature_name)), - # code=Code.from_asset(path=f"{LAYER_BUILD_PATH}"), + code=Code.from_asset(path=build_layer(out_dir=self._cdk_out_dir, feature_name=self.feature_name)), ) handlers = list(self._handlers_dir.rglob("*.py")) @@ -176,10 +188,14 @@ def _deploy_stack(self, cdk_app_file: str) -> Dict: Stack Output values as dict """ stack_file = self._create_temp_cdk_app() - command = f"npx cdk deploy --app 'python 
{stack_file}' -O {self.stack_outputs_file} --require-approval=never" + synth_command = f"npx cdk synth --app 'python {stack_file}' -o {self._cdk_out_dir}" + deploy_command = ( + f"npx cdk deploy --app '{self._cdk_out_dir}' -O {self.stack_outputs_file} --require-approval=never" + ) # CDK launches a background task, so we must wait - subprocess.check_output(command, shell=True) + subprocess.check_output(synth_command, shell=True) + subprocess.check_output(deploy_command, shell=True) return self._read_stack_output() def _sync_stack_name(self, stack_output: Dict): diff --git a/tests/e2e/utils/lambda_layer.py b/tests/e2e/utils/lambda_layer.py index 6f6ea4e362d..2a46387b4c1 100644 --- a/tests/e2e/utils/lambda_layer.py +++ b/tests/e2e/utils/lambda_layer.py @@ -1,20 +1,6 @@ import subprocess from pathlib import Path -from tests.e2e.utils.constants import LAYER_BUILD_PATH, SOURCE_CODE_ROOT_PATH - - -# TODO: Move to BaseInfra to ensure outdir includes feature_name -def build_layer() -> str: - # TODO: Check if source code hasn't changed (dirsum) - package = f"{SOURCE_CODE_ROOT_PATH}\[pydantic\]" - build_args = "--platform manylinux1_x86_64 --only-binary=:all: --upgrade" - build_command = f"pip install {package} {build_args} --target {LAYER_BUILD_PATH}/python" - subprocess.run(build_command, shell=True) - - return str(LAYER_BUILD_PATH) - - # NOTE: For later class LambdaLayer: def __init__(self, output_dir: Path, package_name: str, build_command: str): From 8aef75f565cec9b6031480f2bb531f6db9825616 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sun, 4 Sep 2022 19:03:08 +0200 Subject: [PATCH 08/33] chore: migrate to CDK CLI stage 5 - Abstract Local Powertools Layer --- parallel_run_e2e.py | 3 +-- tests/e2e/lambda_layer/Dockerfile | 14 ---------- tests/e2e/lambda_layer/infrastructure.py | 18 ------------- tests/e2e/utils/infrastructure.py | 27 +++++++------------ tests/e2e/utils/lambda_layer.py | 21 --------------- .../e2e/{ => utils}/lambda_layer/__init__.py | 0 
tests/e2e/utils/lambda_layer/base.py | 21 +++++++++++++++ .../utils/lambda_layer/powertools_layer.py | 25 +++++++++++++++++ 8 files changed, 56 insertions(+), 73 deletions(-) delete mode 100644 tests/e2e/lambda_layer/Dockerfile delete mode 100644 tests/e2e/lambda_layer/infrastructure.py delete mode 100644 tests/e2e/utils/lambda_layer.py rename tests/e2e/{ => utils}/lambda_layer/__init__.py (100%) create mode 100644 tests/e2e/utils/lambda_layer/base.py create mode 100644 tests/e2e/utils/lambda_layer/powertools_layer.py diff --git a/parallel_run_e2e.py b/parallel_run_e2e.py index 4cba3a11c88..745f1392f67 100755 --- a/parallel_run_e2e.py +++ b/parallel_run_e2e.py @@ -5,10 +5,9 @@ def main(): features = Path("tests/e2e").rglob("infrastructure.py") - workers = len(list(features)) - 2 # NOTE: Return to 1 once Lambda Layer infra is removed + workers = len(list(features)) - 1 command = f"poetry run pytest -n {workers} --dist loadfile -o log_cli=true tests/e2e" - print(f"Running E2E tests with: {command}") subprocess.run(command.split(), shell=False) diff --git a/tests/e2e/lambda_layer/Dockerfile b/tests/e2e/lambda_layer/Dockerfile deleted file mode 100644 index 586847bb3fa..00000000000 --- a/tests/e2e/lambda_layer/Dockerfile +++ /dev/null @@ -1,14 +0,0 @@ -# Image used by CDK's LayerVersion construct to create Lambda Layer with Powertools -# library code. -# The correct AWS SAM build image based on the runtime of the function will be -# passed as build arg. The default allows to do `docker build .` when testing. 
-ARG IMAGE=public.ecr.aws/sam/build-python3.7 -FROM $IMAGE - -ARG PIP_INDEX_URL -ARG PIP_EXTRA_INDEX_URL -ARG HTTPS_PROXY - -RUN pip install --upgrade pip - -CMD [ "python" ] diff --git a/tests/e2e/lambda_layer/infrastructure.py b/tests/e2e/lambda_layer/infrastructure.py deleted file mode 100644 index b1ade8f4626..00000000000 --- a/tests/e2e/lambda_layer/infrastructure.py +++ /dev/null @@ -1,18 +0,0 @@ -import logging -import subprocess - -from tests.e2e.utils.infrastructure import CDK_OUT_PATH, SOURCE_CODE_ROOT_PATH - -logger = logging.getLogger(__name__) - - -def build_layer(feature_name: str = "") -> str: - LAYER_BUILD_PATH = CDK_OUT_PATH / f"layer_build_{feature_name}" - - # TODO: Check if source code hasn't changed (dirsum) - package = f"{SOURCE_CODE_ROOT_PATH}\[pydantic\]" - build_args = "--platform manylinux1_x86_64 --only-binary=:all: --upgrade" - build_command = f"pip install {package} {build_args} --target {LAYER_BUILD_PATH}/python" - subprocess.run(build_command, shell=True) - - return str(LAYER_BUILD_PATH) diff --git a/tests/e2e/utils/infrastructure.py b/tests/e2e/utils/infrastructure.py index af8ea7d5f8f..736da6ab15c 100644 --- a/tests/e2e/utils/infrastructure.py +++ b/tests/e2e/utils/infrastructure.py @@ -16,8 +16,8 @@ from filelock import FileLock from mypy_boto3_cloudformation import CloudFormationClient -# from tests.e2e.lambda_layer.infrastructure import build_layer from tests.e2e.utils.constants import CDK_OUT_PATH, PYTHON_RUNTIME_VERSION, SOURCE_CODE_ROOT_PATH +from tests.e2e.utils.lambda_layer.powertools_layer import LocalLambdaPowertoolsLayer logger = logging.getLogger(__name__) @@ -32,18 +32,6 @@ def __call__(self) -> Tuple[dict, str]: ... 
-def build_layer(out_dir: Path, feature_name: str = "") -> str: - LAYER_BUILD_PATH = out_dir / f"layer_build_{feature_name}" - - # TODO: Check if source code hasn't changed (dirsum) - package = f"{SOURCE_CODE_ROOT_PATH}\[pydantic\]" - build_args = "--platform manylinux1_x86_64 --only-binary=:all: --upgrade" - build_command = f"pip install {package} {build_args} --target {LAYER_BUILD_PATH}/python" - subprocess.run(build_command, shell=True) - - return str(LAYER_BUILD_PATH) - - class BaseInfrastructure(ABC): RANDOM_STACK_VALUE: str = f"{uuid4()}" @@ -67,6 +55,7 @@ def __init__(self, feature_name: str) -> None: self._feature_infra_class_name = self.__class__.__name__ self._feature_infra_module_path = self._feature_path / "infrastructure" self._handlers_dir = self._feature_path / "handlers" + # TODO: Change to cdk_feature_dir self._cdk_out_dir = CDK_OUT_PATH / self.feature_name def create_lambda_functions(self, function_props: Optional[Dict] = None) -> Dict[str, Function]: @@ -106,6 +95,8 @@ def create_lambda_functions(self, function_props: Optional[Dict] = None) -> Dict if not self._handlers_dir.exists(): raise RuntimeError(f"Handlers dir '{self._handlers_dir}' must exist for functions to be created.") + layer_build = LocalLambdaPowertoolsLayer(output_dir=self._cdk_out_dir).build() + layer = LayerVersion( self.stack, "aws-lambda-powertools-e2e-test", @@ -115,7 +106,7 @@ def create_lambda_functions(self, function_props: Optional[Dict] = None) -> Dict Runtime.PYTHON_3_8, Runtime.PYTHON_3_9, ], - code=Code.from_asset(path=build_layer(out_dir=self._cdk_out_dir, feature_name=self.feature_name)), + code=Code.from_asset(path=layer_build), ) handlers = list(self._handlers_dir.rglob("*.py")) @@ -222,7 +213,7 @@ def _create_temp_cdk_app(self): This allows us to keep our BaseInfrastructure while supporting context lookups. """ - # NOTE: Confirm infrastructure module exists before proceeding. + # TODO: Confirm infrastructure module exists before proceeding. 
# tests.e2e.tracer.infrastructure infra_module = str(self._feature_infra_module_path.relative_to(SOURCE_CODE_ROOT_PATH)).replace(os.sep, ".") @@ -233,10 +224,10 @@ def _create_temp_cdk_app(self): stack.app.synth() """ - if not CDK_OUT_PATH.is_dir(): - CDK_OUT_PATH.mkdir() + if not self._cdk_out_dir.is_dir(): + self._cdk_out_dir.mkdir(parents=True, exist_ok=True) - temp_file = CDK_OUT_PATH / f"{self.stack_name}_cdk_app.py" + temp_file = self._cdk_out_dir / f"{self.stack_name}_cdk_app.py" with temp_file.open("w") as fd: fd.write(textwrap.dedent(code)) diff --git a/tests/e2e/utils/lambda_layer.py b/tests/e2e/utils/lambda_layer.py deleted file mode 100644 index 2a46387b4c1..00000000000 --- a/tests/e2e/utils/lambda_layer.py +++ /dev/null @@ -1,21 +0,0 @@ -import subprocess -from pathlib import Path - -# NOTE: For later -class LambdaLayer: - def __init__(self, output_dir: Path, package_name: str, build_command: str): - self.output_dir = output_dir - self.package_name = package_name - self.build_command = build_command - - def build(self): - if not self.output_dir.exists(): - self.output_dir.mkdir() - - subprocess.run(self.build_command, shell=True) - - def before(self): - ... - - def after(self): - ... 
diff --git a/tests/e2e/lambda_layer/__init__.py b/tests/e2e/utils/lambda_layer/__init__.py similarity index 100% rename from tests/e2e/lambda_layer/__init__.py rename to tests/e2e/utils/lambda_layer/__init__.py diff --git a/tests/e2e/utils/lambda_layer/base.py b/tests/e2e/utils/lambda_layer/base.py new file mode 100644 index 00000000000..bb8dbab2be4 --- /dev/null +++ b/tests/e2e/utils/lambda_layer/base.py @@ -0,0 +1,21 @@ +from pathlib import Path + +from abc import ABC, abstractmethod + + +class BaseLocalLambdaLayer(ABC): + def __init__(self, output_dir: Path): + self.output_dir = output_dir / "layer_build" + self.target_dir = f"{self.output_dir}/python" + + @abstractmethod + def build(self) -> str: + raise NotImplementedError() + + def before_build(self): + if not self.output_dir.exists(): + # Create missing parent directories if missing + self.output_dir.mkdir(parents=True, exist_ok=True) + + def after_build(self): + ... diff --git a/tests/e2e/utils/lambda_layer/powertools_layer.py b/tests/e2e/utils/lambda_layer/powertools_layer.py new file mode 100644 index 00000000000..0f57ebaf173 --- /dev/null +++ b/tests/e2e/utils/lambda_layer/powertools_layer.py @@ -0,0 +1,25 @@ +import logging +import subprocess +from pathlib import Path + +from tests.e2e.utils.constants import SOURCE_CODE_ROOT_PATH +from tests.e2e.utils.lambda_layer.base import BaseLocalLambdaLayer + +logger = logging.getLogger(__name__) + + +class LocalLambdaPowertoolsLayer(BaseLocalLambdaLayer): + def __init__(self, output_dir: Path): + super().__init__(output_dir) + self.package = f"{SOURCE_CODE_ROOT_PATH}\[pydantic\]" + self.build_args = "--platform manylinux1_x86_64 --only-binary=:all: --upgrade" + self.build_command = f"python -m pip install {self.package} {self.build_args} --target {self.target_dir}" + + def build(self) -> str: + self.before_build() + + subprocess.run(self.build_command, shell=True) + + self.after_build() + + return str(self.output_dir) From 055e2f5092e539967e23e36aa8d17b2d0b5ba04b 
Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sun, 4 Sep 2022 20:20:23 +0200 Subject: [PATCH 09/33] chore: migrate to CDK CLI stage 6 - Build Layer if source has changed --- poetry.lock | 416 ++++-------------- pyproject.toml | 1 + tests/e2e/conftest.py | 49 +-- tests/e2e/utils/infrastructure.py | 17 +- tests/e2e/utils/lambda_layer/base.py | 15 +- .../utils/lambda_layer/powertools_layer.py | 31 +- 6 files changed, 155 insertions(+), 374 deletions(-) diff --git a/poetry.lock b/poetry.lock index eabd0ca4a92..b271f4741c5 100644 --- a/poetry.lock +++ b/poetry.lock @@ -7,14 +7,14 @@ optional = false python-versions = ">=3.5" [package.extras] -dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] -docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] -tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] -tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "cloudpickle"] +dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] +docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] +tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] +tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] [[package]] name = "aws-cdk-lib" -version = "2.41.0" +version = "2.42.1" description = "Version 2 of the AWS Cloud Development Kit 
library" category = "dev" optional = false @@ -28,14 +28,14 @@ typeguard = ">=2.13.3,<2.14.0" [[package]] name = "aws-cdk.aws-apigatewayv2-alpha" -version = "2.41.0a0" +version = "2.42.1a0" description = "The CDK Construct Library for AWS::APIGatewayv2" category = "dev" optional = false python-versions = "~=3.7" [package.dependencies] -aws-cdk-lib = ">=2.41.0,<3.0.0" +aws-cdk-lib = ">=2.42.1,<3.0.0" constructs = ">=10.0.0,<11.0.0" jsii = ">=1.67.0,<2.0.0" publication = ">=0.0.3" @@ -43,15 +43,15 @@ typeguard = ">=2.13.3,<2.14.0" [[package]] name = "aws-cdk.aws-apigatewayv2-integrations-alpha" -version = "2.41.0a0" +version = "2.42.1a0" description = "Integrations for AWS APIGateway V2" category = "dev" optional = false python-versions = "~=3.7" [package.dependencies] -aws-cdk-lib = ">=2.41.0,<3.0.0" -"aws-cdk.aws-apigatewayv2-alpha" = "2.41.0.a0" +aws-cdk-lib = ">=2.42.1,<3.0.0" +"aws-cdk.aws-apigatewayv2-alpha" = "2.42.1.a0" constructs = ">=10.0.0,<11.0.0" jsii = ">=1.67.0,<2.0.0" publication = ">=0.0.3" @@ -84,7 +84,7 @@ PyYAML = ">=5.3.1" stevedore = ">=1.20.0" [package.extras] -test = ["coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)", "toml", "beautifulsoup4 (>=4.8.0)", "pylint (==1.9.4)"] +test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)", "toml"] toml = ["toml"] yaml = ["pyyaml"] @@ -113,14 +113,14 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "boto3" -version = "1.24.72" +version = "1.24.76" description = "The AWS SDK for Python" category = "main" optional = false python-versions = ">= 3.7" [package.dependencies] -botocore = ">=1.27.72,<1.28.0" +botocore = ">=1.27.76,<1.28.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.6.0,<0.7.0" @@ -129,7 +129,7 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version 
= "1.27.72" +version = "1.27.76" description = "Low-level, data-driven core of boto 3." category = "main" optional = false @@ -143,41 +143,6 @@ urllib3 = ">=1.25.4,<1.27" [package.extras] crt = ["awscrt (==0.14.0)"] -[[package]] -name = "cairocffi" -version = "1.3.0" -description = "cffi-based cairo bindings for Python" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -cffi = ">=1.1.0" - -[package.extras] -xcb = ["xcffib (>=0.3.2)"] -test = ["pytest-isort", "pytest-flake8", "pytest-cov", "pytest-runner"] -doc = ["sphinx-rtd-theme", "sphinx"] - -[[package]] -name = "cairosvg" -version = "2.5.2" -description = "A Simple SVG Converter based on Cairo" -category = "dev" -optional = false -python-versions = ">=3.5" - -[package.dependencies] -cairocffi = "*" -cssselect2 = "*" -defusedxml = "*" -pillow = "*" -tinycss2 = "*" - -[package.extras] -doc = ["sphinx", "sphinx-rtd-theme"] -test = ["pytest-runner", "pytest-cov", "pytest-flake8", "pytest-isort"] - [[package]] name = "cattrs" version = "22.1.0" @@ -193,23 +158,12 @@ typing_extensions = {version = "*", markers = "python_version >= \"3.7\" and pyt [[package]] name = "certifi" -version = "2022.6.15.2" +version = "2022.9.14" description = "Python package for providing Mozilla's CA Bundle." category = "dev" optional = false python-versions = ">=3.6" -[[package]] -name = "cffi" -version = "1.15.1" -description = "Foreign Function Interface for Python calling C code." -category = "dev" -optional = false -python-versions = "*" - -[package.dependencies] -pycparser = "*" - [[package]] name = "charset-normalizer" version = "2.1.1" @@ -221,6 +175,14 @@ python-versions = ">=3.6.0" [package.extras] unicode_backport = ["unicodedata2"] +[[package]] +name = "checksumdir" +version = "1.2.0" +description = "Compute a single hash of the file contents of a directory." 
+category = "dev" +optional = false +python-versions = ">=3.6,<4.0" + [[package]] name = "click" version = "8.1.3" @@ -243,7 +205,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "constructs" -version = "10.1.102" +version = "10.1.107" description = "A programming model for software-defined state" category = "dev" optional = false @@ -268,22 +230,6 @@ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.1 [package.extras] toml = ["tomli"] -[[package]] -name = "cssselect2" -version = "0.6.0" -description = "CSS selectors for Python ElementTree" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -tinycss2 = "*" -webencodings = "*" - -[package.extras] -test = ["coverage", "pytest-isort", "pytest-flake8", "pytest-cov", "pytest"] -doc = ["sphinx-rtd-theme", "sphinx"] - [[package]] name = "decorator" version = "5.1.1" @@ -292,14 +238,6 @@ category = "dev" optional = false python-versions = ">=3.5" -[[package]] -name = "defusedxml" -version = "0.7.1" -description = "XML bomb protection for Python stdlib modules" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - [[package]] name = "dnspython" version = "2.2.1" @@ -309,8 +247,8 @@ optional = true python-versions = ">=3.6,<4.0" [package.extras] -dnssec = ["cryptography (>=2.6,<37.0)"] curio = ["curio (>=1.2,<2.0)", "sniffio (>=1.1,<2.0)"] +dnssec = ["cryptography (>=2.6,<37.0)"] doh = ["h2 (>=4.1.0)", "httpx (>=0.21.1)", "requests (>=2.23.0,<3.0.0)", "requests-toolbelt (>=0.9.1,<0.10.0)"] idna = ["idna (>=2.1,<4.0)"] trio = ["trio (>=0.14,<0.20)"] @@ -318,8 +256,8 @@ wmi = ["wmi (>=1.5.1,<2.0.0)"] [[package]] name = "email-validator" -version = "1.2.1" -description = "A robust email syntax and deliverability validation library." +version = "1.3.0" +description = "A robust email address syntax and deliverability validation library." 
category = "main" optional = true python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" @@ -360,14 +298,14 @@ testing = ["pre-commit"] [[package]] name = "fastjsonschema" -version = "2.16.1" +version = "2.16.2" description = "Fastest Python implementation of JSON schema" category = "main" optional = false python-versions = "*" [package.extras] -devel = ["colorama", "jsonschema", "json-spec", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] +devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] [[package]] name = "filelock" @@ -525,7 +463,7 @@ python-versions = "*" python-dateutil = ">=2.8.1" [package.extras] -dev = ["wheel", "flake8", "markdown", "twine"] +dev = ["flake8", "markdown", "twine", "wheel"] [[package]] name = "gitdb" @@ -571,9 +509,9 @@ typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} zipp = ">=0.5" [package.extras] -testing = ["importlib-resources (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-black (>=0.3.7)", "pytest-perf (>=0.9.2)", "flufl.flake8", "pyfakefs", "packaging", "pytest-enabler (>=1.3)", "pytest-cov", "pytest-flake8", "pytest-checkdocs (>=2.4)", "pytest (>=6)"] +docs = ["jaraco.packaging (>=9)", "rst.linker (>=1.9)", "sphinx"] perf = ["ipython"] -docs = ["rst.linker (>=1.9)", "jaraco.packaging (>=9)", "sphinx"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] [[package]] name = "iniconfig" @@ -592,10 +530,10 @@ optional = false python-versions = ">=3.6.1,<4.0" [package.extras] -pipfile_deprecated_finder = ["pipreqs", "requirementslib"] -requirements_deprecated_finder = ["pipreqs", "pip-api"] colors = ["colorama (>=0.4.3,<0.5.0)"] +pipfile_deprecated_finder = ["pipreqs", "requirementslib"] plugins = 
["setuptools"] +requirements_deprecated_finder = ["pip-api", "pipreqs"] [[package]] name = "jinja2" @@ -678,7 +616,7 @@ python-versions = ">=3.6" importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} [package.extras] -testing = ["pyyaml", "coverage"] +testing = ["coverage", "pyyaml"] [[package]] name = "markupsafe" @@ -719,8 +657,8 @@ packaging = "*" "ruamel.yaml" = "*" [package.extras] -test = ["flake8 (>=3.0)", "coverage"] -dev = ["pypandoc (>=1.4)", "flake8 (>=3.0)", "coverage"] +dev = ["coverage", "flake8 (>=3.0)", "pypandoc (>=1.4)"] +test = ["coverage", "flake8 (>=3.0)"] [[package]] name = "mkdocs" @@ -760,19 +698,17 @@ mkdocs = ">=0.17" [[package]] name = "mkdocs-material" -version = "8.5.0" +version = "8.5.2" description = "Documentation that simply works" category = "dev" optional = false python-versions = ">=3.7" [package.dependencies] -cairosvg = ">=2.5" jinja2 = ">=3.0.2" markdown = ">=3.2" mkdocs = ">=1.3.0" mkdocs-material-extensions = ">=1.0.3" -pillow = ">=8.0" pygments = ">=2.12" pymdown-extensions = ">=9.4" requests = ">=2.26" @@ -839,8 +775,8 @@ typing-extensions = ">=4.1.0" [[package]] name = "mypy-boto3-dynamodb" -version = "1.24.60" -description = "Type annotations for boto3.DynamoDB 1.24.60 service generated with mypy-boto3-builder 7.11.8" +version = "1.24.74" +description = "Type annotations for boto3.DynamoDB 1.24.74 service generated with mypy-boto3-builder 7.11.8" category = "dev" optional = false python-versions = ">=3.7" @@ -961,18 +897,6 @@ python-versions = ">= 3.6" mako = "*" markdown = ">=3.0" -[[package]] -name = "pillow" -version = "9.2.0" -description = "Python Imaging Library (Fork)" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.extras] -docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-issues (>=3.0.1)", "sphinx-removed-in", "sphinxext-opengraph"] -tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", 
"pytest", "pytest-cov", "pytest-timeout"] - [[package]] name = "platformdirs" version = "2.5.2" @@ -982,8 +906,8 @@ optional = false python-versions = ">=3.7" [package.extras] -docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"] -test = ["appdirs (==1.4.4)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)", "pytest (>=6)"] +docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx (>=4)", "sphinx-autodoc-typehints (>=1.12)"] +test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] [[package]] name = "pluggy" @@ -997,8 +921,8 @@ python-versions = ">=3.6" importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} [package.extras] -testing = ["pytest-benchmark", "pytest"] -dev = ["tox", "pre-commit"] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] [[package]] name = "publication" @@ -1032,14 +956,6 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -[[package]] -name = "pycparser" -version = "2.21" -description = "C parser in Python" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - [[package]] name = "pydantic" version = "1.10.2" @@ -1094,7 +1010,7 @@ optional = false python-versions = ">=3.6.8" [package.extras] -diagrams = ["railroad-diagrams", "jinja2"] +diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pytest" @@ -1161,7 +1077,7 @@ coverage = {version = ">=5.2.1", extras = ["toml"]} pytest = ">=4.6" [package.extras] -testing = ["virtualenv", "pytest-xdist", "six", "process-tests", "hunter", "fields"] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] [[package]] name = "pytest-forked" @@ -1187,7 +1103,7 @@ python-versions = ">=3.7" pytest = ">=5.0" [package.extras] -dev = ["pre-commit", "tox", "pytest-asyncio"] +dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = 
"pytest-xdist" @@ -1353,21 +1269,6 @@ python-versions = ">=3.6" importlib-metadata = {version = ">=1.7.0", markers = "python_version < \"3.8\""} pbr = ">=2.0.0,<2.1.0 || >2.1.0" -[[package]] -name = "tinycss2" -version = "1.1.1" -description = "A tiny CSS parser" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -webencodings = ">=0.4" - -[package.extras] -test = ["coverage", "pytest-isort", "pytest-flake8", "pytest-cov", "pytest"] -doc = ["sphinx-rtd-theme", "sphinx"] - [[package]] name = "tomli" version = "2.0.1" @@ -1393,8 +1294,8 @@ optional = false python-versions = ">=3.5.3" [package.extras] -test = ["mypy", "typing-extensions", "pytest"] doc = ["sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["mypy", "pytest", "typing-extensions"] [[package]] name = "types-requests" @@ -1432,8 +1333,8 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" [package.extras] -brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] -secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "urllib3-secure-extra", "ipaddress"] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] @@ -1447,14 +1348,6 @@ python-versions = ">=3.6" [package.extras] watchmedo = ["PyYAML (>=3.10)"] -[[package]] -name = "webencodings" -version = "0.5.1" -description = "Character encoding aliases for legacy web content" -category = "dev" -optional = false -python-versions = "*" - [[package]] name = "wrapt" version = "1.14.1" @@ -1485,8 +1378,8 @@ optional = false python-versions = ">=3.7" [package.extras] -docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "jaraco.tidelift (>=1.4)"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", 
"pytest-flake8", "pytest-cov", "pytest-enabler (>=1.3)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] +docs = ["jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx"] +testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] [extras] pydantic = ["pydantic", "email-validator"] @@ -1494,7 +1387,7 @@ pydantic = ["pydantic", "email-validator"] [metadata] lock-version = "1.1" python-versions = "^3.7.4" -content-hash = "222b5dd0147c5acf62dae506fe84f7b67239872110c870b8f8b9e573746e423b" +content-hash = "01292aa33467d3c56cdd663661a20e155cfabb213950d250c331ecf25e906cf1" [metadata.files] attrs = [ @@ -1502,16 +1395,16 @@ attrs = [ {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, ] aws-cdk-lib = [ - {file = "aws-cdk-lib-2.41.0.tar.gz", hash = "sha256:fec5d44c17ce3d59a4aa43b7978de87a8a2bbfa8768ba2620524c84d66d317d6"}, - {file = "aws_cdk_lib-2.41.0-py3-none-any.whl", hash = "sha256:cbb6822a1e08ae6720d9117862ec9605c5a5a0e9d4e8ac8792da33ccd2cbf14b"}, + {file = "aws-cdk-lib-2.42.1.tar.gz", hash = "sha256:dfdac6177cd4702fa5a5102f82ea5ea7aa2620c85628334b89e56c7954dc9262"}, + {file = "aws_cdk_lib-2.42.1-py3-none-any.whl", hash = "sha256:e49aa4bec65d53cec4633f0ce7dd1733e84a5f67333cd656c7549cc058c21d46"}, ] "aws-cdk.aws-apigatewayv2-alpha" = [ - {file = "aws-cdk.aws-apigatewayv2-alpha-2.41.0a0.tar.gz", hash = "sha256:aad00d9c158f9a0bc4d0411c8b7a77ee90d755152275a18dd6f78fa344acb1cd"}, - {file = "aws_cdk.aws_apigatewayv2_alpha-2.41.0a0-py3-none-any.whl", hash = "sha256:3d38c59a80db4d4dc2de560d2695821a7e191622afbe4481271853b39e40bcec"}, + {file = "aws-cdk.aws-apigatewayv2-alpha-2.42.1a0.tar.gz", hash = "sha256:eb599ba74eec855a29855b364b5163c4e614c428a93053431251375d63c26fd6"}, + {file = 
"aws_cdk.aws_apigatewayv2_alpha-2.42.1a0-py3-none-any.whl", hash = "sha256:d161c7751556e6918cff2cff16a2764acac53e133b893bc1c43a62d4b383ac18"}, ] "aws-cdk.aws-apigatewayv2-integrations-alpha" = [ - {file = "aws-cdk.aws-apigatewayv2-integrations-alpha-2.41.0a0.tar.gz", hash = "sha256:04264b2cd746e9fdbebd8dfa206b91d5af97bd302918f1d45ec44eb04e65e754"}, - {file = "aws_cdk.aws_apigatewayv2_integrations_alpha-2.41.0a0-py3-none-any.whl", hash = "sha256:c7dfa1d87dcc07308dd32a7a2eda3a3c206f3941f5baa6620ba39b4a013c757b"}, + {file = "aws-cdk.aws-apigatewayv2-integrations-alpha-2.42.1a0.tar.gz", hash = "sha256:a22a842ce30171791e9c756053be6f1003ce2db69f470e5744647706756ea74e"}, + {file = "aws_cdk.aws_apigatewayv2_integrations_alpha-2.42.1a0-py3-none-any.whl", hash = "sha256:eb5f901b288bfbb4f82d1ca8d4946c69c906a13f37c7d7dbad3446e1fc1b55f5"}, ] aws-xray-sdk = [] bandit = [ @@ -1544,103 +1437,34 @@ black = [ {file = "black-22.8.0.tar.gz", hash = "sha256:792f7eb540ba9a17e8656538701d3eb1afcb134e3b45b71f20b25c77a8db7e6e"}, ] boto3 = [ - {file = "boto3-1.24.72-py3-none-any.whl", hash = "sha256:ef1f8afb832556fad5f90e7c46373edf9011a436df0d676e8450e05264c3ac0f"}, - {file = "boto3-1.24.72.tar.gz", hash = "sha256:2e502227bfb67fe83b6d61ef9dacd49297bcb631d005cf27b5e54f65064c6e6d"}, + {file = "boto3-1.24.76-py3-none-any.whl", hash = "sha256:5bfcced1a30597b06b3426024c7a9220526882e8fc2dd255f03b9389b5ad8623"}, + {file = "boto3-1.24.76.tar.gz", hash = "sha256:aee98e60c7d2ce1396a3beaf47f8ff749e64804cbdaed8e19d4338a6f628f2dc"}, ] botocore = [ - {file = "botocore-1.27.72-py3-none-any.whl", hash = "sha256:e29a777d261360dcb9283dfd460dc5feca3a46b7774e0c79be406d1a85673789"}, - {file = "botocore-1.27.72.tar.gz", hash = "sha256:6184ab43a59118541b88e0ede24ccd671553323ace95bb7c8de3082a5cc581cb"}, -] -cairocffi = [ - {file = "cairocffi-1.3.0.tar.gz", hash = "sha256:108a3a7cb09e203bdd8501d9baad91d786d204561bd71e9364e8b34897c47b91"}, -] -cairosvg = [ - {file = "CairoSVG-2.5.2-py3-none-any.whl", hash = 
"sha256:98c276b7e4f0caf01e5c7176765c104ffa1aa1461d63b2053b04ab663cf7052b"}, - {file = "CairoSVG-2.5.2.tar.gz", hash = "sha256:b0b9929cf5dba005178d746a8036fcf0025550f498ca54db61873322384783bc"}, + {file = "botocore-1.27.76-py3-none-any.whl", hash = "sha256:0d4a67801e5a4be4cd84795320ad9fb8c315ed1e7a63e1191b2b3f7a7171d43c"}, + {file = "botocore-1.27.76.tar.gz", hash = "sha256:b5c32922eba727a466f171dcc281f309d2a313e2f6dc592d43044caad96de338"}, ] cattrs = [] certifi = [ - {file = "certifi-2022.6.15.2-py3-none-any.whl", hash = "sha256:0aa1a42fbd57645fabeb6290a7687c21755b0344ecaeaa05f4e9f6207ae2e9a8"}, - {file = "certifi-2022.6.15.2.tar.gz", hash = "sha256:aa08c101214127b9b0472ca6338315113c9487d45376fd3e669201b477c71003"}, -] -cffi = [ - {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, - {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, - {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, - {file = 
"cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, - {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, - {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, - {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, - {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, - {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, - {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = 
"sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, - {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, - {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, - {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, - {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, - {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, - {file = 
"cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, - {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, - {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, - {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, - {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, - {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, + {file = "certifi-2022.9.14-py3-none-any.whl", hash = "sha256:e232343de1ab72c2aa521b625c80f699e356830fd0e2c620b465b304b17b0516"}, + {file = "certifi-2022.9.14.tar.gz", hash = "sha256:36973885b9542e6bd01dea287b2b4b3b21236307c56324fcc3f1160f2d655ed5"}, ] charset-normalizer = [ {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, ] +checksumdir = [ + {file = "checksumdir-1.2.0-py3-none-any.whl", hash = "sha256:77687e16da95970c94061c74ef2e13666c4b6e0e8c90a5eaf0c8f7591332cf01"}, + {file = "checksumdir-1.2.0.tar.gz", hash = "sha256:10bfd7518da5a14b0e9ac03e9ad105f0e70f58bba52b6e9aa2f21a3f73c7b5a8"}, +] click = [ {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, ] colorama = [] constructs = [ - {file = "constructs-10.1.102-py3-none-any.whl", hash = "sha256:0255e98319e71cdd75e916bf00dc6abc4f3046a74cf3884b8e92db053b837708"}, - {file = "constructs-10.1.102.tar.gz", hash = "sha256:7b32f3ecc0e587cb4e670503666d9d4f408e67b9de7bbe7386e9cb94e18618ae"}, + {file = "constructs-10.1.107-py3-none-any.whl", hash = 
"sha256:9c764457e12f1a7eb32b7c919794e04c475f790dddd64380e7f453bf41ab84da"}, + {file = "constructs-10.1.107.tar.gz", hash = "sha256:f0ba2f67c5e7da4808057be204fee2325ff9cc63ee7dcc6a83e3c3f6121cb27d"}, ] coverage = [ {file = "coverage-6.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e7b4da9bafad21ea45a714d3ea6f3e1679099e420c8741c74905b92ee9bfa7cc"}, @@ -1694,20 +1518,15 @@ coverage = [ {file = "coverage-6.4.4-pp36.pp37.pp38-none-any.whl", hash = "sha256:f67cf9f406cf0d2f08a3515ce2db5b82625a7257f88aad87904674def6ddaec1"}, {file = "coverage-6.4.4.tar.gz", hash = "sha256:e16c45b726acb780e1e6f88b286d3c10b3914ab03438f32117c4aa52d7f30d58"}, ] -cssselect2 = [ - {file = "cssselect2-0.6.0-py3-none-any.whl", hash = "sha256:3a83b2a68370c69c9cd3fcb88bbfaebe9d22edeef2c22d1ff3e1ed9c7fa45ed8"}, - {file = "cssselect2-0.6.0.tar.gz", hash = "sha256:5b5d6dea81a5eb0c9ca39f116c8578dd413778060c94c1f51196371618909325"}, -] decorator = [] -defusedxml = [ - {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, - {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, -] dnspython = [ {file = "dnspython-2.2.1-py3-none-any.whl", hash = "sha256:a851e51367fb93e9e1361732c1d60dab63eff98712e503ea7d92e6eccb109b4f"}, {file = "dnspython-2.2.1.tar.gz", hash = "sha256:0f7569a4a6ff151958b64304071d370daa3243d15941a7beedf0c9fe5105603e"}, ] -email-validator = [] +email-validator = [ + {file = "email_validator-1.3.0-py2.py3-none-any.whl", hash = "sha256:816073f2a7cffef786b29928f58ec16cdac42710a53bb18aa94317e3e145ec5c"}, + {file = "email_validator-1.3.0.tar.gz", hash = "sha256:553a66f8be2ec2dea641ae1d3f29017ab89e9d603d4a25cdaac39eefa283d769"}, +] eradicate = [ {file = "eradicate-2.1.0-py3-none-any.whl", hash = "sha256:8bfaca181db9227dc88bdbce4d051a9627604c2243e7d85324f6d6ce0fd08bb2"}, {file = "eradicate-2.1.0.tar.gz", hash = 
"sha256:aac7384ab25b1bf21c4c012de9b4bf8398945a14c98c911545b2ea50ab558014"}, @@ -1717,7 +1536,10 @@ exceptiongroup = [ {file = "exceptiongroup-1.0.0rc9.tar.gz", hash = "sha256:9086a4a21ef9b31c72181c77c040a074ba0889ee56a7b289ff0afb0d97655f96"}, ] execnet = [] -fastjsonschema = [] +fastjsonschema = [ + {file = "fastjsonschema-2.16.2-py3-none-any.whl", hash = "sha256:21f918e8d9a1a4ba9c22e09574ba72267a6762d47822db9add95f6454e51cc1c"}, + {file = "fastjsonschema-2.16.2.tar.gz", hash = "sha256:01e366f25d9047816fe3d288cbfc3e10541daf0af2044763f3d0ade42476da18"}, +] filelock = [ {file = "filelock-3.8.0-py3-none-any.whl", hash = "sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4"}, {file = "filelock-3.8.0.tar.gz", hash = "sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc"}, @@ -1845,8 +1667,8 @@ mkdocs = [ ] mkdocs-git-revision-date-plugin = [] mkdocs-material = [ - {file = "mkdocs-material-8.5.0.tar.gz", hash = "sha256:1b9e03b93c26db7c1b520480978024916eda73b49eb5818820cc10f4665f00fc"}, - {file = "mkdocs_material-8.5.0-py2.py3-none-any.whl", hash = "sha256:1bfd05e6e159db2c5f95821dc3c7afdc7a5a3a7acc544c3102f7acb28691f407"}, + {file = "mkdocs-material-8.5.2.tar.gz", hash = "sha256:16ca1304a93b085e5dfb0dbcc681b74dad1587d8ba727c89c8fd4259dd8fe004"}, + {file = "mkdocs_material-8.5.2-py2.py3-none-any.whl", hash = "sha256:1962099d8c6eb7571896a0e7fdc52ff4fda1e906969d0e42ae3537418e807868"}, ] mkdocs-material-extensions = [] mypy = [] @@ -1863,8 +1685,8 @@ mypy-boto3-cloudwatch = [ {file = "mypy_boto3_cloudwatch-1.24.55-py3-none-any.whl", hash = "sha256:23faf8fdfe928f9dcce453a60b03bda69177554eb88c2d7e5240ff91b5b14388"}, ] mypy-boto3-dynamodb = [ - {file = "mypy-boto3-dynamodb-1.24.60.tar.gz", hash = "sha256:aa552233fa8357d99f4a1021ef65b98679e26ebc35d04c31a9d70a4db779c236"}, - {file = "mypy_boto3_dynamodb-1.24.60-py3-none-any.whl", hash = "sha256:df8e91bb25dd6e4090aef22d33504a5e9e305e45e3262d81e7223df4b6ddee5f"}, + {file = 
"mypy-boto3-dynamodb-1.24.74.tar.gz", hash = "sha256:7c5b4dc26e05375d3a2cadc28b253a38060c27aa4d6a9394b3d5deea45171f91"}, + {file = "mypy_boto3_dynamodb-1.24.74-py3-none-any.whl", hash = "sha256:1111c2eb8129bf8c09b5423d95ad483943f5fc08d745801c356f6a1b22b04b37"}, ] mypy-boto3-lambda = [ {file = "mypy-boto3-lambda-1.24.54.tar.gz", hash = "sha256:c76d28d84bdf94c8980acd85bc07f2747559ca11a990fd6785c9c2389e13aff1"}, @@ -1901,66 +1723,6 @@ pbr = [ {file = "pbr-5.10.0.tar.gz", hash = "sha256:cfcc4ff8e698256fc17ea3ff796478b050852585aa5bae79ecd05b2ab7b39b9a"}, ] pdoc3 = [] -pillow = [ - {file = "Pillow-9.2.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:a9c9bc489f8ab30906d7a85afac4b4944a572a7432e00698a7239f44a44e6efb"}, - {file = "Pillow-9.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:510cef4a3f401c246cfd8227b300828715dd055463cdca6176c2e4036df8bd4f"}, - {file = "Pillow-9.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7888310f6214f19ab2b6df90f3f06afa3df7ef7355fc025e78a3044737fab1f5"}, - {file = "Pillow-9.2.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:831e648102c82f152e14c1a0938689dbb22480c548c8d4b8b248b3e50967b88c"}, - {file = "Pillow-9.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1cc1d2451e8a3b4bfdb9caf745b58e6c7a77d2e469159b0d527a4554d73694d1"}, - {file = "Pillow-9.2.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:136659638f61a251e8ed3b331fc6ccd124590eeff539de57c5f80ef3a9594e58"}, - {file = "Pillow-9.2.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:6e8c66f70fb539301e064f6478d7453e820d8a2c631da948a23384865cd95544"}, - {file = "Pillow-9.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:37ff6b522a26d0538b753f0b4e8e164fdada12db6c6f00f62145d732d8a3152e"}, - {file = "Pillow-9.2.0-cp310-cp310-win32.whl", hash = "sha256:c79698d4cd9318d9481d89a77e2d3fcaeff5486be641e60a4b49f3d2ecca4e28"}, - {file = "Pillow-9.2.0-cp310-cp310-win_amd64.whl", hash 
= "sha256:254164c57bab4b459f14c64e93df11eff5ded575192c294a0c49270f22c5d93d"}, - {file = "Pillow-9.2.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:adabc0bce035467fb537ef3e5e74f2847c8af217ee0be0455d4fec8adc0462fc"}, - {file = "Pillow-9.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:336b9036127eab855beec9662ac3ea13a4544a523ae273cbf108b228ecac8437"}, - {file = "Pillow-9.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50dff9cc21826d2977ef2d2a205504034e3a4563ca6f5db739b0d1026658e004"}, - {file = "Pillow-9.2.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cb6259196a589123d755380b65127ddc60f4c64b21fc3bb46ce3a6ea663659b0"}, - {file = "Pillow-9.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b0554af24df2bf96618dac71ddada02420f946be943b181108cac55a7a2dcd4"}, - {file = "Pillow-9.2.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:15928f824870535c85dbf949c09d6ae7d3d6ac2d6efec80f3227f73eefba741c"}, - {file = "Pillow-9.2.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:bdd0de2d64688ecae88dd8935012c4a72681e5df632af903a1dca8c5e7aa871a"}, - {file = "Pillow-9.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5b87da55a08acb586bad5c3aa3b86505f559b84f39035b233d5bf844b0834b1"}, - {file = "Pillow-9.2.0-cp311-cp311-win32.whl", hash = "sha256:b6d5e92df2b77665e07ddb2e4dbd6d644b78e4c0d2e9272a852627cdba0d75cf"}, - {file = "Pillow-9.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:6bf088c1ce160f50ea40764f825ec9b72ed9da25346216b91361eef8ad1b8f8c"}, - {file = "Pillow-9.2.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:2c58b24e3a63efd22554c676d81b0e57f80e0a7d3a5874a7e14ce90ec40d3069"}, - {file = "Pillow-9.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eef7592281f7c174d3d6cbfbb7ee5984a671fcd77e3fc78e973d492e9bf0eb3f"}, - {file = "Pillow-9.2.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:dcd7b9c7139dc8258d164b55696ecd16c04607f1cc33ba7af86613881ffe4ac8"}, - {file = "Pillow-9.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a138441e95562b3c078746a22f8fca8ff1c22c014f856278bdbdd89ca36cff1b"}, - {file = "Pillow-9.2.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:93689632949aff41199090eff5474f3990b6823404e45d66a5d44304e9cdc467"}, - {file = "Pillow-9.2.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:f3fac744f9b540148fa7715a435d2283b71f68bfb6d4aae24482a890aed18b59"}, - {file = "Pillow-9.2.0-cp37-cp37m-win32.whl", hash = "sha256:fa768eff5f9f958270b081bb33581b4b569faabf8774726b283edb06617101dc"}, - {file = "Pillow-9.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:69bd1a15d7ba3694631e00df8de65a8cb031911ca11f44929c97fe05eb9b6c1d"}, - {file = "Pillow-9.2.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:030e3460861488e249731c3e7ab59b07c7853838ff3b8e16aac9561bb345da14"}, - {file = "Pillow-9.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:74a04183e6e64930b667d321524e3c5361094bb4af9083db5c301db64cd341f3"}, - {file = "Pillow-9.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d33a11f601213dcd5718109c09a52c2a1c893e7461f0be2d6febc2879ec2402"}, - {file = "Pillow-9.2.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fd6f5e3c0e4697fa7eb45b6e93996299f3feee73a3175fa451f49a74d092b9f"}, - {file = "Pillow-9.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a647c0d4478b995c5e54615a2e5360ccedd2f85e70ab57fbe817ca613d5e63b8"}, - {file = "Pillow-9.2.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:4134d3f1ba5f15027ff5c04296f13328fecd46921424084516bdb1b2548e66ff"}, - {file = "Pillow-9.2.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:bc431b065722a5ad1dfb4df354fb9333b7a582a5ee39a90e6ffff688d72f27a1"}, - {file = "Pillow-9.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:1536ad017a9f789430fb6b8be8bf99d2f214c76502becc196c6f2d9a75b01b76"}, - {file = "Pillow-9.2.0-cp38-cp38-win32.whl", hash = "sha256:2ad0d4df0f5ef2247e27fc790d5c9b5a0af8ade9ba340db4a73bb1a4a3e5fb4f"}, - {file = "Pillow-9.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:ec52c351b35ca269cb1f8069d610fc45c5bd38c3e91f9ab4cbbf0aebc136d9c8"}, - {file = "Pillow-9.2.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0ed2c4ef2451de908c90436d6e8092e13a43992f1860275b4d8082667fbb2ffc"}, - {file = "Pillow-9.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ad2f835e0ad81d1689f1b7e3fbac7b01bb8777d5a985c8962bedee0cc6d43da"}, - {file = "Pillow-9.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea98f633d45f7e815db648fd7ff0f19e328302ac36427343e4432c84432e7ff4"}, - {file = "Pillow-9.2.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7761afe0126d046974a01e030ae7529ed0ca6a196de3ec6937c11df0df1bc91c"}, - {file = "Pillow-9.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a54614049a18a2d6fe156e68e188da02a046a4a93cf24f373bffd977e943421"}, - {file = "Pillow-9.2.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:5aed7dde98403cd91d86a1115c78d8145c83078e864c1de1064f52e6feb61b20"}, - {file = "Pillow-9.2.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:13b725463f32df1bfeacbf3dd197fb358ae8ebcd8c5548faa75126ea425ccb60"}, - {file = "Pillow-9.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:808add66ea764ed97d44dda1ac4f2cfec4c1867d9efb16a33d158be79f32b8a4"}, - {file = "Pillow-9.2.0-cp39-cp39-win32.whl", hash = "sha256:337a74fd2f291c607d220c793a8135273c4c2ab001b03e601c36766005f36885"}, - {file = "Pillow-9.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:fac2d65901fb0fdf20363fbd345c01958a742f2dc62a8dd4495af66e3ff502a4"}, - {file = "Pillow-9.2.0-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:ad2277b185ebce47a63f4dc6302e30f05762b688f8dc3de55dbae4651872cdf3"}, - {file = 
"Pillow-9.2.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c7b502bc34f6e32ba022b4a209638f9e097d7a9098104ae420eb8186217ebbb"}, - {file = "Pillow-9.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d1f14f5f691f55e1b47f824ca4fdcb4b19b4323fe43cc7bb105988cad7496be"}, - {file = "Pillow-9.2.0-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:dfe4c1fedfde4e2fbc009d5ad420647f7730d719786388b7de0999bf32c0d9fd"}, - {file = "Pillow-9.2.0-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:f07f1f00e22b231dd3d9b9208692042e29792d6bd4f6639415d2f23158a80013"}, - {file = "Pillow-9.2.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1802f34298f5ba11d55e5bb09c31997dc0c6aed919658dfdf0198a2fe75d5490"}, - {file = "Pillow-9.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17d4cafe22f050b46d983b71c707162d63d796a1235cdf8b9d7a112e97b15bac"}, - {file = "Pillow-9.2.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:96b5e6874431df16aee0c1ba237574cb6dff1dcb173798faa6a9d8b399a05d0e"}, - {file = "Pillow-9.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:0030fdbd926fb85844b8b92e2f9449ba89607231d3dd597a21ae72dc7fe26927"}, - {file = "Pillow-9.2.0.tar.gz", hash = "sha256:75e636fd3e0fb872693f23ccb8a5ff2cd578801251f3a4f6854c6a5d437d3c04"}, -] platformdirs = [ {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, @@ -1973,10 +1735,6 @@ pycodestyle = [ {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"}, {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, ] -pycparser = [ - {file = 
"pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, - {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, -] pydantic = [ {file = "pydantic-1.10.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bb6ad4489af1bac6955d38ebcb95079a836af31e4c4f74aba1ca05bb9f6027bd"}, {file = "pydantic-1.10.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a1f5a63a6dfe19d719b1b6e6106561869d2efaca6167f84f5ab9347887d78b98"}, @@ -2143,10 +1901,6 @@ six = [ ] smmap = [] stevedore = [] -tinycss2 = [ - {file = "tinycss2-1.1.1-py3-none-any.whl", hash = "sha256:fe794ceaadfe3cf3e686b22155d0da5780dd0e273471a51846d0a02bc204fec8"}, - {file = "tinycss2-1.1.1.tar.gz", hash = "sha256:b2e44dd8883c360c35dd0d1b5aad0b610e5156c2cb3b33434634e539ead9d8bf"}, -] tomli = [ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, @@ -2224,10 +1978,6 @@ watchdog = [ {file = "watchdog-2.1.9-py3-none-win_ia64.whl", hash = "sha256:ad576a565260d8f99d97f2e64b0f97a48228317095908568a9d5c786c829d428"}, {file = "watchdog-2.1.9.tar.gz", hash = "sha256:43ce20ebb36a51f21fa376f76d1d4692452b2527ccd601950d69ed36b9e21609"}, ] -webencodings = [ - {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, - {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, -] wrapt = [ {file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"}, {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef"}, diff --git a/pyproject.toml 
b/pyproject.toml index 2ca74c0b8b0..79c7738ea94 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -73,6 +73,7 @@ typing-extensions = "^4.3.0" python-snappy = "^0.6.1" mkdocs-material = "^8.5.0" filelock = "^3.8.0" +checksumdir = "^1.2.0" [tool.poetry.extras] pydantic = ["pydantic", "email-validator"] diff --git a/tests/e2e/conftest.py b/tests/e2e/conftest.py index 418b4a742b2..f59eea9a33b 100644 --- a/tests/e2e/conftest.py +++ b/tests/e2e/conftest.py @@ -1,32 +1,29 @@ -# import pytest +import pytest -# from tests.e2e.lambda_layer.infrastructure import build_layer -# from tests.e2e.utils.infrastructure import call_once +from tests.e2e.utils.infrastructure import call_once +from tests.e2e.utils.lambda_layer.powertools_layer import LocalLambdaPowertoolsLayer -# # @pytest.fixture(scope="session") -# # def lambda_layer_arn(lambda_layer_deployment: dict): -# # yield lambda_layer_deployment.get("LayerArn") +@pytest.fixture(scope="session", autouse=True) +def lambda_layer_build(tmp_path_factory: pytest.TempPathFactory, worker_id: str) -> str: + """Build Lambda Layer once before stacks are created + Parameters + ---------- + tmp_path_factory : pytest.TempPathFactory + pytest temporary path factory to discover shared tmp when multiple CPU processes are spun up + worker_id : str + pytest-xdist worker identification to detect whether parallelization is enabled -# @pytest.fixture(scope="session", autouse=True) -# def lambda_layer_deployment(tmp_path_factory: pytest.TempPathFactory, worker_id: str): -# """Setup and teardown logic for E2E test infrastructure + Yields + ------ + str + Lambda Layer artefact location + """ -# Parameters -# ---------- -# tmp_path_factory : pytest.TempPathFactory -# pytest temporary path factory to discover shared tmp when multiple CPU processes are spun up -# worker_id : str -# pytest-xdist worker identification to detect whether parallelization is enabled - -# Yields -# ------ -# Dict[str, str] -# CloudFormation Outputs from deployed 
infrastructure -# """ -# yield from call_once( -# callable=build_layer, -# tmp_path_factory=tmp_path_factory, -# worker_id=worker_id, -# ) + layer = LocalLambdaPowertoolsLayer() + yield from call_once( + task=layer.build, + tmp_path_factory=tmp_path_factory, + worker_id=worker_id, + ) diff --git a/tests/e2e/utils/infrastructure.py b/tests/e2e/utils/infrastructure.py index 736da6ab15c..c3b329eeed1 100644 --- a/tests/e2e/utils/infrastructure.py +++ b/tests/e2e/utils/infrastructure.py @@ -56,7 +56,7 @@ def __init__(self, feature_name: str) -> None: self._feature_infra_module_path = self._feature_path / "infrastructure" self._handlers_dir = self._feature_path / "handlers" # TODO: Change to cdk_feature_dir - self._cdk_out_dir = CDK_OUT_PATH / self.feature_name + self._cdk_out_dir: Path = CDK_OUT_PATH / self.feature_name def create_lambda_functions(self, function_props: Optional[Dict] = None) -> Dict[str, Function]: """Create Lambda functions available under handlers_dir @@ -95,8 +95,7 @@ def create_lambda_functions(self, function_props: Optional[Dict] = None) -> Dict if not self._handlers_dir.exists(): raise RuntimeError(f"Handlers dir '{self._handlers_dir}' must exist for functions to be created.") - layer_build = LocalLambdaPowertoolsLayer(output_dir=self._cdk_out_dir).build() - + layer_build = LocalLambdaPowertoolsLayer().build() layer = LayerVersion( self.stack, "aws-lambda-powertools-e2e-test", @@ -279,7 +278,7 @@ def add_cfn_output(self, name: str, value: str, arn: str = ""): def call_once( - callable: Callable, + task: Callable, tmp_path_factory: pytest.TempPathFactory, worker_id: str, callback: Optional[Callable] = None, @@ -288,7 +287,7 @@ def call_once( Parameters ---------- - callable : Callable + task : Callable Function to call once and JSON serialize result whether parallel test is enabled or not. 
tmp_path_factory : pytest.TempPathFactory pytest temporary path factory to discover shared tmp when multiple CPU processes are spun up @@ -306,20 +305,20 @@ def call_once( try: if worker_id == "master": # no parallelization, call and return - yield callable() + yield task() else: # tmp dir shared by all workers root_tmp_dir = tmp_path_factory.getbasetemp().parent cache = root_tmp_dir / f"{PYTHON_RUNTIME_VERSION}_cache.json" with FileLock(f"{cache}.lock"): - # If cache exists, return callable outputs back + # If cache exists, return task outputs back # otherwise it's the first run by the main worker - # run and return callable outputs for subsequent workers reuse + # run and return task outputs for subsequent workers reuse if cache.is_file(): callable_result = json.loads(cache.read_text()) else: - callable_result: Dict = callable() + callable_result: Dict = task() cache.write_text(json.dumps(callable_result)) yield callable_result finally: diff --git a/tests/e2e/utils/lambda_layer/base.py b/tests/e2e/utils/lambda_layer/base.py index bb8dbab2be4..280fe19d4f8 100644 --- a/tests/e2e/utils/lambda_layer/base.py +++ b/tests/e2e/utils/lambda_layer/base.py @@ -1,6 +1,5 @@ -from pathlib import Path - from abc import ABC, abstractmethod +from pathlib import Path class BaseLocalLambdaLayer(ABC): @@ -10,12 +9,24 @@ def __init__(self, output_dir: Path): @abstractmethod def build(self) -> str: + """Builds a Lambda Layer locally + + Returns + ------- + build_path : str + Path where newly built Lambda Layer is + """ raise NotImplementedError() def before_build(self): + """Any step to run before build process begins. + + By default, it creates output dir and its parents if it doesn't exist. + """ if not self.output_dir.exists(): # Create missing parent directories if missing self.output_dir.mkdir(parents=True, exist_ok=True) def after_build(self): + """Any step after a build succeed""" ... 
diff --git a/tests/e2e/utils/lambda_layer/powertools_layer.py b/tests/e2e/utils/lambda_layer/powertools_layer.py index 0f57ebaf173..45a22547715 100644 --- a/tests/e2e/utils/lambda_layer/powertools_layer.py +++ b/tests/e2e/utils/lambda_layer/powertools_layer.py @@ -2,24 +2,47 @@ import subprocess from pathlib import Path -from tests.e2e.utils.constants import SOURCE_CODE_ROOT_PATH +from checksumdir import dirhash + +from aws_lambda_powertools import PACKAGE_PATH +from tests.e2e.utils.constants import CDK_OUT_PATH, SOURCE_CODE_ROOT_PATH from tests.e2e.utils.lambda_layer.base import BaseLocalLambdaLayer logger = logging.getLogger(__name__) class LocalLambdaPowertoolsLayer(BaseLocalLambdaLayer): - def __init__(self, output_dir: Path): + IGNORE_EXTENSIONS = ["pyc"] + + def __init__(self, output_dir: Path = CDK_OUT_PATH): super().__init__(output_dir) - self.package = f"{SOURCE_CODE_ROOT_PATH}\[pydantic\]" + self.package = f"{SOURCE_CODE_ROOT_PATH}[pydantic]" self.build_args = "--platform manylinux1_x86_64 --only-binary=:all: --upgrade" self.build_command = f"python -m pip install {self.package} {self.build_args} --target {self.target_dir}" + self.source_diff_file: Path = CDK_OUT_PATH / "layer_build.diff" def build(self) -> str: self.before_build() - subprocess.run(self.build_command, shell=True) + if self._has_source_changed(): + subprocess.run(self.build_command, shell=True) self.after_build() return str(self.output_dir) + + def _has_source_changed(self) -> bool: + """Hashes source code and + + Returns + ------- + change : bool + Whether source code hash has changed + """ + diff = self.source_diff_file.read_text() if self.source_diff_file.exists() else "" + new_diff = dirhash(dirname=PACKAGE_PATH, excluded_extensions=self.IGNORE_EXTENSIONS) + if new_diff != diff or not self.output_dir.exists(): + self.source_diff_file.write_text(new_diff) + return True + + return False From 5f28a53c9d6511fdb312dd1afce051ee3dbfd7d3 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sun, 4 
Sep 2022 22:02:10 +0200 Subject: [PATCH 10/33] chore: cleanup deploy, guard for lack of infra --- tests/e2e/utils/infrastructure.py | 34 ++++++++++--------------------- 1 file changed, 11 insertions(+), 23 deletions(-) diff --git a/tests/e2e/utils/infrastructure.py b/tests/e2e/utils/infrastructure.py index c3b329eeed1..44cd26635ba 100644 --- a/tests/e2e/utils/infrastructure.py +++ b/tests/e2e/utils/infrastructure.py @@ -54,10 +54,15 @@ def __init__(self, feature_name: str) -> None: self._feature_path = Path(sys.modules[self.__class__.__module__].__file__).parent self._feature_infra_class_name = self.__class__.__name__ self._feature_infra_module_path = self._feature_path / "infrastructure" + self._feature_infra_file = self._feature_path / "infrastructure.py" self._handlers_dir = self._feature_path / "handlers" - # TODO: Change to cdk_feature_dir self._cdk_out_dir: Path = CDK_OUT_PATH / self.feature_name + if not self._feature_infra_file.exists(): + raise FileNotFoundError( + "You must have your infrastructure defined in 'tests/e2e//infrastructure.py'." 
+ ) + def create_lambda_functions(self, function_props: Optional[Dict] = None) -> Dict[str, Function]: """Create Lambda functions available under handlers_dir @@ -156,27 +161,6 @@ def deploy(self) -> Dict[str, str]: Dict[str, str] CloudFormation Stack Outputs with output key and value """ - cdk_app_file = self._create_temp_cdk_app() - return self._deploy_stack(cdk_app_file) - - def delete(self) -> None: - """Delete CloudFormation Stack""" - logger.debug(f"Deleting stack: {self.stack_name}") - self.cfn.delete_stack(StackName=self.stack_name) - - def _deploy_stack(self, cdk_app_file: str) -> Dict: - """Deploys CDK App auto-generated using CDK CLI - - Parameters - ---------- - cdk_app_file : str - Path to temporary CDK App - - Returns - ------- - Dict - Stack Output values as dict - """ stack_file = self._create_temp_cdk_app() synth_command = f"npx cdk synth --app 'python {stack_file}' -o {self._cdk_out_dir}" deploy_command = ( @@ -188,6 +172,11 @@ def _deploy_stack(self, cdk_app_file: str) -> Dict: subprocess.check_output(deploy_command, shell=True) return self._read_stack_output() + def delete(self) -> None: + """Delete CloudFormation Stack""" + logger.debug(f"Deleting stack: {self.stack_name}") + self.cfn.delete_stack(StackName=self.stack_name) + def _sync_stack_name(self, stack_output: Dict): """Synchronize initial stack name with CDK's final stack name @@ -212,7 +201,6 @@ def _create_temp_cdk_app(self): This allows us to keep our BaseInfrastructure while supporting context lookups. """ - # TODO: Confirm infrastructure module exists before proceeding. 
# tests.e2e.tracer.infrastructure infra_module = str(self._feature_infra_module_path.relative_to(SOURCE_CODE_ROOT_PATH)).replace(os.sep, ".") From a322be7b8fd4823f53336ff8c4ad09cf760ca988 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sun, 4 Sep 2022 22:19:37 +0200 Subject: [PATCH 11/33] refactor: remove need for feature_name and super in stacks Signed-off-by: heitorlessa --- tests/e2e/event_handler/infrastructure.py | 6 ----- tests/e2e/logger/infrastructure.py | 5 ---- tests/e2e/metrics/infrastructure.py | 5 ---- tests/e2e/tracer/infrastructure.py | 4 --- tests/e2e/utils/infrastructure.py | 30 ++++++++--------------- 5 files changed, 10 insertions(+), 40 deletions(-) diff --git a/tests/e2e/event_handler/infrastructure.py b/tests/e2e/event_handler/infrastructure.py index bddf8c4d548..da456038a25 100644 --- a/tests/e2e/event_handler/infrastructure.py +++ b/tests/e2e/event_handler/infrastructure.py @@ -1,4 +1,3 @@ -from pathlib import Path from typing import Dict, Optional from aws_cdk import CfnOutput @@ -14,11 +13,6 @@ class EventHandlerStack(BaseInfrastructure): - FEATURE_NAME = "event-handlers" - - def __init__(self, feature_name: str = FEATURE_NAME) -> None: - super().__init__(feature_name) - def create_resources(self): functions = self.create_lambda_functions() diff --git a/tests/e2e/logger/infrastructure.py b/tests/e2e/logger/infrastructure.py index fa8d20af534..242b3c10892 100644 --- a/tests/e2e/logger/infrastructure.py +++ b/tests/e2e/logger/infrastructure.py @@ -2,10 +2,5 @@ class LoggerStack(BaseInfrastructure): - FEATURE_NAME = "logger" - - def __init__(self, feature_name: str = FEATURE_NAME) -> None: - super().__init__(feature_name) - def create_resources(self): self.create_lambda_functions() diff --git a/tests/e2e/metrics/infrastructure.py b/tests/e2e/metrics/infrastructure.py index 76aab7fd7c3..7cc1eb8c498 100644 --- a/tests/e2e/metrics/infrastructure.py +++ b/tests/e2e/metrics/infrastructure.py @@ -2,10 +2,5 @@ class 
MetricsStack(BaseInfrastructure): - FEATURE_NAME = "metrics" - - def __init__(self, feature_name: str = FEATURE_NAME) -> None: - super().__init__(feature_name) - def create_resources(self): self.create_lambda_functions() diff --git a/tests/e2e/tracer/infrastructure.py b/tests/e2e/tracer/infrastructure.py index 7fa6b3fe970..c36db22e1de 100644 --- a/tests/e2e/tracer/infrastructure.py +++ b/tests/e2e/tracer/infrastructure.py @@ -10,10 +10,6 @@ class TracerStack(BaseInfrastructure): # Maintenance: Tracer doesn't support dynamic service injection (tracer.py L310) # we could move after handler response or adopt env vars usage in e2e tests SERVICE_NAME: str = build_service_name() - FEATURE_NAME = "tracer" - - def __init__(self, feature_name: str = FEATURE_NAME) -> None: - super().__init__(feature_name) def create_resources(self) -> None: # NOTE: Commented out Lambda fns as we don't need them now diff --git a/tests/e2e/utils/infrastructure.py b/tests/e2e/utils/infrastructure.py index 44cd26635ba..437b420226a 100644 --- a/tests/e2e/utils/infrastructure.py +++ b/tests/e2e/utils/infrastructure.py @@ -6,7 +6,7 @@ import textwrap from abc import ABC, abstractmethod from pathlib import Path -from typing import Callable, Dict, Generator, Optional, Tuple +from typing import Callable, Dict, Generator, Optional from uuid import uuid4 import boto3 @@ -22,22 +22,13 @@ logger = logging.getLogger(__name__) -class BaseInfrastructureStack(ABC): - @abstractmethod - def synthesize(self) -> Tuple[dict, str]: - ... - - @abstractmethod - def __call__(self) -> Tuple[dict, str]: - ... 
- - class BaseInfrastructure(ABC): RANDOM_STACK_VALUE: str = f"{uuid4()}" - def __init__(self, feature_name: str) -> None: - self.feature_name = feature_name - self.stack_name = f"test{PYTHON_RUNTIME_VERSION}-{feature_name}-{self.RANDOM_STACK_VALUE}" + def __init__(self) -> None: + self.feature_path = Path(sys.modules[self.__class__.__module__].__file__).parent # absolute path to feature + self.feature_name = self.feature_path.parts[-1].replace("_", "-") # logger, tracer, event-handler, etc. + self.stack_name = f"test{PYTHON_RUNTIME_VERSION}-{self.feature_name}-{self.RANDOM_STACK_VALUE}" self.stack_outputs: Dict[str, str] = {} self.stack_outputs_file = f"{CDK_OUT_PATH / self.feature_name}_stack_outputs.json" # tracer_stack_outputs.json @@ -50,12 +41,11 @@ def __init__(self, feature_name: str) -> None: self.app = App() self.stack = Stack(self.app, self.stack_name, env=Environment(account=self.account_id, region=self.region)) - # NOTE: Inspect subclass path to generate CDK App (_create_temp_cdk_app method) - self._feature_path = Path(sys.modules[self.__class__.__module__].__file__).parent + # NOTE: Introspect feature details to generate CDK App (_create_temp_cdk_app method) self._feature_infra_class_name = self.__class__.__name__ - self._feature_infra_module_path = self._feature_path / "infrastructure" - self._feature_infra_file = self._feature_path / "infrastructure.py" - self._handlers_dir = self._feature_path / "handlers" + self._feature_infra_module_path = self.feature_path / "infrastructure" + self._feature_infra_file = self.feature_path / "infrastructure.py" + self._handlers_dir = self.feature_path / "handlers" self._cdk_out_dir: Path = CDK_OUT_PATH / self.feature_name if not self._feature_infra_file.exists(): @@ -178,7 +168,7 @@ def delete(self) -> None: self.cfn.delete_stack(StackName=self.stack_name) def _sync_stack_name(self, stack_output: Dict): - """Synchronize initial stack name with CDK's final stack name + """Synchronize initial stack name with CDK 
final stack name Parameters ---------- From 36e92e615036e908dbb917cdee57434a6cbf9ea0 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Sun, 4 Sep 2022 22:55:56 +0200 Subject: [PATCH 12/33] refactor(tracer): allow service to be changed at runtime for e2e testing --- aws_lambda_powertools/tracing/tracer.py | 25 ++++++++++--------- .../tracer/{bkp_conftest.py => conftest.py} | 9 ++----- tests/e2e/tracer/handlers/async_capture.py | 1 + tests/e2e/tracer/handlers/basic_handler.py | 1 + tests/e2e/tracer/infrastructure.py | 13 +--------- .../tracer/{bkp_tracer.py => test_tracer.py} | 19 ++++++++------ 6 files changed, 30 insertions(+), 38 deletions(-) rename tests/e2e/tracer/{bkp_conftest.py => conftest.py} (60%) rename tests/e2e/tracer/{bkp_tracer.py => test_tracer.py} (91%) diff --git a/aws_lambda_powertools/tracing/tracer.py b/aws_lambda_powertools/tracing/tracer.py index 7053497ae6d..0523d53c41d 100644 --- a/aws_lambda_powertools/tracing/tracer.py +++ b/aws_lambda_powertools/tracing/tracer.py @@ -300,16 +300,6 @@ def handler(event, context): @functools.wraps(lambda_handler) def decorate(event, context, **kwargs): with self.provider.in_subsegment(name=f"## {lambda_handler_name}") as subsegment: - global is_cold_start - logger.debug("Annotating cold start") - subsegment.put_annotation(key="ColdStart", value=is_cold_start) - - if is_cold_start: - is_cold_start = False - - if self.service: - subsegment.put_annotation(key="Service", value=self.service) - try: logger.debug("Calling lambda handler") response = lambda_handler(event, context, **kwargs) @@ -325,7 +315,18 @@ def decorate(event, context, **kwargs): self._add_full_exception_as_metadata( method_name=lambda_handler_name, error=err, subsegment=subsegment, capture_error=capture_error ) + raise + finally: + global is_cold_start + logger.debug("Annotating cold start") + subsegment.put_annotation(key="ColdStart", value=is_cold_start) + + if is_cold_start: + is_cold_start = False + + if self.service: + 
subsegment.put_annotation(key="Service", value=self.service) return response @@ -672,7 +673,7 @@ def _add_response_as_metadata( if data is None or not capture_response or subsegment is None: return - subsegment.put_metadata(key=f"{method_name} response", value=data, namespace=self._config["service"]) + subsegment.put_metadata(key=f"{method_name} response", value=data, namespace=self.service) def _add_full_exception_as_metadata( self, @@ -697,7 +698,7 @@ def _add_full_exception_as_metadata( if not capture_error: return - subsegment.put_metadata(key=f"{method_name} error", value=error, namespace=self._config["service"]) + subsegment.put_metadata(key=f"{method_name} error", value=error, namespace=self.service) @staticmethod def _disable_tracer_provider(): diff --git a/tests/e2e/tracer/bkp_conftest.py b/tests/e2e/tracer/conftest.py similarity index 60% rename from tests/e2e/tracer/bkp_conftest.py rename to tests/e2e/tracer/conftest.py index 593ca614f54..afb34ffee2b 100644 --- a/tests/e2e/tracer/bkp_conftest.py +++ b/tests/e2e/tracer/conftest.py @@ -4,21 +4,16 @@ @pytest.fixture(autouse=True, scope="module") -def infrastructure(lambda_layer_arn: str): - # # def infrastructure(request: pytest.FixtureRequest): +def infrastructure(): """Setup and teardown logic for E2E test infrastructure - Parameters - ---------- - lambda_layer_arn : str - Lambda Layer ARN Yields ------ Dict[str, str] CloudFormation Outputs from deployed infrastructure """ - stack = TracerStack(layer_arn=lambda_layer_arn) + stack = TracerStack() try: yield stack.deploy() finally: diff --git a/tests/e2e/tracer/handlers/async_capture.py b/tests/e2e/tracer/handlers/async_capture.py index b19840a6f69..814e0b92e02 100644 --- a/tests/e2e/tracer/handlers/async_capture.py +++ b/tests/e2e/tracer/handlers/async_capture.py @@ -13,4 +13,5 @@ async def async_get_users(): def lambda_handler(event: dict, context: LambdaContext): + tracer.service = event.get("service") return asyncio.run(async_get_users()) diff --git 
a/tests/e2e/tracer/handlers/basic_handler.py b/tests/e2e/tracer/handlers/basic_handler.py index ba94c845ace..89a6b062423 100644 --- a/tests/e2e/tracer/handlers/basic_handler.py +++ b/tests/e2e/tracer/handlers/basic_handler.py @@ -13,4 +13,5 @@ def get_todos(): @tracer.capture_lambda_handler def lambda_handler(event: dict, context: LambdaContext): + tracer.service = event.get("service") return get_todos() diff --git a/tests/e2e/tracer/infrastructure.py b/tests/e2e/tracer/infrastructure.py index c36db22e1de..8562359acf0 100644 --- a/tests/e2e/tracer/infrastructure.py +++ b/tests/e2e/tracer/infrastructure.py @@ -1,17 +1,6 @@ -from pathlib import Path - -from tests.e2e.utils.data_builder import build_service_name from tests.e2e.utils.infrastructure import BaseInfrastructure -PWD = Path(__file__).parent - class TracerStack(BaseInfrastructure): - # Maintenance: Tracer doesn't support dynamic service injection (tracer.py L310) - # we could move after handler response or adopt env vars usage in e2e tests - SERVICE_NAME: str = build_service_name() - def create_resources(self) -> None: - # NOTE: Commented out Lambda fns as we don't need them now - env_vars = {"POWERTOOLS_SERVICE_NAME": self.SERVICE_NAME} - self.create_lambda_functions(function_props={"environment": env_vars}) + self.create_lambda_functions() diff --git a/tests/e2e/tracer/bkp_tracer.py b/tests/e2e/tracer/test_tracer.py similarity index 91% rename from tests/e2e/tracer/bkp_tracer.py rename to tests/e2e/tracer/test_tracer.py index de25bc02ebf..04d5b3dcba0 100644 --- a/tests/e2e/tracer/bkp_tracer.py +++ b/tests/e2e/tracer/test_tracer.py @@ -1,7 +1,8 @@ +import json + import pytest from tests.e2e.tracer.handlers import async_capture, basic_handler -from tests.e2e.tracer.infrastructure import TracerStack from tests.e2e.utils import data_builder, data_fetcher @@ -37,6 +38,7 @@ def async_fn(infrastructure: dict) -> str: def test_lambda_handler_trace_is_visible(basic_handler_fn_arn: str, basic_handler_fn: str): # 
GIVEN + service = data_builder.build_service_name() handler_name = basic_handler.lambda_handler.__name__ handler_subsegment = f"## {handler_name}" handler_metadata_key = f"{handler_name} response" @@ -48,15 +50,16 @@ def test_lambda_handler_trace_is_visible(basic_handler_fn_arn: str, basic_handle trace_query = data_builder.build_trace_default_query(function_name=basic_handler_fn) # WHEN - _, execution_time = data_fetcher.get_lambda_response(lambda_arn=basic_handler_fn_arn) - data_fetcher.get_lambda_response(lambda_arn=basic_handler_fn_arn) + event = json.dumps({"service": service}) + _, execution_time = data_fetcher.get_lambda_response(lambda_arn=basic_handler_fn_arn, payload=event) + data_fetcher.get_lambda_response(lambda_arn=basic_handler_fn_arn, payload=event) # THEN trace = data_fetcher.get_traces(start_date=execution_time, filter_expression=trace_query, minimum_traces=2) assert len(trace.get_annotation(key="ColdStart", value=True)) == 1 - assert len(trace.get_metadata(key=handler_metadata_key, namespace=TracerStack.SERVICE_NAME)) == 2 - assert len(trace.get_metadata(key=method_metadata_key, namespace=TracerStack.SERVICE_NAME)) == 2 + assert len(trace.get_metadata(key=handler_metadata_key, namespace=service)) == 2 + assert len(trace.get_metadata(key=method_metadata_key, namespace=service)) == 2 assert len(trace.get_subsegment(name=handler_subsegment)) == 2 assert len(trace.get_subsegment(name=method_subsegment)) == 2 @@ -87,6 +90,7 @@ def test_lambda_handler_trace_multiple_functions_same_name(same_function_name_ar def test_async_trace_is_visible(async_fn_arn: str, async_fn: str): # GIVEN + service = data_builder.build_service_name() async_fn_name = f"async_capture.{async_capture.async_get_users.__name__}" async_fn_name_subsegment = f"## {async_fn_name}" async_fn_name_metadata_key = f"{async_fn_name} response" @@ -94,10 +98,11 @@ def test_async_trace_is_visible(async_fn_arn: str, async_fn: str): trace_query = 
data_builder.build_trace_default_query(function_name=async_fn) # WHEN - _, execution_time = data_fetcher.get_lambda_response(lambda_arn=async_fn_arn) + event = json.dumps({"service": service}) + _, execution_time = data_fetcher.get_lambda_response(lambda_arn=async_fn_arn, payload=event) # THEN trace = data_fetcher.get_traces(start_date=execution_time, filter_expression=trace_query) assert len(trace.get_subsegment(name=async_fn_name_subsegment)) == 1 - assert len(trace.get_metadata(key=async_fn_name_metadata_key, namespace=TracerStack.SERVICE_NAME)) == 1 + assert len(trace.get_metadata(key=async_fn_name_metadata_key, namespace=service)) == 1 From fa3e47c547c5fc5c4f01bcddec80950ffe86e887 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 6 Sep 2022 09:48:30 +0200 Subject: [PATCH 13/33] docs(maintainers): update workflow diagram --- MAINTAINERS.md | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/MAINTAINERS.md b/MAINTAINERS.md index fb94090f762..d69f663f421 100644 --- a/MAINTAINERS.md +++ b/MAINTAINERS.md @@ -273,13 +273,13 @@ graph TD Spawn -->|Worker1| Worker1_Start["Load tests"] Spawn -->|WorkerN| WorkerN_Start["Load tests"] - Worker0_Start -->|Wait| LambdaLayerStack["Lambda Layer Stack Deployment"] - Worker1_Start -->|Wait| LambdaLayerStack["Lambda Layer Stack Deployment"] - WorkerN_Start -->|Wait| LambdaLayerStack["Lambda Layer Stack Deployment"] + Worker0_Start -->|Wait| LambdaLayer["Lambda Layer build"] + Worker1_Start -->|Wait| LambdaLayer["Lambda Layer build"] + WorkerN_Start -->|Wait| LambdaLayer["Lambda Layer build"] - LambdaLayerStack -->|Worker0| Worker0_Deploy["Launch feature stack"] - LambdaLayerStack -->|Worker1| Worker1_Deploy["Launch feature stack"] - LambdaLayerStack -->|WorkerN| WorkerN_Deploy["Launch feature stack"] + LambdaLayer -->|Worker0| Worker0_Deploy["Launch feature stack"] + LambdaLayer -->|Worker1| Worker1_Deploy["Launch feature stack"] + LambdaLayer -->|WorkerN| WorkerN_Deploy["Launch feature stack"] 
Worker0_Deploy -->|Worker0| Worker0_Tests["Run tests"] Worker1_Deploy -->|Worker1| Worker1_Tests["Run tests"] From b48d6cf177fda2c9f8146c211c40a51ae844236f Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 6 Sep 2022 09:50:51 +0200 Subject: [PATCH 14/33] docs(maintainers): update structure --- MAINTAINERS.md | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/MAINTAINERS.md b/MAINTAINERS.md index d69f663f421..f6e96e33c11 100644 --- a/MAINTAINERS.md +++ b/MAINTAINERS.md @@ -226,10 +226,12 @@ Our E2E framework relies on pytest fixtures to coordinate infrastructure and tes - **`conftest.py`**. Imports and deploys a given feature Infrastructure. Hierarchy matters. Top-level `conftest` deploys stacks only once and blocks I/O across all CPUs. Feature-level `conftest` deploys stacks in parallel, and once complete run all tests in parallel. - **`handlers`**. Lambda function handlers that will be automatically deployed and exported as PascalCase for later use. +**tests/e2e structure** + ```shell . ├── __init__.py -├── conftest.py # deploys Lambda Layer stack +├── conftest.py # builds Lambda Layer once ├── logger │ ├── __init__.py │ ├── conftest.py # deploys LoggerStack @@ -254,11 +256,10 @@ Our E2E framework relies on pytest fixtures to coordinate infrastructure and tes │ ├── infrastructure.py # TracerStack definition │ └── test_tracer.py └── utils - ├── Dockerfile ├── __init__.py ├── data_builder # build_service_name(), build_add_dimensions_input, etc. ├── data_fetcher # get_traces(), get_logs(), get_lambda_response(), etc. - ├── infrastructure.py # base infrastructure like deploy logic, Layer Stack, etc. + ├── infrastructure.py # base infrastructure like deploy logic, etc. 
``` #### Workflow From 7e52789091c4ac657e56c4aabc44e5294fc3f9ec Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 6 Sep 2022 14:45:46 +0200 Subject: [PATCH 15/33] chore: cleanup cdk.out generated file names --- tests/e2e/utils/infrastructure.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/tests/e2e/utils/infrastructure.py b/tests/e2e/utils/infrastructure.py index 437b420226a..e87693c63bb 100644 --- a/tests/e2e/utils/infrastructure.py +++ b/tests/e2e/utils/infrastructure.py @@ -30,7 +30,6 @@ def __init__(self) -> None: self.feature_name = self.feature_path.parts[-1].replace("_", "-") # logger, tracer, event-handler, etc. self.stack_name = f"test{PYTHON_RUNTIME_VERSION}-{self.feature_name}-{self.RANDOM_STACK_VALUE}" self.stack_outputs: Dict[str, str] = {} - self.stack_outputs_file = f"{CDK_OUT_PATH / self.feature_name}_stack_outputs.json" # tracer_stack_outputs.json # NOTE: CDK stack account and region are tokens, we need to resolve earlier self.session = boto3.Session() @@ -41,12 +40,13 @@ def __init__(self) -> None: self.app = App() self.stack = Stack(self.app, self.stack_name, env=Environment(account=self.account_id, region=self.region)) - # NOTE: Introspect feature details to generate CDK App (_create_temp_cdk_app method) + # NOTE: Introspect feature details to generate CDK App (_create_temp_cdk_app method), Synth and Deployment self._feature_infra_class_name = self.__class__.__name__ self._feature_infra_module_path = self.feature_path / "infrastructure" self._feature_infra_file = self.feature_path / "infrastructure.py" self._handlers_dir = self.feature_path / "handlers" self._cdk_out_dir: Path = CDK_OUT_PATH / self.feature_name + self._stack_outputs_file = f'{self._cdk_out_dir / "stack_outputs.json"}' if not self._feature_infra_file.exists(): raise FileNotFoundError( @@ -154,7 +154,7 @@ def deploy(self) -> Dict[str, str]: stack_file = self._create_temp_cdk_app() synth_command = f"npx cdk synth --app 'python {stack_file}' -o 
{self._cdk_out_dir}" deploy_command = ( - f"npx cdk deploy --app '{self._cdk_out_dir}' -O {self.stack_outputs_file} --require-approval=never" + f"npx cdk deploy --app '{self._cdk_out_dir}' -O {self._stack_outputs_file} --require-approval=never" ) # CDK launches a background task, so we must wait @@ -178,7 +178,7 @@ def _sync_stack_name(self, stack_output: Dict): self.stack_name = list(stack_output.keys())[0] def _read_stack_output(self): - content = Path(self.stack_outputs_file).read_text() + content = Path(self._stack_outputs_file).read_text() outputs: Dict = json.loads(content) self._sync_stack_name(stack_output=outputs) @@ -204,7 +204,8 @@ def _create_temp_cdk_app(self): if not self._cdk_out_dir.is_dir(): self._cdk_out_dir.mkdir(parents=True, exist_ok=True) - temp_file = self._cdk_out_dir / f"{self.stack_name}_cdk_app.py" + # cdk.out/tracer/cdk_app_v39.py + temp_file = self._cdk_out_dir / f"cdk_app_{PYTHON_RUNTIME_VERSION}.py" with temp_file.open("w") as fd: fd.write(textwrap.dedent(code)) From 62e92bbf9fb7e418d5b3fc9d74d03b69f175e695 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 6 Sep 2022 14:46:16 +0200 Subject: [PATCH 16/33] chore: add cdk context file --- cdk.context.json | 41 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) create mode 100644 cdk.context.json diff --git a/cdk.context.json b/cdk.context.json new file mode 100644 index 00000000000..183318d1ae4 --- /dev/null +++ b/cdk.context.json @@ -0,0 +1,41 @@ +{ + "acknowledged-issue-numbers": [ + 19836 + ], + "vpc-provider:account=231436140809:filter.isDefault=true:region=eu-west-1:returnAsymmetricSubnets=true": { + "vpcId": "vpc-4d79432b", + "vpcCidrBlock": "172.31.0.0/16", + "availabilityZones": [], + "subnetGroups": [ + { + "name": "Public", + "type": "Public", + "subnets": [ + { + "subnetId": "subnet-59e15003", + "cidr": "172.31.32.0/20", + "availabilityZone": "eu-west-1a", + "routeTableId": "rtb-cc0a27b5" + }, + { + "subnetId": "subnet-c99febaf", + "cidr": 
"172.31.0.0/20", + "availabilityZone": "eu-west-1b", + "routeTableId": "rtb-cc0a27b5" + }, + { + "subnetId": "subnet-d543049d", + "cidr": "172.31.16.0/20", + "availabilityZone": "eu-west-1c", + "routeTableId": "rtb-cc0a27b5" + } + ] + } + ] + }, + "availability-zones:account=231436140809:region=eu-west-1": [ + "eu-west-1a", + "eu-west-1b", + "eu-west-1c" + ] +} From a6a4da014dfb9126fb47a533c3ab33581a2bd2b0 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 6 Sep 2022 15:34:59 +0200 Subject: [PATCH 17/33] chore: add more info on sync stack name Signed-off-by: heitorlessa --- tests/e2e/utils/infrastructure.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tests/e2e/utils/infrastructure.py b/tests/e2e/utils/infrastructure.py index e87693c63bb..9209e46ae99 100644 --- a/tests/e2e/utils/infrastructure.py +++ b/tests/e2e/utils/infrastructure.py @@ -170,6 +170,10 @@ def delete(self) -> None: def _sync_stack_name(self, stack_output: Dict): """Synchronize initial stack name with CDK final stack name + When using `cdk synth` with context methods (`from_lookup`), + CDK can initialize the Stack multiple times until it resolves + the context. 
+ Parameters ---------- stack_output : Dict From b6fcc01461d7128179d5fe7ae0b33939225778ba Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 6 Sep 2022 15:46:36 +0200 Subject: [PATCH 18/33] chore: add CDK CLI as a project dependency Signed-off-by: heitorlessa --- .github/workflows/run-e2e-tests.yml | 12 +++++- package-lock.json | 58 +++++++++++++++++++++++++++++ package.json | 7 ++++ 3 files changed, 75 insertions(+), 2 deletions(-) create mode 100644 package-lock.json create mode 100644 package.json diff --git a/.github/workflows/run-e2e-tests.yml b/.github/workflows/run-e2e-tests.yml index 7786903e218..50811a0a8b6 100644 --- a/.github/workflows/run-e2e-tests.yml +++ b/.github/workflows/run-e2e-tests.yml @@ -28,8 +28,8 @@ jobs: strategy: matrix: # Maintenance: disabled until we discover concurrency lock issue with multiple versions and tmp - # version: ["3.7", "3.8", "3.9"] - version: ["3.7"] + version: ["3.7", "3.8", "3.9"] + # version: ["3.7"] steps: - name: "Checkout" uses: actions/checkout@v3 @@ -41,6 +41,14 @@ jobs: python-version: ${{ matrix.version }} architecture: "x64" cache: "poetry" + - name: Setup Node.js + uses: actions/setup-node@v3 + with: + node-version: "16.12" + - name: Install CDK CLI + run: | + npm install + cdk --version - name: Install dependencies run: make dev - name: Configure AWS credentials diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 00000000000..5a72aa1ad10 --- /dev/null +++ b/package-lock.json @@ -0,0 +1,58 @@ +{ + "name": "aws-lambda-powertools-python-e2e", + "version": "1.0.0", + "lockfileVersion": 2, + "requires": true, + "packages": { + "": { + "name": "aws-lambda-powertools-python-e2e", + "version": "1.0.0", + "devDependencies": { + "aws-cdk": "2.40.0" + } + }, + "node_modules/aws-cdk": { + "version": "2.40.0", + "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.40.0.tgz", + "integrity": "sha512-oHacGkLFDELwhpJsZSAhFHWDxIeZW3DgKkwiXlNO81JxNfjcHgPR2rsbh/Gz+n4ErAEzOV6WfuWVMe68zv+iPg==", 
+ "bin": { + "cdk": "bin/cdk" + }, + "engines": { + "node": ">= 14.15.0" + }, + "optionalDependencies": { + "fsevents": "2.3.2" + } + }, + "node_modules/fsevents": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + } + }, + "dependencies": { + "aws-cdk": { + "version": "2.40.0", + "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.40.0.tgz", + "integrity": "sha512-oHacGkLFDELwhpJsZSAhFHWDxIeZW3DgKkwiXlNO81JxNfjcHgPR2rsbh/Gz+n4ErAEzOV6WfuWVMe68zv+iPg==", + "requires": { + "fsevents": "2.3.2" + } + }, + "fsevents": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "optional": true + } + } +} diff --git a/package.json b/package.json new file mode 100644 index 00000000000..6e3a2c1b216 --- /dev/null +++ b/package.json @@ -0,0 +1,7 @@ +{ + "name": "aws-lambda-powertools-python-e2e", + "version": "1.0.0", + "devDependencies": { + "aws-cdk": "2.40.0" + } +} From dbf7fa1fe0a8bcb1898bb33cf7371f01dc4b8d39 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 6 Sep 2022 15:47:42 +0200 Subject: [PATCH 19/33] chore: micro-optimization to not regenerate cdk app if already present Signed-off-by: heitorlessa --- tests/e2e/utils/infrastructure.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/tests/e2e/utils/infrastructure.py b/tests/e2e/utils/infrastructure.py index 9209e46ae99..c33768284a6 100644 --- a/tests/e2e/utils/infrastructure.py +++ b/tests/e2e/utils/infrastructure.py @@ -195,7 +195,14 @@ def _create_temp_cdk_app(self): This allows us to keep our BaseInfrastructure while 
supporting context lookups. """ - # tests.e2e.tracer.infrastructure + # cdk.out/tracer/cdk_app_v39.py + temp_file = self._cdk_out_dir / f"cdk_app_{PYTHON_RUNTIME_VERSION}.py" + + if temp_file.exists(): + # no need to regenerate CDK app since it's just boilerplate + return temp_file + + # Convert from POSIX path to Python module: tests.e2e.tracer.infrastructure infra_module = str(self._feature_infra_module_path.relative_to(SOURCE_CODE_ROOT_PATH)).replace(os.sep, ".") code = f""" @@ -208,8 +215,6 @@ def _create_temp_cdk_app(self): if not self._cdk_out_dir.is_dir(): self._cdk_out_dir.mkdir(parents=True, exist_ok=True) - # cdk.out/tracer/cdk_app_v39.py - temp_file = self._cdk_out_dir / f"cdk_app_{PYTHON_RUNTIME_VERSION}.py" with temp_file.open("w") as fd: fd.write(textwrap.dedent(code)) From 06e1ca5839dc098bc104d2d3a14051b57eeec5e6 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 6 Sep 2022 16:40:43 +0200 Subject: [PATCH 20/33] docs(maintainers): cleanup structure and parallelization Signed-off-by: heitorlessa --- MAINTAINERS.md | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/MAINTAINERS.md b/MAINTAINERS.md index f6e96e33c11..637961b29e7 100644 --- a/MAINTAINERS.md +++ b/MAINTAINERS.md @@ -220,11 +220,7 @@ To run locally, you need [AWS CDK CLI](https://docs.aws.amazon.com/cdk/v2/guide/ #### Structure -Our E2E framework relies on pytest fixtures to coordinate infrastructure and test parallelization (see [Workflow](#workflow)). You'll notice multiple `conftest.py`, `infrastructure.py`, and `handlers`. - -- **`infrastructure`**. Uses CDK to define what a Stack for a given feature should look like. It inherits from `BaseInfrastructure` to handle all boilerplate and deployment logic necessary. -- **`conftest.py`**. Imports and deploys a given feature Infrastructure. Hierarchy matters. Top-level `conftest` deploys stacks only once and blocks I/O across all CPUs. 
Feature-level `conftest` deploys stacks in parallel, and once complete run all tests in parallel. -- **`handlers`**. Lambda function handlers that will be automatically deployed and exported as PascalCase for later use. +Our E2E framework relies on pytest fixtures to coordinate infrastructure and test parallelization (see [Parallelization](#Parallelization)). **tests/e2e structure** @@ -262,7 +258,15 @@ Our E2E framework relies on pytest fixtures to coordinate infrastructure and tes ├── infrastructure.py # base infrastructure like deploy logic, etc. ``` -#### Workflow +You probably notice we have multiple `conftest.py`, `infrastructure.py`, and `handlers` directory. + +- **`infrastructure.py`**. Uses CDK to define the infrastructure a given feature needs. +- **`conftest.py`**. Handles deployment and deletion a given feature Infrastructure. Hierarchy matters + - Top-level `e2e/conftest` deploys stacks only once and blocks I/O across all CPUs. + - Feature-level `e2e//conftest` deploys stacks in parallel and make them independent of each other. +- **`handlers/`**. Lambda function handlers that will be automatically deployed and exported as PascalCase for later use. + +#### Parallelization We parallelize our end-to-end tests to benefit from speed and isolate Lambda functions to ease assessing side effects (e.g., traces, logs, etc.). 
The following diagram demonstrates the process we take every time you use `make e2e`: From 6976e28cffa05fbbfa44731e8b3e87beed14d3ee Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 9 Sep 2022 14:18:02 +0200 Subject: [PATCH 21/33] docs(maintainers): explain framework mechanics --- MAINTAINERS.md | 174 +++++++++++++++++++++++++++++- tests/e2e/utils/base.py | 20 ++++ tests/e2e/utils/infrastructure.py | 9 +- 3 files changed, 197 insertions(+), 6 deletions(-) create mode 100644 tests/e2e/utils/base.py diff --git a/MAINTAINERS.md b/MAINTAINERS.md index 637961b29e7..499007858c9 100644 --- a/MAINTAINERS.md +++ b/MAINTAINERS.md @@ -16,7 +16,11 @@ - [Drafting release notes](#drafting-release-notes) - [Run end to end tests](#run-end-to-end-tests) - [Structure](#structure) - - [Workflow](#workflow) + - [Mechanics](#mechanics) + - [Authoring an E2E test](#authoring-an-e2e-test) + - [Internals](#internals) + - [Parallelization](#parallelization) + - [CDK safe parallelization](#cdk-safe-parallelization) - [Releasing a documentation hotfix](#releasing-a-documentation-hotfix) - [Maintain Overall Health of the Repo](#maintain-overall-health-of-the-repo) - [Manage Roadmap](#manage-roadmap) @@ -266,7 +270,169 @@ You probably notice we have multiple `conftest.py`, `infrastructure.py`, and `ha - Feature-level `e2e//conftest` deploys stacks in parallel and make them independent of each other. - **`handlers/`**. Lambda function handlers that will be automatically deployed and exported as PascalCase for later use. -#### Parallelization +#### Mechanics + +Under `BaseInfrastructure`, we hide the complexity of handling CDK parallel deployments, exposing CloudFormation Outputs, building Lambda Layer with the latest available code, and creating Lambda functions found in `handlers`. 
+ +This allows us to benefit from test and deployment parallelization, use IDE step-through debugging for a single test, run a subset of tests and only deploy their related infrastructure, without any custom configuration. + +> Class diagram to understand abstraction built when defining a new stack (`LoggerStack`) + +```mermaid +classDiagram + class InfrastructureProvider { + <> + +deploy() Dict + +delete() + +create_resources() + +create_lambda_functions(function_props: Dict) + } + + class BaseInfrastructure { + +deploy() Dict + +delete() + +create_lambda_functions(function_props: Dict) Dict~Functions~ + +add_cfn_output(name: str, value: str, arn: Optional[str]) + } + + class TracerStack { + +create_resources() + } + + class LoggerStack { + +create_resources() + } + + class MetricsStack { + +create_resources() + } + + class EventHandlerStack { + +create_resources() + } + + InfrastructureProvider <|-- BaseInfrastructure : implement + BaseInfrastructure <|-- TracerStack : inherit + BaseInfrastructure <|-- LoggerStack : inherit + BaseInfrastructure <|-- MetricsStack : inherit + BaseInfrastructure <|-- EventHandlerStack : inherit +``` + +#### Authoring an E2E test + +Imagine you're going to create E2E for Event Handler feature for the first time. + +As a mental model, you'll need to: **(1)** Define infrastructure, **(2)** Deploy/Delete infrastructure when tests run, and **(3)** Expose resources for E2E tests. + +**Define infrastructure** + +We use CDK as our Infrastructure as Code tool of choice. Before you start using CDK, you need to take the following steps: + +1. Create `tests/e2e/event_handler/infrastructure.py` file +2. Create a new class `EventHandlerStack` and inherit from `BaseInfrastructure` +3. Override `create_resources` method and define your infrastructure using CDK +4. 
(Optional) Create a Lambda function under `handlers/alb_handler.py` + +> Excerpt `infrastructure.py` for Event Handler + +```python +class EventHandlerStack(BaseInfrastructure): + def create_resources(self): + functions = self.create_lambda_functions() + + self._create_alb(function=functions["AlbHandler"]) + ... + + def _create_alb(self, function: Function): + vpc = ec2.Vpc.from_lookup( + self.stack, + "VPC", + is_default=True, + region=self.region, + ) + + alb = elbv2.ApplicationLoadBalancer(self.stack, "ALB", vpc=vpc, internet_facing=True) + CfnOutput(self.stack, "ALBDnsName", value=alb.load_balancer_dns_name) + ... +``` + +> Excerpt `alb_handler.py` for Event Handler + +```python +from aws_lambda_powertools.event_handler import ALBResolver, Response, content_types + +app = ALBResolver() + + +@app.get("/todos") +def hello(): + return Response( + status_code=200, + content_type=content_types.TEXT_PLAIN, + body="Hello world", + cookies=["CookieMonster", "MonsterCookie"], + headers={"Foo": ["bar", "zbr"]}, + ) + + +def lambda_handler(event, context): + return app.resolve(event, context) +``` + +**Deploy/Delete infrastructure when tests run** + +We need to instruct Pytest to deploy our infrastructure when our tests start, and delete it when they complete (successfully or not). + +For this, we create a `test/e2e/event_handler/conftest.py` and create fixture scoped to our test module. This will remain static and will not need any further modification in the future. 
+ +> Excerpt `conftest.py` for Event Handler + +```python +import pytest + +from tests.e2e.event_handler.infrastructure import EventHandlerStack + + +@pytest.fixture(autouse=True, scope="module") +def infrastructure(): + """Setup and teardown logic for E2E test infrastructure + + Yields + ------ + Dict[str, str] + CloudFormation Outputs from deployed infrastructure + """ + stack = EventHandlerStack() + try: + yield stack.deploy() + finally: + stack.delete() + +``` + +**Expose resources for E2E tests** + +Within our tests, we should now have access to the `infrastructure` fixture we defined. We can access any Stack Output using pytest dependency injection. + +> Excerpt `test_header_serializer.py` for Event Handler + +```python +@pytest.fixture +def alb_basic_listener_endpoint(infrastructure: dict) -> str: + dns_name = infrastructure.get("ALBDnsName") + port = infrastructure.get("ALBBasicListenerPort", "") + return f"http://{dns_name}:{port}" + + +def test_alb_headers_serializer(alb_basic_listener_endpoint): + # GIVEN + url = f"{alb_basic_listener_endpoint}/todos" + ... +``` + +#### Internals + +##### Parallelization We parallelize our end-to-end tests to benefit from speed and isolate Lambda functions to ease assessing side effects (e.g., traces, logs, etc.). The following diagram demonstrates the process we take every time you use `make e2e`: @@ -299,6 +465,10 @@ graph TD ResultCollection --> DeployEnd["Delete Stacks"] ``` +##### CDK safe parallelization + +Describe CDK App, Stack, synth, etc. + ### Releasing a documentation hotfix You can rebuild the latest documentation without a full release via this [GitHub Actions Workflow](https://github.com/awslabs/aws-lambda-powertools-python/actions/workflows/rebuild_latest_docs.yml). Choose `Run workflow`, keep `develop` as the branch, and input the latest Powertools version available. 
diff --git a/tests/e2e/utils/base.py b/tests/e2e/utils/base.py new file mode 100644 index 00000000000..2a6e6032e52 --- /dev/null +++ b/tests/e2e/utils/base.py @@ -0,0 +1,20 @@ +from abc import ABC, abstractmethod +from typing import Dict, Optional + + +class InfrastructureProvider(ABC): + @abstractmethod + def create_lambda_functions(self, function_props: Optional[Dict] = None) -> Dict: + pass + + @abstractmethod + def deploy(self) -> Dict[str, str]: + pass + + @abstractmethod + def delete(self): + pass + + @abstractmethod + def create_resources(self): + pass diff --git a/tests/e2e/utils/infrastructure.py b/tests/e2e/utils/infrastructure.py index c33768284a6..82d0463b2aa 100644 --- a/tests/e2e/utils/infrastructure.py +++ b/tests/e2e/utils/infrastructure.py @@ -4,7 +4,6 @@ import subprocess import sys import textwrap -from abc import ABC, abstractmethod from pathlib import Path from typing import Callable, Dict, Generator, Optional from uuid import uuid4 @@ -16,13 +15,14 @@ from filelock import FileLock from mypy_boto3_cloudformation import CloudFormationClient +from tests.e2e.utils.base import InfrastructureProvider from tests.e2e.utils.constants import CDK_OUT_PATH, PYTHON_RUNTIME_VERSION, SOURCE_CODE_ROOT_PATH from tests.e2e.utils.lambda_layer.powertools_layer import LocalLambdaPowertoolsLayer logger = logging.getLogger(__name__) -class BaseInfrastructure(ABC): +class BaseInfrastructure(InfrastructureProvider): RANDOM_STACK_VALUE: str = f"{uuid4()}" def __init__(self) -> None: @@ -103,6 +103,8 @@ def create_lambda_functions(self, function_props: Optional[Dict] = None) -> Dict code=Code.from_asset(path=layer_build), ) + # NOTE: Agree on a convention if we need to support multi-file handlers + # as we're simply taking any file under `handlers/` to be a Lambda function. 
handlers = list(self._handlers_dir.rglob("*.py")) source = Code.from_asset(f"{self._handlers_dir}") logger.debug(f"Creating functions for handlers: {handlers}") @@ -222,7 +224,6 @@ def _create_temp_cdk_app(self): temp_file.chmod(0o755) return temp_file - @abstractmethod def create_resources(self) -> None: """Create any necessary CDK resources. It'll be called before deploy @@ -246,7 +247,7 @@ def created_resources(self): self.create_lambda_functions() ``` """ - ... + raise NotImplementedError() def add_cfn_output(self, name: str, value: str, arn: str = ""): """Create {Name} and optionally {Name}Arn CloudFormation Outputs. From ddf4a80c68f4e64ecf926c01ee2e207a03d5bccb Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 9 Sep 2022 15:11:48 +0200 Subject: [PATCH 22/33] docs(maintainers): explain CDK CLI parallelization; reorder TOC --- MAINTAINERS.md | 184 +++++++++++++++++++++++++++++-------------------- 1 file changed, 109 insertions(+), 75 deletions(-) diff --git a/MAINTAINERS.md b/MAINTAINERS.md index 499007858c9..40c50e8a672 100644 --- a/MAINTAINERS.md +++ b/MAINTAINERS.md @@ -15,12 +15,6 @@ - [Releasing a new version](#releasing-a-new-version) - [Drafting release notes](#drafting-release-notes) - [Run end to end tests](#run-end-to-end-tests) - - [Structure](#structure) - - [Mechanics](#mechanics) - - [Authoring an E2E test](#authoring-an-e2e-test) - - [Internals](#internals) - - [Parallelization](#parallelization) - - [CDK safe parallelization](#cdk-safe-parallelization) - [Releasing a documentation hotfix](#releasing-a-documentation-hotfix) - [Maintain Overall Health of the Repo](#maintain-overall-health-of-the-repo) - [Manage Roadmap](#manage-roadmap) @@ -34,6 +28,13 @@ - [Is that a bug?](#is-that-a-bug) - [Mentoring contributions](#mentoring-contributions) - [Long running issues or PRs](#long-running-issues-or-prs) +- [E2E framework](#e2e-framework) + - [Structure](#structure) + - [Mechanics](#mechanics) + - [Authoring an E2E test](#authoring-an-e2e-test) + 
- [Internals](#internals) + - [Test runner parallelization](#test-runner-parallelization) + - [CDK CLI parallelization](#cdk-cli-parallelization) ## Overview @@ -222,7 +223,79 @@ E2E tests are run on every push to `develop` or manually via [run-e2e-tests work To run locally, you need [AWS CDK CLI](https://docs.aws.amazon.com/cdk/v2/guide/getting_started.html#getting_started_prerequisites) and an [account bootstrapped](https://docs.aws.amazon.com/cdk/v2/guide/bootstrapping.html) (`cdk bootstrap`). With a default AWS CLI profile configured, or `AWS_PROFILE` environment variable set, run `make e2e tests`. -#### Structure +### Releasing a documentation hotfix + +You can rebuild the latest documentation without a full release via this [GitHub Actions Workflow](https://github.com/awslabs/aws-lambda-powertools-python/actions/workflows/rebuild_latest_docs.yml). Choose `Run workflow`, keep `develop` as the branch, and input the latest Powertools version available. + +This workflow will update both user guide and API documentation. + +### Maintain Overall Health of the Repo + +> TODO: Coordinate renaming `develop` to `main` + +Keep the `develop` branch at production quality at all times. Backport features as needed. Cut release branches and tags to enable future patches. + +### Manage Roadmap + +See [Roadmap section](https://awslabs.github.io/aws-lambda-powertools-python/latest/roadmap/) + +Ensure the repo highlights features that should be elevated to the project roadmap. Be clear about the feature’s status, priority, target version, and whether or not it should be elevated to the roadmap. + +### Add Continuous Integration Checks + +Add integration checks that validate pull requests and pushes to ease the burden on Pull Request reviewers. Continuously revisit areas of improvement to reduce operational burden in all parties involved. 
+ +### Negative Impact on the Project + +Actions that negatively impact the project will be handled by the admins, in coordination with other maintainers, in balance with the urgency of the issue. Examples would be [Code of Conduct](CODE_OF_CONDUCT.md) violations, deliberate harmful or malicious actions, spam, monopolization, and security risks. + +### Becoming a maintainer + +In 2023, we will revisit this. We need to improve our understanding of how other projects are doing, their mechanisms to promote key contributors, and how they interact daily. + +We suspect this process might look similar to the [OpenSearch project](https://github.com/opensearch-project/.github/blob/main/MAINTAINERS.md#becoming-a-maintainer). + +## Common scenarios + +These are recurring ambiguous situations that new and existing maintainers may encounter. They serve as guidance. It is up to each maintainer to follow, adjust, or handle in a different manner as long as [our conduct is consistent](#uphold-code-of-conduct) + +### Contribution is stuck + +A contribution can get stuck often due to lack of bandwidth and language barrier. For bandwidth issues, check whether the author needs help. Make sure you get their permission before pushing code into their existing PR - do not create a new PR unless strictly necessary. + +For language barrier and others, offer a 1:1 chat to get them unblocked. Often times, English might not be their primary language, and writing in public might put them off, or come across not the way they intended to be. + +In other cases, you may have constrained capacity. Use `help wanted` label when you want to signal other maintainers and external contributors that you could use a hand to move it forward. + +### Insufficient feedback or information + +When in doubt, use `need-more-information` or `need-customer-feedback` labels to signal more context and feedback are necessary before proceeding. 
You can also use `revisit-in-3-months` label when you expect it might take a while to gather enough information before you can decide. + +### Crediting contributions + +We credit all contributions as part of each [release note](https://github.com/awslabs/aws-lambda-powertools-python/releases) as an automated process. If you find contributors are missing from the release note you're producing, please add them manually. + +### Is that a bug? + +A bug produces incorrect or unexpected results at runtime that differ from its intended behavior. Bugs must be reproducible. They directly affect customers' experience at runtime despite following its recommended usage. + +Documentation snippets, use of internal components, or unadvertised functionalities are not considered bugs. + +### Mentoring contributions + +Always favor mentoring issue authors to contribute, unless they're not interested or the implementation is sensitive (_e.g., complexity, time to release, etc._). + +Make use of `help wanted` and `good first issue` to signal additional contributions the community can help. + +### Long running issues or PRs + +Try offering a 1:1 call in the attempt to get to a mutual understanding and clarify areas that maintainers could help. + +In the rare cases where both parties don't have the bandwidth or expertise to continue, it's best to use the `revisit-in-3-months` label. By then, see if it's possible to break the PR or issue in smaller chunks, and eventually close if there is no progress. + +## E2E framework + +### Structure Our E2E framework relies on pytest fixtures to coordinate infrastructure and test parallelization (see [Parallelization](#Parallelization)). @@ -270,7 +343,7 @@ You probably notice we have multiple `conftest.py`, `infrastructure.py`, and `handlers` directory. - Feature-level `e2e//conftest` deploys stacks in parallel and make them independent of each other. - **`handlers/`**. 
Lambda function handlers that will be automatically deployed and exported as PascalCase for later use. -#### Mechanics +### Mechanics Under `BaseInfrastructure`, we hide the complexity of handling CDK parallel deployments, exposing CloudFormation Outputs, building Lambda Layer with the latest available code, and creating Lambda functions found in `handlers`. @@ -318,7 +391,7 @@ classDiagram BaseInfrastructure <|-- EventHandlerStack : inherit ``` -#### Authoring an E2E test +### Authoring an E2E test Imagine you're going to create E2E for Event Handler feature for the first time. @@ -430,9 +503,9 @@ def test_alb_headers_serializer(alb_basic_listener_endpoint): ... ``` -#### Internals +### Internals -##### Parallelization +#### Test runner parallelization We parallelize our end-to-end tests to benefit from speed and isolate Lambda functions to ease assessing side effects (e.g., traces, logs, etc.). The following diagram demonstrates the process we take every time you use `make e2e`: @@ -465,76 +538,37 @@ graph TD ResultCollection --> DeployEnd["Delete Stacks"] ``` -##### CDK safe parallelization - -Describe CDK App, Stack, synth, etc. - -### Releasing a documentation hotfix - -You can rebuild the latest documentation without a full release via this [GitHub Actions Workflow](https://github.com/awslabs/aws-lambda-powertools-python/actions/workflows/rebuild_latest_docs.yml). Choose `Run workflow`, keep `develop` as the branch, and input the latest Powertools version available. - -This workflow will update both user guide and API documentation. - -### Maintain Overall Health of the Repo +#### CDK CLI parallelization -> TODO: Coordinate renaming `develop` to `main` +For CDK CLI to work with [independent CDK Apps](https://docs.aws.amazon.com/cdk/v2/guide/apps.html), we specify an output directory when running `cdk synth -o cdk.out/` and then deploy from that said output directory with `cdk deploy --app cdk.out/`. 
-Keep the `develop` branch at production quality at all times. Backport features as needed. Cut release branches and tags to enable future patches. +We also create a typical CDK `app.py` at runtime with the information we know for a given feature when tests run on a per Python version basis. -### Manage Roadmap - -See [Roadmap section](https://awslabs.github.io/aws-lambda-powertools-python/latest/roadmap/) - -Ensure the repo highlights features that should be elevated to the project roadmap. Be clear about the feature’s status, priority, target version, and whether or not it should be elevated to the roadmap. +> Excerpt `cdk_app_V39.py` for Event Handler created at deploy phase -### Add Continuous Integration Checks - -Add integration checks that validate pull requests and pushes to ease the burden on Pull Request reviewers. Continuously revisit areas of improvement to reduce operational burden in all parties involved. - -### Negative Impact on the Project - -Actions that negatively impact the project will be handled by the admins, in coordination with other maintainers, in balance with the urgency of the issue. Examples would be [Code of Conduct](CODE_OF_CONDUCT.md) violations, deliberate harmful or malicious actions, spam, monopolization, and security risks. - -### Becoming a maintainer - -In 2023, we will revisit this. We need to improve our understanding of how other projects are doing, their mechanisms to promote key contributors, and how they interact daily. - -We suspect this process might look similar to the [OpenSearch project](https://github.com/opensearch-project/.github/blob/main/MAINTAINERS.md#becoming-a-maintainer). - -## Common scenarios - -These are recurring ambiguous situations that new and existing maintainers may encounter. They serve as guidance. 
It is up to each maintainer to follow, adjust, or handle in a different manner as long as [our conduct is consistent](#uphold-code-of-conduct) - -### Contribution is stuck - -A contribution can get stuck often due to lack of bandwidth and language barrier. For bandwidth issues, check whether the author needs help. Make sure you get their permission before pushing code into their existing PR - do not create a new PR unless strictly necessary. - -For language barrier and others, offer a 1:1 chat to get them unblocked. Often times, English might not be their primary language, and writing in public might put them off, or come across not the way they intended to be. - -In other cases, you may have constrained capacity. Use `help wanted` label when you want to signal other maintainers and external contributors that you could use a hand to move it forward. - -### Insufficient feedback or information - -When in doubt, use `need-more-information` or `need-customer-feedback` labels to signal more context and feedback are necessary before proceeding. You can also use `revisit-in-3-months` label when you expect it might take a while to gather enough information before you can decide. - -### Crediting contributions - -We credit all contributions as part of each [release note](https://github.com/awslabs/aws-lambda-powertools-python/releases) as an automated process. If you find contributors are missing from the release note you're producing, please add them manually. - -### Is that a bug? - -A bug produces incorrect or unexpected results at runtime that differ from its intended behavior. Bugs must be reproducible. They directly affect customers experience at runtime despite following its recommended usage. - -Documentation snippets, use of internal components, or unadvertised functionalities are not considered bugs. 
- -### Mentoring contributions +```python +from tests.e2e.event_handler.infrastructure import EventHandlerStack +stack = EventHandlerStack() +stack.create_resources() +stack.app.synth() +``` -Always favor mentoring issue authors to contribute, unless they're not interested or the implementation is sensitive (_e.g., complexity, time to release, etc._). +When E2E tests are run for a single feature or all of them, `cdk.out` looks like the following: -Make use of `help wanted` and `good first issue` to signal additional contributions the community can help. +```shell +total 8 +drwxr-xr-x 18 lessa staff 576B Sep 6 15:38 event-handler +drwxr-xr-x 3 lessa staff 96B Sep 6 15:08 layer_build +-rw-r--r-- 1 lessa staff 32B Sep 6 15:08 layer_build.diff +drwxr-xr-x 18 lessa staff 576B Sep 6 15:38 logger +drwxr-xr-x 18 lessa staff 576B Sep 6 15:38 metrics +drwxr-xr-x 22 lessa staff 704B Sep 9 10:52 tracer +``` -### Long running issues or PRs +Where: -Try offering a 1:1 call in the attempt to get to a mutual understanding and clarify areas that maintainers could help. +- `` has CDK Assets, CDK `manifest.json`, our `cdk_app_.py` and `stack_outputs.json` +- `layer_build` contains our Lambda Layer source code built once, used by all stacks independently +- `layer_build.diff` contains a hash on whether our source code has changed to speed up further deployments and E2E tests -In the rare cases where both parties don't have the bandwidth or expertise to continue, it's best to use the `revisit-in-3-months` label. By then, see if it's possible to break the PR or issue in smaller chunks, and eventually close if there is no progress. +Together, all of this allows us to use Pytest like we would for any project, use CDK CLI and its [context methods](https://docs.aws.amazon.com/cdk/v2/guide/context.html#context_methods) (`from_lookup`), and use step-through debugging for a single E2E test without any extra configuration. 
From 184d74345c0115aa88f0b026aa49b2a3b4229997 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 9 Sep 2022 15:14:41 +0200 Subject: [PATCH 23/33] docs(maintainers): fix internal link from E2E structure --- MAINTAINERS.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/MAINTAINERS.md b/MAINTAINERS.md index 40c50e8a672..43f1fe2f9dd 100644 --- a/MAINTAINERS.md +++ b/MAINTAINERS.md @@ -297,7 +297,7 @@ In the rare cases where both parties don't have the bandwidth or expertise to co ### Structure -Our E2E framework relies on pytest fixtures to coordinate infrastructure and test parallelization (see [Parallelization](#Parallelization)). +Our E2E framework relies on pytest fixtures to coordinate infrastructure and test parallelization - see [Test Parallelization](#test-runner-parallelization), and [CDK CLI Parallelization](#cdk-cli-parallelization). **tests/e2e structure** From ab612ed50b774fda2bb9bf1cde2ecb73cffa318c Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 9 Sep 2022 15:16:27 +0200 Subject: [PATCH 24/33] docs(maintainers): fix typos --- MAINTAINERS.md | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/MAINTAINERS.md b/MAINTAINERS.md index 43f1fe2f9dd..a447ca9ec9e 100644 --- a/MAINTAINERS.md +++ b/MAINTAINERS.md @@ -297,7 +297,7 @@ In the rare cases where both parties don't have the bandwidth or expertise to co ### Structure -Our E2E framework relies on pytest fixtures to coordinate infrastructure and test parallelization - see [Test Parallelization](#test-runner-parallelization), and [CDK CLI Parallelization](#cdk-cli-parallelization). +Our E2E framework relies on pytest fixtures to coordinate infrastructure and test parallelization - see [Test Parallelization](#test-runner-parallelization) and [CDK CLI Parallelization](#cdk-cli-parallelization). 
**tests/e2e structure** @@ -335,17 +335,17 @@ Our E2E framework relies on pytest fixtures to coordinate infrastructure and tes ├── infrastructure.py # base infrastructure like deploy logic, etc. ``` -You probably notice we have multiple `conftest.py`, `infrastructure.py`, and `handlers` directory. +You probably noticed we have multiple `conftest.py`, `infrastructure.py`, and `handlers` directory. - **`infrastructure.py`**. Uses CDK to define the infrastructure a given feature needs. -- **`conftest.py`**. Handles deployment and deletion a given feature Infrastructure. Hierarchy matters +- **`conftest.py`**. Handles deployment and deletion a given feature Infrastructure. Hierarchy matters: - Top-level `e2e/conftest` deploys stacks only once and blocks I/O across all CPUs. - Feature-level `e2e//conftest` deploys stacks in parallel and make them independent of each other. -- **`handlers/`**. Lambda function handlers that will be automatically deployed and exported as PascalCase for later use. +- **`handlers/`**. Lambda function handlers that will be automatically deployed and exported in PascalCase (e.g., `BasicHandler`) for later use. ### Mechanics -Under `BaseInfrastructure`, we hide the complexity of handling CDK parallel deployments, exposing CloudFormation Outputs, building Lambda Layer with the latest available code, and creating Lambda functions found in `handlers`. +Under [`BaseInfrastructure`](https://github.com/awslabs/aws-lambda-powertools-python/blob/develop/tests/e2e/utils/infrastructure.py), we hide the complexity of handling CDK parallel deployments, exposing CloudFormation Outputs, building Lambda Layer with the latest available code, and creating Lambda functions found in `handlers`. This allows us to benefit from test and deployment parallelization, use IDE step-through debugging for a single test, run a subset of tests and only deploy their related infrastructure, without any custom configuration. 
@@ -358,14 +358,14 @@ classDiagram +deploy() Dict +delete() +create_resources() - +create_lambda_functions(function_props: Dict) + +create_lambda_functions() Dict~Functions~ } class BaseInfrastructure { +deploy() Dict +delete() - +create_lambda_functions(function_props: Dict) Dict~Functions~ - +add_cfn_output(name: str, value: str, arn: Optional[str]) + +create_lambda_functions() Dict~Functions~ + +add_cfn_output() } class TracerStack { From c07e315a54205dae079ed611cdc973b4e4115795 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 9 Sep 2022 15:23:33 +0200 Subject: [PATCH 25/33] docs(maintainers): clean up structure section --- MAINTAINERS.md | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/MAINTAINERS.md b/MAINTAINERS.md index a447ca9ec9e..173bccd6994 100644 --- a/MAINTAINERS.md +++ b/MAINTAINERS.md @@ -335,13 +335,14 @@ Our E2E framework relies on pytest fixtures to coordinate infrastructure and tes ├── infrastructure.py # base infrastructure like deploy logic, etc. ``` -You probably noticed we have multiple `conftest.py`, `infrastructure.py`, and `handlers` directory. +Where: -- **`infrastructure.py`**. Uses CDK to define the infrastructure a given feature needs. +- **`/infrastructure.py`**. Uses CDK to define the infrastructure a given feature needs. +- **`/handlers/`**. Lambda function handlers that will be automatically deployed and exported in PascalCase (e.g., `BasicHandler`) for later use. +- **`util/>`**. Test utilities to build data and fetch AWS data to ease assertion - **`conftest.py`**. Handles deployment and deletion a given feature Infrastructure. Hierarchy matters: - Top-level `e2e/conftest` deploys stacks only once and blocks I/O across all CPUs. - Feature-level `e2e//conftest` deploys stacks in parallel and make them independent of each other. -- **`handlers/`**. Lambda function handlers that will be automatically deployed and exported in PascalCase (e.g., `BasicHandler`) for later use. 
### Mechanics From a800f8135ac37d990379294fa22d66d54711dbdb Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 9 Sep 2022 15:29:20 +0200 Subject: [PATCH 26/33] docs(maintainers): typos --- MAINTAINERS.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/MAINTAINERS.md b/MAINTAINERS.md index 173bccd6994..add13deeb0c 100644 --- a/MAINTAINERS.md +++ b/MAINTAINERS.md @@ -31,7 +31,7 @@ - [E2E framework](#e2e-framework) - [Structure](#structure) - [Mechanics](#mechanics) - - [Authoring an E2E test](#authoring-an-e2e-test) + - [Authoring a new feature E2E test](#authoring-a-new-feature-e2e-test) - [Internals](#internals) - [Test runner parallelization](#test-runner-parallelization) - [CDK CLI parallelization](#cdk-cli-parallelization) @@ -346,7 +346,7 @@ Where: ### Mechanics -Under [`BaseInfrastructure`](https://github.com/awslabs/aws-lambda-powertools-python/blob/develop/tests/e2e/utils/infrastructure.py), we hide the complexity of handling CDK parallel deployments, exposing CloudFormation Outputs, building Lambda Layer with the latest available code, and creating Lambda functions found in `handlers`. +Under [`BaseInfrastructure`](https://github.com/awslabs/aws-lambda-powertools-python/blob/develop/tests/e2e/utils/infrastructure.py), we hide the complexity of deployment/delete coordination under `deploy`, `delete`, and `create_lambda_functions` methods. This allows us to benefit from test and deployment parallelization, use IDE step-through debugging for a single test, run a subset of tests and only deploy their related infrastructure, without any custom configuration. @@ -392,7 +392,7 @@ classDiagram BaseInfrastructure <|-- EventHandlerStack : inherit ``` -### Authoring an E2E test +### Authoring a new feature E2E test Imagine you're going to create E2E for Event Handler feature for the first time. 
@@ -508,7 +508,7 @@ def test_alb_headers_serializer(alb_basic_listener_endpoint): #### Test runner parallelization -We parallelize our end-to-end tests to benefit from speed and isolate Lambda functions to ease assessing side effects (e.g., traces, logs, etc.). The following diagram demonstrates the process we take every time you use `make e2e`: +We parallelize our end-to-end tests to benefit from speed and isolate Lambda functions to ease asserting side effects (e.g., traces, logs, etc.). The following diagram demonstrates the process we take every time you use `make e2e`: ```mermaid graph TD From b4d7b943c60b796763bf568148af91d65aa5f763 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 9 Sep 2022 16:57:31 +0200 Subject: [PATCH 27/33] docs(maintainers): line editing, additional visuals to make it clearer --- MAINTAINERS.md | 101 ++++++++++++++++++++++++++++++++++++------------- 1 file changed, 74 insertions(+), 27 deletions(-) diff --git a/MAINTAINERS.md b/MAINTAINERS.md index add13deeb0c..819fa298aa4 100644 --- a/MAINTAINERS.md +++ b/MAINTAINERS.md @@ -32,6 +32,9 @@ - [Structure](#structure) - [Mechanics](#mechanics) - [Authoring a new feature E2E test](#authoring-a-new-feature-e2e-test) + - [1. Define infrastructure](#1-define-infrastructure) + - [2. Deploy/Delete infrastructure when tests run](#2-deploydelete-infrastructure-when-tests-run) + - [3. Access stack outputs for E2E tests](#3-access-stack-outputs-for-e2e-tests) - [Internals](#internals) - [Test runner parallelization](#test-runner-parallelization) - [CDK CLI parallelization](#cdk-cli-parallelization) @@ -297,7 +300,7 @@ In the rare cases where both parties don't have the bandwidth or expertise to co ### Structure -Our E2E framework relies on pytest fixtures to coordinate infrastructure and test parallelization - see [Test Parallelization](#test-runner-parallelization) and [CDK CLI Parallelization](#cdk-cli-parallelization). 
+Our E2E framework relies on [Pytest fixtures](https://docs.pytest.org/en/6.2.x/fixture.html) to coordinate infrastructure and test parallelization - see [Test Parallelization](#test-runner-parallelization) and [CDK CLI Parallelization](#cdk-cli-parallelization). **tests/e2e structure** @@ -338,17 +341,17 @@ Our E2E framework relies on pytest fixtures to coordinate infrastructure and tes Where: - **`/infrastructure.py`**. Uses CDK to define the infrastructure a given feature needs. -- **`/handlers/`**. Lambda function handlers that will be automatically deployed and exported in PascalCase (e.g., `BasicHandler`) for later use. -- **`util/>`**. Test utilities to build data and fetch AWS data to ease assertion -- **`conftest.py`**. Handles deployment and deletion a given feature Infrastructure. Hierarchy matters: - - Top-level `e2e/conftest` deploys stacks only once and blocks I/O across all CPUs. - - Feature-level `e2e//conftest` deploys stacks in parallel and make them independent of each other. +- **`/handlers/`**. Lambda function handlers to build, deploy, and exposed as stack output in PascalCase (e.g., `BasicHandler`). +- **`utils/`**. Test utilities to build data and fetch AWS data to ease assertion +- **`conftest.py`**. Deploys and deletes a given feature infrastructure. Hierarchy matters: + - **Top-level (`e2e/conftest`)**. Builds Lambda Layer only once and blocks I/O across all CPU workers. + - **Feature-level (`e2e//conftest`)**. Deploys stacks in parallel and make them independent of each other. ### Mechanics -Under [`BaseInfrastructure`](https://github.com/awslabs/aws-lambda-powertools-python/blob/develop/tests/e2e/utils/infrastructure.py), we hide the complexity of deployment/delete coordination under `deploy`, `delete`, and `create_lambda_functions` methods. 
+Under [`BaseInfrastructure`](https://github.com/awslabs/aws-lambda-powertools-python/blob/develop/tests/e2e/utils/infrastructure.py), we hide the complexity of deployment and delete coordination under `deploy`, `delete`, and `create_lambda_functions` methods. -This allows us to benefit from test and deployment parallelization, use IDE step-through debugging for a single test, run a subset of tests and only deploy their related infrastructure, without any custom configuration. +This allows us to benefit from test and deployment parallelization, use IDE step-through debugging for a single test, run one, subset, or all tests and only deploy their related infrastructure, without any custom configuration. > Class diagram to understand abstraction built when defining a new stack (`LoggerStack`) @@ -394,20 +397,23 @@ classDiagram ### Authoring a new feature E2E test -Imagine you're going to create E2E for Event Handler feature for the first time. +Imagine you're going to create E2E for Event Handler feature for the first time. Keep the following mental model when reading: -As a mental model, you'll need to: **(1)** Define infrastructure, **(2)** Deploy/Delete infrastructure when tests run, and **(3)** Expose resources for E2E tests. +```mermaid +graph LR + A["1. Define infrastructure"]-->B["2. Deploy/Delete infrastructure"]-->C["3.Access Stack outputs" ] +``` -**Define infrastructure** +#### 1. Define infrastructure -We use CDK as our Infrastructure as Code tool of choice. Before you start using CDK, you need to take the following steps: +We use CDK as our Infrastructure as Code tool of choice. Before you start using CDK, you'd take the following steps: 1. Create `tests/e2e/event_handler/infrastructure.py` file 2. Create a new class `EventHandlerStack` and inherit from `BaseInfrastructure` 3. Override `create_resources` method and define your infrastructure using CDK 4. 
(Optional) Create a Lambda function under `handlers/alb_handler.py` -> Excerpt `infrastructure.py` for Event Handler +> Excerpt `tests/e2e/event_handler/infrastructure.py` ```python class EventHandlerStack(BaseInfrastructure): @@ -430,7 +436,7 @@ class EventHandlerStack(BaseInfrastructure): ... ``` -> Excerpt `alb_handler.py` for Event Handler +> Excerpt `tests/e2e/event_handler/handlers/alb_handler.py` ```python from aws_lambda_powertools.event_handler import ALBResolver, Response, content_types @@ -453,11 +459,11 @@ def lambda_handler(event, context): return app.resolve(event, context) ``` -**Deploy/Delete infrastructure when tests run** +#### 2. Deploy/Delete infrastructure when tests run -We need to instruct Pytest to deploy our infrastructure when our tests start, and delete it when they complete (successfully or not). +We need to create a Pytest fixture for our new feature under `tests/e2e/event_handler/conftest.py`. -For this, we create a `test/e2e/event_handler/conftest.py` and create fixture scoped to our test module. This will remain static and will not need any further modification in the future. +This will instruct Pytest to deploy our infrastructure when our tests start, and delete it when they complete whether tests are successful or not. Note that this file will not need any modification in the future. > Excerpt `conftest.py` for Event Handler @@ -484,11 +490,13 @@ def infrastructure(): ``` -**Expose resources for E2E tests** +#### 3. Access stack outputs for E2E tests + +Within our tests, we should now have access to the `infrastructure` fixture we defined earlier in `tests/e2e/event_handler/conftest.py`. -Within our tests, we should now have access to the `infrastructure` fixture we defined. We can access any Stack Output using pytest dependency injection. +We can access any Stack Output using pytest dependency injection. 
-> Excerpt `test_header_serializer.py` for Event Handler +> Excerpt `tests/e2e/event_handler/test_header_serializer.py` ```python @pytest.fixture @@ -508,7 +516,9 @@ def test_alb_headers_serializer(alb_basic_listener_endpoint): #### Test runner parallelization -We parallelize our end-to-end tests to benefit from speed and isolate Lambda functions to ease asserting side effects (e.g., traces, logs, etc.). The following diagram demonstrates the process we take every time you use `make e2e`: +Besides speed, we parallelize our end-to-end tests to ease asserting async side-effects may take a while per test too, _e.g., traces to become available_. + +The following diagram demonstrates the process we take every time you use `make e2e` locally or at CI: ```mermaid graph TD @@ -541,9 +551,20 @@ graph TD #### CDK CLI parallelization -For CDK CLI to work with [independent CDK Apps](https://docs.aws.amazon.com/cdk/v2/guide/apps.html), we specify an output directory when running `cdk synth -o cdk.out/` and then deploy from that said output directory with `cdk deploy --app cdk.out/`. +For CDK CLI to work with [independent CDK Apps](https://docs.aws.amazon.com/cdk/v2/guide/apps.html), we specify an output directory when synthesizing our stack and deploy from said output directory. + +```mermaid +flowchart TD + subgraph "Deploying distinct CDK Apps" + EventHandlerInfra["Event Handler CDK App"] --> EventHandlerSynth + TracerInfra["Tracer CDK App"] --> TracerSynth + EventHandlerSynth["cdk synth --out cdk.out/event_handler"] --> EventHandlerDeploy["cdk deploy --app cdk.out/event_handler"] + + TracerSynth["cdk synth --out cdk.out/tracer"] --> TracerDeploy["cdk deploy --app cdk.out/tracer"] + end +``` -We also create a typical CDK `app.py` at runtime with the information we know for a given feature when tests run on a per Python version basis. 
+We create the typical CDK `app.py` at runtime when tests run, since we know which feature and Python version we're dealing with (locally or at CI). > Excerpt `cdk_app_V39.py` for Event Handler created at deploy phase @@ -554,7 +575,7 @@ stack.create_resources() stack.app.synth() ``` -When E2E tests are run for a single feature or all of them, `cdk.out` looks like the following: +When we run E2E tests for a single feature or all of them, our `cdk.out` looks like this: ```shell total 8 @@ -566,10 +587,36 @@ drwxr-xr-x 18 lessa staff 576B Sep 6 15:38 metrics drwxr-xr-x 22 lessa staff 704B Sep 9 10:52 tracer ``` +```mermaid +classDiagram + class CdkOutDirectory { + feature_name/ + layer_build/ + layer_build.diff + } + + class EventHandler { + manifest.json + stack_outputs.json + cdk_app_V39.py + asset.uuid/ + ... + } + + class StackOutputsJson { + BasicHandlerArn: str + ALBDnsName: str + ... + } + + CdkOutDirectory <|-- EventHandler : feature_name/ + StackOutputsJson <|-- EventHandler +``` + Where: -- `` has CDK Assets, CDK `manifest.json`, our `cdk_app_.py` and `stack_outputs.json` -- `layer_build` contains our Lambda Layer source code built once, used by all stacks independently -- `layer_build.diff` contains a hash on whether our source code has changed to speed up further deployments and E2E tests +- **``**. Contains CDK Assets, CDK `manifest.json`, our `cdk_app_.py` and `stack_outputs.json` +- **`layer_build`**. Contains our Lambda Layer source code built once, used by all stacks independently +- **`layer_build.diff`**. Contains a hash on whether our source code has changed to speed up further deployments and E2E tests Together, all of this allows us to use Pytest like we would for any project, use CDK CLI and its [context methods](https://docs.aws.amazon.com/cdk/v2/guide/context.html#context_methods) (`from_lookup`), and use step-through debugging for a single E2E test without any extra configuration. 
From 83e03ccaf1acda680fdf877c5045700ce9f850d8 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 9 Sep 2022 17:04:17 +0200 Subject: [PATCH 28/33] docs(maintainers): add note on VSCode subprocess debug --- MAINTAINERS.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/MAINTAINERS.md b/MAINTAINERS.md index 819fa298aa4..260f6628aa3 100644 --- a/MAINTAINERS.md +++ b/MAINTAINERS.md @@ -620,3 +620,5 @@ Where: - **`layer_build.diff`**. Contains a hash on whether our source code has changed to speed up further deployments and E2E tests Together, all of this allows us to use Pytest like we would for any project, use CDK CLI and its [context methods](https://docs.aws.amazon.com/cdk/v2/guide/context.html#context_methods) (`from_lookup`), and use step-through debugging for a single E2E test without any extra configuration. + +> NOTE: VSCode doesn't support debugging processes spawning sub-processes (like CDK CLI does w/ shell and CDK App). Maybe [this works](https://stackoverflow.com/a/65339352). PyCharm works just fine. From e845eb16473143aba6cda943478c1a3cf7911676 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 20 Sep 2022 09:52:34 +0200 Subject: [PATCH 29/33] chore: sync pyproject with pypi --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 79c7738ea94..484545f6d18 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "aws_lambda_powertools" -version = "1.26.6" +version = "1.29.2" description = "A suite of utilities for AWS Lambda functions to ease adopting best practices such as tracing, structured logging, custom metrics, batching, idempotency, feature flags, and more." 
authors = ["Amazon Web Services"] include = ["aws_lambda_powertools/py.typed", "THIRD-PARTY-LICENSES"] From e8343145280ad5cd6e47bc898d745bfbbafdc184 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 9 Sep 2022 18:30:15 +0200 Subject: [PATCH 30/33] chore(deps): old mike 0.6 was yanked (vuln.), move to 1.x --- poetry.lock | 66 +++++++++++++++++++++----------------------------- pyproject.toml | 2 +- 2 files changed, 29 insertions(+), 39 deletions(-) diff --git a/poetry.lock b/poetry.lock index b271f4741c5..724530c4902 100644 --- a/poetry.lock +++ b/poetry.lock @@ -644,7 +644,7 @@ python-versions = ">=3.6" [[package]] name = "mike" -version = "0.6.0" +version = "1.1.2" description = "Manage multiple versions of your MkDocs-powered documentation" category = "dev" optional = false @@ -653,12 +653,12 @@ python-versions = "*" [package.dependencies] jinja2 = "*" mkdocs = ">=1.0" -packaging = "*" -"ruamel.yaml" = "*" +pyyaml = ">=5.1" +verspec = "*" [package.extras] -dev = ["coverage", "flake8 (>=3.0)", "pypandoc (>=1.4)"] -test = ["coverage", "flake8 (>=3.0)"] +dev = ["coverage", "flake8 (>=3.0)", "shtab"] +test = ["coverage", "flake8 (>=3.0)", "shtab"] [[package]] name = "mkdocs" @@ -698,7 +698,7 @@ mkdocs = ">=0.17" [[package]] name = "mkdocs-material" -version = "8.5.2" +version = "8.5.3" description = "Documentation that simply works" category = "dev" optional = false @@ -1204,29 +1204,6 @@ python-versions = "*" decorator = ">=3.4.2" py = ">=1.4.26,<2.0.0" -[[package]] -name = "ruamel.yaml" -version = "0.17.21" -description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" -category = "dev" -optional = false -python-versions = ">=3" - -[package.dependencies] -"ruamel.yaml.clib" = {version = ">=0.2.6", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.11\""} - -[package.extras] -docs = ["ryd"] -jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] - -[[package]] -name = 
"ruamel.yaml.clib" -version = "0.2.6" -description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" -category = "dev" -optional = false -python-versions = ">=3.5" - [[package]] name = "s3transfer" version = "0.6.0" @@ -1337,6 +1314,17 @@ brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] +[[package]] +name = "verspec" +version = "0.1.0" +description = "Flexible version handling" +category = "dev" +optional = false +python-versions = "*" + +[package.extras] +test = ["coverage", "flake8 (>=3.7)", "mypy", "pretend", "pytest"] + [[package]] name = "watchdog" version = "2.1.9" @@ -1387,7 +1375,7 @@ pydantic = ["pydantic", "email-validator"] [metadata] lock-version = "1.1" python-versions = "^3.7.4" -content-hash = "01292aa33467d3c56cdd663661a20e155cfabb213950d250c331ecf25e906cf1" +content-hash = "03d9159363c50e6138020a6d20bafc2d5e29785fdf54e9475a61ae01327e10e2" [metadata.files] attrs = [ @@ -1660,15 +1648,18 @@ markupsafe = [ ] mccabe = [] mergedeep = [] -mike = [] +mike = [ + {file = "mike-1.1.2-py3-none-any.whl", hash = "sha256:4c307c28769834d78df10f834f57f810f04ca27d248f80a75f49c6fa2d1527ca"}, + {file = "mike-1.1.2.tar.gz", hash = "sha256:56c3f1794c2d0b5fdccfa9b9487beb013ca813de2e3ad0744724e9d34d40b77b"}, +] mkdocs = [ {file = "mkdocs-1.3.1-py3-none-any.whl", hash = "sha256:fda92466393127d2da830bc6edc3a625a14b436316d1caf347690648e774c4f0"}, {file = "mkdocs-1.3.1.tar.gz", hash = "sha256:a41a2ff25ce3bbacc953f9844ba07d106233cd76c88bac1f59cb1564ac0d87ed"}, ] mkdocs-git-revision-date-plugin = [] mkdocs-material = [ - {file = "mkdocs-material-8.5.2.tar.gz", hash = "sha256:16ca1304a93b085e5dfb0dbcc681b74dad1587d8ba727c89c8fd4259dd8fe004"}, - {file = "mkdocs_material-8.5.2-py2.py3-none-any.whl", hash = 
"sha256:1962099d8c6eb7571896a0e7fdc52ff4fda1e906969d0e42ae3537418e807868"}, + {file = "mkdocs_material-8.5.3-py3-none-any.whl", hash = "sha256:d194c38041d1e83560221022b3f85eec4604b35e44f5c3a488c24b88542074ed"}, + {file = "mkdocs_material-8.5.3.tar.gz", hash = "sha256:43b0aa707d6f9acd836024cab2dce9330957c94a4e1e41c23ee6c8ce67b4d8c5"}, ] mkdocs-material-extensions = [] mypy = [] @@ -1886,11 +1877,6 @@ pyyaml-env-tag = [] radon = [] requests = [] retry = [] -"ruamel.yaml" = [ - {file = "ruamel.yaml-0.17.21-py3-none-any.whl", hash = "sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7"}, - {file = "ruamel.yaml-0.17.21.tar.gz", hash = "sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af"}, -] -"ruamel.yaml.clib" = [] s3transfer = [ {file = "s3transfer-0.6.0-py3-none-any.whl", hash = "sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd"}, {file = "s3transfer-0.6.0.tar.gz", hash = "sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947"}, @@ -1951,6 +1937,10 @@ urllib3 = [ {file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"}, {file = "urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"}, ] +verspec = [ + {file = "verspec-0.1.0-py3-none-any.whl", hash = "sha256:741877d5633cc9464c45a469ae2a31e801e6dbbaa85b9675d481cda100f11c31"}, + {file = "verspec-0.1.0.tar.gz", hash = "sha256:c4504ca697b2056cdb4bfa7121461f5a0e81809255b41c03dda4ba823637c01e"}, +] watchdog = [ {file = "watchdog-2.1.9-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a735a990a1095f75ca4f36ea2ef2752c99e6ee997c46b0de507ba40a09bf7330"}, {file = "watchdog-2.1.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b17d302850c8d412784d9246cfe8d7e3af6bcd45f958abb2d08a6f8bedf695d"}, diff --git a/pyproject.toml b/pyproject.toml index 484545f6d18..418d5cc4ee9 100644 --- a/pyproject.toml +++ b/pyproject.toml 
@@ -50,7 +50,7 @@ xenon = "^0.9.0" flake8-eradicate = "^1.2.1" flake8-bugbear = "^22.8.23" mkdocs-git-revision-date-plugin = "^0.3.2" -mike = "^0.6.0" +mike = "^1.1.2" mypy = "^0.971" retry = "^0.9.2" pytest-xdist = "^2.5.0" From c5de8556b026f85b59b4808dd2f4ab638ed33900 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=BAben=20Fonseca?= Date: Tue, 20 Sep 2022 14:22:07 +0200 Subject: [PATCH 31/33] fix(e2e): fix tests --- tests/e2e/tracer/test_tracer.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/tests/e2e/tracer/test_tracer.py b/tests/e2e/tracer/test_tracer.py index 04d5b3dcba0..e56b70898e1 100644 --- a/tests/e2e/tracer/test_tracer.py +++ b/tests/e2e/tracer/test_tracer.py @@ -66,6 +66,7 @@ def test_lambda_handler_trace_is_visible(basic_handler_fn_arn: str, basic_handle def test_lambda_handler_trace_multiple_functions_same_name(same_function_name_arn: str, same_function_name_fn: str): # GIVEN + service = data_builder.build_service_name() method_name_todos = "same_function_name.Todos.get_all" method_subsegment_todos = f"## {method_name_todos}" method_metadata_key_todos = f"{method_name_todos} response" @@ -77,13 +78,14 @@ def test_lambda_handler_trace_multiple_functions_same_name(same_function_name_ar trace_query = data_builder.build_trace_default_query(function_name=same_function_name_fn) # WHEN - _, execution_time = data_fetcher.get_lambda_response(lambda_arn=same_function_name_arn) + event = json.dumps({"service": service}) + _, execution_time = data_fetcher.get_lambda_response(lambda_arn=same_function_name_arn, payload=event) # THEN trace = data_fetcher.get_traces(start_date=execution_time, filter_expression=trace_query) - assert len(trace.get_metadata(key=method_metadata_key_todos, namespace=TracerStack.SERVICE_NAME)) == 1 - assert len(trace.get_metadata(key=method_metadata_key_comments, namespace=TracerStack.SERVICE_NAME)) == 1 + assert len(trace.get_metadata(key=method_metadata_key_todos, namespace=service)) == 1 + assert 
len(trace.get_metadata(key=method_metadata_key_comments, namespace=service)) == 1 assert len(trace.get_subsegment(name=method_subsegment_todos)) == 1 assert len(trace.get_subsegment(name=method_subsegment_comments)) == 1 @@ -91,7 +93,7 @@ def test_lambda_handler_trace_multiple_functions_same_name(same_function_name_ar def test_async_trace_is_visible(async_fn_arn: str, async_fn: str): # GIVEN service = data_builder.build_service_name() - async_fn_name = f"async_capture.{async_capture.async_get_users.__name__}" + async_fn_name = async_capture.async_get_users.__name__ async_fn_name_subsegment = f"## {async_fn_name}" async_fn_name_metadata_key = f"{async_fn_name} response" From be6dc2eef106a0ee73f6d515aada75120200fb2e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=BAben=20Fonseca?= Date: Tue, 20 Sep 2022 14:52:03 +0200 Subject: [PATCH 32/33] fix(tests): fix tracer e2e tests --- tests/e2e/tracer/handlers/same_function_name.py | 2 ++ tests/e2e/tracer/test_tracer.py | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/e2e/tracer/handlers/same_function_name.py b/tests/e2e/tracer/handlers/same_function_name.py index 78ef99d42fa..240e3329bc8 100644 --- a/tests/e2e/tracer/handlers/same_function_name.py +++ b/tests/e2e/tracer/handlers/same_function_name.py @@ -26,6 +26,8 @@ def get_all(self): def lambda_handler(event: dict, context: LambdaContext): + # Maintenance: create a public method to set these explicitly + tracer.service = event["service"] todos = Todos() comments = Comments() diff --git a/tests/e2e/tracer/test_tracer.py b/tests/e2e/tracer/test_tracer.py index e56b70898e1..e2abc5af6bc 100644 --- a/tests/e2e/tracer/test_tracer.py +++ b/tests/e2e/tracer/test_tracer.py @@ -93,7 +93,7 @@ def test_lambda_handler_trace_multiple_functions_same_name(same_function_name_ar def test_async_trace_is_visible(async_fn_arn: str, async_fn: str): # GIVEN service = data_builder.build_service_name() - async_fn_name = async_capture.async_get_users.__name__ + 
async_fn_name = f"async_capture.{async_capture.async_get_users.__name__}" async_fn_name_subsegment = f"## {async_fn_name}" async_fn_name_metadata_key = f"{async_fn_name} response" From 7f62b2948d179d5038f9067e78148daf23ea2e88 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 20 Sep 2022 17:07:44 +0200 Subject: [PATCH 33/33] chore(gitignore): remove cdk context json file --- .gitignore | 2 ++ cdk.context.json | 41 ----------------------------------------- 2 files changed, 2 insertions(+), 41 deletions(-) delete mode 100644 cdk.context.json diff --git a/.gitignore b/.gitignore index 8947c546ec2..a69b4eaf618 100644 --- a/.gitignore +++ b/.gitignore @@ -312,3 +312,5 @@ site/ examples/**/sam/.aws-sam cdk.out +# NOTE: different accounts will be used for E2E thus creating unnecessary git clutter +cdk.context.json diff --git a/cdk.context.json b/cdk.context.json deleted file mode 100644 index 183318d1ae4..00000000000 --- a/cdk.context.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "acknowledged-issue-numbers": [ - 19836 - ], - "vpc-provider:account=231436140809:filter.isDefault=true:region=eu-west-1:returnAsymmetricSubnets=true": { - "vpcId": "vpc-4d79432b", - "vpcCidrBlock": "172.31.0.0/16", - "availabilityZones": [], - "subnetGroups": [ - { - "name": "Public", - "type": "Public", - "subnets": [ - { - "subnetId": "subnet-59e15003", - "cidr": "172.31.32.0/20", - "availabilityZone": "eu-west-1a", - "routeTableId": "rtb-cc0a27b5" - }, - { - "subnetId": "subnet-c99febaf", - "cidr": "172.31.0.0/20", - "availabilityZone": "eu-west-1b", - "routeTableId": "rtb-cc0a27b5" - }, - { - "subnetId": "subnet-d543049d", - "cidr": "172.31.16.0/20", - "availabilityZone": "eu-west-1c", - "routeTableId": "rtb-cc0a27b5" - } - ] - } - ] - }, - "availability-zones:account=231436140809:region=eu-west-1": [ - "eu-west-1a", - "eu-west-1b", - "eu-west-1c" - ] -}