diff --git a/Makefile b/Makefile index 73667eb5f58..709453ca08d 100644 --- a/Makefile +++ b/Makefile @@ -90,3 +90,23 @@ changelog: mypy: poetry run mypy --pretty aws_lambda_powertools + +format-examples: + poetry run isort docs/shared + poetry run black docs/shared/*.py + poetry run isort docs/examples + poetry run black docs/examples/*/*.py + poetry run black docs/examples/*/*/*.py + poetry run black docs/examples/*/*/*/*.py + poetry run black docs/examples/*/*/*/*/*.py + +lint-examples: + poetry run python3 -m py_compile docs/shared/*.py + poetry run python3 -m py_compile docs/examples/*/*.py + poetry run python3 -m py_compile docs/examples/*/*/*.py + poetry run python3 -m py_compile docs/examples/*/*/*/*.py + poetry run python3 -m py_compile docs/examples/*/*/*/*/*.py + cfn-lint docs/examples/*/*.yml + cfn-lint docs/examples/*/*/*.yml + cfn-lint docs/examples/*/*/*/*.yml + cfn-lint docs/examples/*/*/*/*/*.yml diff --git a/docs/core/event_handler/api_gateway.md b/docs/core/event_handler/api_gateway.md index 4f86dc8fdf3..cad0429cf93 100644 --- a/docs/core/event_handler/api_gateway.md +++ b/docs/core/event_handler/api_gateway.md @@ -23,44 +23,7 @@ You must have an existing [API Gateway Proxy integration](https://docs.aws.amazo This is the sample infrastructure for API Gateway we are using for the examples in this documentation. 
```yaml title="AWS Serverless Application Model (SAM) example" -AWSTemplateFormatVersion: '2010-09-09' -Transform: AWS::Serverless-2016-10-31 -Description: Hello world event handler API Gateway - -Globals: - Api: - TracingEnabled: true - Cors: # see CORS section - AllowOrigin: "'https://example.com'" - AllowHeaders: "'Content-Type,Authorization,X-Amz-Date'" - MaxAge: "'300'" - BinaryMediaTypes: # see Binary responses section - - '*~1*' # converts to */* for any binary type - Function: - Timeout: 5 - Runtime: python3.8 - Tracing: Active - Environment: - Variables: - LOG_LEVEL: INFO - POWERTOOLS_LOGGER_SAMPLE_RATE: 0.1 - POWERTOOLS_LOGGER_LOG_EVENT: true - POWERTOOLS_METRICS_NAMESPACE: MyServerlessApplication - POWERTOOLS_SERVICE_NAME: my_api-service - -Resources: - ApiFunction: - Type: AWS::Serverless::Function - Properties: - Handler: app.lambda_handler - CodeUri: api_handler/ - Description: API handler function - Events: - ApiEvent: - Type: Api - Properties: - Path: /{proxy+} # Send requests on any path to the lambda function - Method: ANY # Send requests using any http method to the lambda function +--8<-- "docs/examples/core/event_handler/api_gateway/template.yml" ``` ### Event Resolvers @@ -85,25 +48,8 @@ Here's an example on how we can handle the `/hello` path. 
=== "app.py" - ```python hl_lines="3 7 9 12 18" - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.logging import correlation_paths - from aws_lambda_powertools.event_handler import APIGatewayRestResolver - - tracer = Tracer() - logger = Logger() - app = APIGatewayRestResolver() - - @app.get("/hello") - @tracer.capture_method - def get_hello_universe(): - return {"message": "hello universe"} - - # You can continue to use other utilities just as before - @logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) - @tracer.capture_lambda_handler - def lambda_handler(event, context): - return app.resolve(event, context) + ```python hl_lines="2 7 10 13 20" + --8<-- "docs/examples/core/event_handler/api_gateway/app_rest_api.py" ``` === "hello_event.json" @@ -196,50 +142,16 @@ When using Amazon API Gateway HTTP API to front your Lambda functions, you can u Here's an example on how we can handle the `/hello` path. -```python hl_lines="3 7" title="Using HTTP API resolver" -from aws_lambda_powertools import Logger, Tracer -from aws_lambda_powertools.logging import correlation_paths -from aws_lambda_powertools.event_handler import APIGatewayHttpResolver - -tracer = Tracer() -logger = Logger() -app = APIGatewayHttpResolver() - -@app.get("/hello") -@tracer.capture_method -def get_hello_universe(): - return {"message": "hello universe"} - -# You can continue to use other utilities just as before -@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_HTTP) -@tracer.capture_lambda_handler -def lambda_handler(event, context): - return app.resolve(event, context) +```python hl_lines="2 7 20" title="Using HTTP API resolver" +--8<-- "docs/examples/core/event_handler/api_gateway/app_http_api.py" ``` #### Application Load Balancer When using Amazon Application Load Balancer to front your Lambda functions, you can use `ALBResolver`. 
-```python hl_lines="3 7" title="Using ALB resolver" -from aws_lambda_powertools import Logger, Tracer -from aws_lambda_powertools.logging import correlation_paths -from aws_lambda_powertools.event_handler import ALBResolver - -tracer = Tracer() -logger = Logger() -app = ALBResolver() - -@app.get("/hello") -@tracer.capture_method -def get_hello_universe(): - return {"message": "hello universe"} - -# You can continue to use other utilities just as before -@logger.inject_lambda_context(correlation_id_path=correlation_paths.APPLICATION_LOAD_BALANCER) -@tracer.capture_lambda_handler -def lambda_handler(event, context): - return app.resolve(event, context) +```python hl_lines="2 7 20" title="Using ALB resolver" +--8<-- "docs/examples/core/event_handler/api_gateway/app_alb.py" ``` ### Dynamic routes @@ -248,25 +160,8 @@ You can use `/path/{dynamic_value}` when configuring dynamic URL paths. This all === "app.py" - ```python hl_lines="9 11" - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.logging import correlation_paths - from aws_lambda_powertools.event_handler import APIGatewayRestResolver - - tracer = Tracer() - logger = Logger() - app = APIGatewayRestResolver() - - @app.get("/hello/") - @tracer.capture_method - def get_hello_you(name): - return {"message": f"hello {name}"} - - # You can continue to use other utilities just as before - @logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) - @tracer.capture_lambda_handler - def lambda_handler(event, context): - return app.resolve(event, context) + ```python hl_lines="10 12" + --8<-- "docs/examples/core/event_handler/api_gateway/app_dynamic_routes.py" ``` === "sample_request.json" @@ -286,25 +181,8 @@ You can also nest paths as configured earlier in [our sample infrastructure](#re === "app.py" - ```python hl_lines="9 11" - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.logging import correlation_paths - from 
aws_lambda_powertools.event_handler import APIGatewayRestResolver - - tracer = Tracer() - logger = Logger() - app = APIGatewayRestResolver() - - @app.get("//") - @tracer.capture_method - def get_message(message, name): - return {"message": f"{message}, {name}"} - - # You can continue to use other utilities just as before - @logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) - @tracer.capture_lambda_handler - def lambda_handler(event, context): - return app.resolve(event, context) + ```python hl_lines="10 12" + --8<-- "docs/examples/core/event_handler/api_gateway/app_nested_routes.py" ``` === "sample_request.json" @@ -332,17 +210,8 @@ You can also combine nested paths with greedy regex to catch in between routes. === "app.py" - ```python hl_lines="5" - from aws_lambda_powertools.event_handler import APIGatewayRestResolver - - app = APIGatewayRestResolver() - - @app.get(".+") - def catch_any_route_after_any(): - return {"path_received": app.current_event.path} - - def lambda_handler(event, context): - return app.resolve(event, context) + ```python hl_lines="6" + --8<-- "docs/examples/core/event_handler/api_gateway/app_catch_all_routes.py" ``` === "sample_request.json" @@ -361,27 +230,8 @@ You can use named decorators to specify the HTTP method that should be handled i === "app.py" - ```python hl_lines="9-10" - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.logging import correlation_paths - from aws_lambda_powertools.event_handler import APIGatewayRestResolver - - tracer = Tracer() - logger = Logger() - app = APIGatewayRestResolver() - - # Only POST HTTP requests to the path /hello will route to this function - @app.post("/hello") - @tracer.capture_method - def get_hello_you(): - name = app.current_event.json_body.get("name") - return {"message": f"hello {name}"} - - # You can continue to use other utilities just as before - 
@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) - @tracer.capture_lambda_handler - def lambda_handler(event, context): - return app.resolve(event, context) + ```python hl_lines="10-11" + --8<-- "docs/examples/core/event_handler/api_gateway/app_http_methods.py" ``` === "sample_request.json" @@ -400,27 +250,8 @@ HTTP methods. === "app.py" - ```python hl_lines="9-10" - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.logging import correlation_paths - from aws_lambda_powertools.event_handler import APIGatewayRestResolver - - tracer = Tracer() - logger = Logger() - app = APIGatewayRestResolver() - - # PUT and POST HTTP requests to the path /hello will route to this function - @app.route("/hello", method=["PUT", "POST"]) - @tracer.capture_method - def get_hello_you(): - name = app.current_event.json_body.get("name") - return {"message": f"hello {name}"} - - # You can continue to use other utilities just as before - @logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) - @tracer.capture_lambda_handler - def lambda_handler(event, context): - return app.resolve(event, context) + ```python hl_lines="10-11" + --8<-- "docs/examples/core/event_handler/api_gateway/app_multi_http_methods.py" ``` === "sample_request.json" @@ -449,124 +280,34 @@ Within `app.current_event` property, you can access query strings as dictionary You can access the raw payload via `body` property, or if it's a JSON string you can quickly deserialize it via `json_body` property. 
-```python hl_lines="7-9 11" title="Accessing query strings, JSON payload, and raw payload" -from aws_lambda_powertools.event_handler import APIGatewayRestResolver - -app = APIGatewayRestResolver() - -@app.get("/hello") -def get_hello_you(): - query_strings_as_dict = app.current_event.query_string_parameters - json_payload = app.current_event.json_body - payload = app.current_event.body - - name = app.current_event.get_query_string_value(name="name", default_value="") - return {"message": f"hello {name}"} - -def lambda_handler(event, context): - return app.resolve(event, context) +```python hl_lines="8-10 12" title="Accessing query strings, JSON payload, and raw payload" +--8<-- "docs/examples/core/event_handler/api_gateway/app_query_string.py" ``` #### Headers Similarly to [Query strings](#query-strings-and-payload), you can access headers as dictionary via `app.current_event.headers`, or by name via `get_header_value`. -```python hl_lines="7-8" title="Accessing HTTP Headers" -from aws_lambda_powertools.event_handler import APIGatewayRestResolver - -app = APIGatewayRestResolver() - -@app.get("/hello") -def get_hello_you(): - headers_as_dict = app.current_event.headers - name = app.current_event.get_header_value(name="X-Name", default_value="") - - return {"message": f"hello {name}"} - -def lambda_handler(event, context): - return app.resolve(event, context) +```python hl_lines="8-9" title="Accessing HTTP Headers" +--8<-- "docs/examples/core/event_handler/api_gateway/app_headers.py" ``` - ### Handling not found routes By default, we return `404` for any unmatched route. You can use **`not_found`** decorator to override this behaviour, and return a custom **`Response`**. 
-```python hl_lines="11 13 16" title="Handling not found" -from aws_lambda_powertools import Logger, Tracer -from aws_lambda_powertools.logging import correlation_paths -from aws_lambda_powertools.event_handler import content_types -from aws_lambda_powertools.event_handler.api_gateway import APIGatewayRestResolver, Response -from aws_lambda_powertools.event_handler.exceptions import NotFoundError - -tracer = Tracer() -logger = Logger() -app = APIGatewayRestResolver() - -@app.not_found -@tracer.capture_method -def handle_not_found_errors(exc: NotFoundError) -> Response: - # Return 418 upon 404 errors - logger.info(f"Not found route: {app.current_event.path}") - return Response( - status_code=418, - content_type=content_types.TEXT_PLAIN, - body="I'm a teapot!" - ) - - -@app.get("/catch/me/if/you/can") -@tracer.capture_method -def catch_me_if_you_can(): - return {"message": "oh hey"} - -@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) -@tracer.capture_lambda_handler -def lambda_handler(event, context): - return app.resolve(event, context) +```python hl_lines="12 14 17" title="Handling not found" +--8<-- "docs/examples/core/event_handler/api_gateway/app_not_found.py" ``` - ### Exception handling You can use **`exception_handler`** decorator with any Python exception. This allows you to handle a common exception outside your route, for example validation errors. 
-```python hl_lines="10 15" title="Exception handling" -from aws_lambda_powertools import Logger, Tracer -from aws_lambda_powertools.logging import correlation_paths -from aws_lambda_powertools.event_handler import content_types -from aws_lambda_powertools.event_handler.api_gateway import APIGatewayRestResolver, Response - -tracer = Tracer() -logger = Logger() -app = APIGatewayRestResolver() - -@app.exception_handler(ValueError) -def handle_value_error(ex: ValueError): - metadata = {"path": app.current_event.path} - logger.error(f"Malformed request: {ex}", extra=metadata) - - return Response( - status_code=400, - content_type=content_types.TEXT_PLAIN, - body="Invalid request", - ) - - -@app.get("/hello") -@tracer.capture_method -def hello_name(): - name = app.current_event.get_query_string_value(name="name") - if name is not None: - raise ValueError("name query string must be present") - return {"message": f"hello {name}"} - -@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) -@tracer.capture_lambda_handler -def lambda_handler(event, context): - return app.resolve(event, context) +```python hl_lines="11 16" title="Exception handling" +--8<-- "docs/examples/core/event_handler/api_gateway/app_exception_handler.py" ``` ### Raising HTTP errors @@ -578,52 +319,8 @@ You can easily raise any HTTP Error back to the client using `ServiceError` exce Additionally, we provide pre-defined errors for the most popular ones such as HTTP 400, 401, 404, 500. 
-```python hl_lines="4-10 20 25 30 35 39" title="Raising common HTTP Status errors (4xx, 5xx)" -from aws_lambda_powertools import Logger, Tracer -from aws_lambda_powertools.logging import correlation_paths -from aws_lambda_powertools.event_handler import APIGatewayRestResolver -from aws_lambda_powertools.event_handler.exceptions import ( - BadRequestError, - InternalServerError, - NotFoundError, - ServiceError, - UnauthorizedError, -) - -tracer = Tracer() -logger = Logger() - -app = APIGatewayRestResolver() - -@app.get(rule="/bad-request-error") -def bad_request_error(): - # HTTP 400 - raise BadRequestError("Missing required parameter") - -@app.get(rule="/unauthorized-error") -def unauthorized_error(): - # HTTP 401 - raise UnauthorizedError("Unauthorized") - -@app.get(rule="/not-found-error") -def not_found_error(): - # HTTP 404 - raise NotFoundError - -@app.get(rule="/internal-server-error") -def internal_server_error(): - # HTTP 500 - raise InternalServerError("Internal server error") - -@app.get(rule="/service-error", cors=True) -def service_error(): - raise ServiceError(502, "Something went wrong!") - # alternatively - # from http import HTTPStatus - # raise ServiceError(HTTPStatus.BAD_GATEWAY.value, "Something went wrong) - -def handler(event, context): - return app.resolve(event, context) +```python hl_lines="3-9 21 27 33 39 44" title="Raising common HTTP Status errors (4xx, 5xx)" +--8<-- "docs/examples/core/event_handler/api_gateway/app_http_errors.py" ``` ### Custom Domain API Mappings @@ -637,23 +334,7 @@ This will lead to a HTTP 404 despite having your Lambda configured correctly. 
Se === "app.py" ```python hl_lines="7" - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.logging import correlation_paths - from aws_lambda_powertools.event_handler import APIGatewayRestResolver - - tracer = Tracer() - logger = Logger() - app = APIGatewayRestResolver(strip_prefixes=["/payment"]) - - @app.get("/subscriptions/") - @tracer.capture_method - def get_subscription(subscription): - return {"subscription_id": subscription} - - @logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) - @tracer.capture_lambda_handler - def lambda_handler(event, context): - return app.resolve(event, context) + --8<-- "docs/examples/core/event_handler/api_gateway/app_custom_domain.py" ``` === "sample_request.json" @@ -682,32 +363,8 @@ This will ensure that CORS headers are always returned as part of the response w === "app.py" - ```python hl_lines="9 11" - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.logging import correlation_paths - from aws_lambda_powertools.event_handler.api_gateway import APIGatewayRestResolver, CORSConfig - - tracer = Tracer() - logger = Logger() - - cors_config = CORSConfig(allow_origin="https://example.com", max_age=300) - app = APIGatewayRestResolver(cors=cors_config) - - @app.get("/hello/") - @tracer.capture_method - def get_hello_you(name): - return {"message": f"hello {name}"} - - @app.get("/hello", cors=False) # optionally exclude CORS from response, if needed - @tracer.capture_method - def get_hello_no_cors_needed(): - return {"message": "hello, no CORS needed for this path ;)"} - - # You can continue to use other utilities just as before - @logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) - @tracer.capture_lambda_handler - def lambda_handler(event, context): - return app.resolve(event, context) + ```python hl_lines="8-9 12 18" + --8<-- "docs/examples/core/event_handler/api_gateway/app_cors.py" ``` === "response.json" @@ 
-768,26 +425,8 @@ You can use the `Response` class to have full control over the response, for exa === "app.py" - ```python hl_lines="11-16" - import json - from aws_lambda_powertools.event_handler.api_gateway import APIGatewayRestResolver, Response - - app = APIGatewayRestResolver() - - @app.get("/hello") - def get_hello_you(): - payload = json.dumps({"message": "I'm a teapot"}) - custom_headers = {"X-Custom": "X-Value"} - - return Response( - status_code=418, - content_type="application/json", - body=payload, - headers=custom_headers, - ) - - def lambda_handler(event, context): - return app.resolve(event, context) + ```python hl_lines="13-18" + --8<-- "docs/examples/core/event_handler/api_gateway/app_response.py" ``` === "response.json" @@ -802,6 +441,7 @@ You can use the `Response` class to have full control over the response, for exa "isBase64Encoded": false, "statusCode": 418 } + ``` ### Compress @@ -812,17 +452,8 @@ You can compress with gzip and base64 encode your responses via `compress` param === "app.py" - ```python hl_lines="5 7" - from aws_lambda_powertools.event_handler import APIGatewayRestResolver - - app = APIGatewayRestResolver() - - @app.get("/hello", compress=True) - def get_hello_you(): - return {"message": "hello universe"} - - def lambda_handler(event, context): - return app.resolve(event, context) + ```python hl_lines="6 8" + --8<-- "docs/examples/core/event_handler/api_gateway/app_compress.py" ``` === "sample_request.json" @@ -863,21 +494,8 @@ Like `compress` feature, the client must send the `Accept` header with the corre === "app.py" - ```python hl_lines="4 7 11" - import os - from pathlib import Path - - from aws_lambda_powertools.event_handler.api_gateway import APIGatewayRestResolver, Response - - app = APIGatewayRestResolver() - logo_file: bytes = Path(os.getenv("LAMBDA_TASK_ROOT") + "/logo.svg").read_bytes() - - @app.get("/logo") - def get_logo(): - return Response(status_code=200, content_type="image/svg+xml", body=logo_file) - - def 
lambda_handler(event, context): - return app.resolve(event, context) + ```python hl_lines="4 7 12" + --8<-- "docs/examples/core/event_handler/api_gateway/app_binary.py" ``` === "logo.svg" @@ -960,61 +578,15 @@ This will enable full tracebacks errors in the response, print request and respo It's best to use for local development only! ```python hl_lines="3" title="Enabling debug mode" -from aws_lambda_powertools.event_handler import APIGatewayRestResolver - -app = APIGatewayRestResolver(debug=True) - -@app.get("/hello") -def get_hello_universe(): - return {"message": "hello universe"} - -def lambda_handler(event, context): - return app.resolve(event, context) +--8<-- "docs/examples/core/event_handler/api_gateway/app_debug.py" ``` ### Custom serializer You can instruct API Gateway handler to use a custom serializer to best suit your needs, for example take into account Enums when serializing. -```python hl_lines="21-22 26" title="Using a custom JSON serializer for responses" -import json -from enum import Enum -from json import JSONEncoder -from typing import Dict - -from aws_lambda_powertools.event_handler import APIGatewayRestResolver - -class CustomEncoder(JSONEncoder): - """Your customer json encoder""" - def default(self, obj): - if isinstance(obj, Enum): - return obj.value - try: - iterable = iter(obj) - except TypeError: - pass - else: - return sorted(iterable) - return JSONEncoder.default(self, obj) - -def custom_serializer(obj) -> str: - """Your custom serializer function APIGatewayRestResolver will use""" - return json.dumps(obj, cls=CustomEncoder) - -# Assigning your custom serializer -app = APIGatewayRestResolver(serializer=custom_serializer) - -class Color(Enum): - RED = 1 - BLUE = 2 - -@app.get("/colors") -def get_color() -> Dict: - return { - # Color.RED will be serialized to 1 as expected now - "color": Color.RED, - "variations": {"light", "dark"}, - } +```python hl_lines="24-25 30" title="Using a custom JSON serializer for responses" +--8<-- 
"docs/examples/core/event_handler/api_gateway/app_custom_serializer.py" ``` ### Split routes with Router @@ -1028,54 +600,15 @@ Let's assume you have `app.py` as your Lambda function entrypoint and routes in We import **Router** instead of **APIGatewayRestResolver**; syntax wise is exactly the same. ```python hl_lines="5 8 12 15 21" - import itertools - from typing import Dict - - from aws_lambda_powertools import Logger - from aws_lambda_powertools.event_handler.api_gateway import Router - - logger = Logger(child=True) - router = Router() - USERS = {"user1": "details_here", "user2": "details_here", "user3": "details_here"} - - - @router.get("/users") - def get_users() -> Dict: - # /users?limit=1 - pagination_limit = router.current_event.get_query_string_value(name="limit", default_value=10) - - logger.info(f"Fetching the first {pagination_limit} users...") - ret = dict(itertools.islice(USERS.items(), int(pagination_limit))) - return {"items": [ret]} - - @router.get("/users/") - def get_user(username: str) -> Dict: - logger.info(f"Fetching username {username}") - return {"details": USERS.get(username, {})} - - # many other related /users routing + --8<-- "docs/examples/core/event_handler/api_gateway/users_split_routes.py" ``` === "app.py" We use `include_router` method and include all user routers registered in the `router` global object. 
- ```python hl_lines="7 10-11" - from typing import Dict - - from aws_lambda_powertools import Logger - from aws_lambda_powertools.event_handler import APIGatewayRestResolver - from aws_lambda_powertools.utilities.typing import LambdaContext - - import users - - logger = Logger() - app = APIGatewayRestResolver() - app.include_router(users.router) - - - def lambda_handler(event: Dict, context: LambdaContext): - return app.resolve(event, context) + ```python hl_lines="3 10-11" + --8<-- "docs/examples/core/event_handler/api_gateway/app_split_routes.py" ``` #### Route prefix @@ -1087,43 +620,13 @@ When necessary, you can set a prefix when including a router object. This means === "app.py" ```python hl_lines="9" - from typing import Dict - - from aws_lambda_powertools.event_handler import APIGatewayRestResolver - from aws_lambda_powertools.utilities.typing import LambdaContext - - import users - - app = APIGatewayRestResolver() - app.include_router(users.router, prefix="/users") # prefix '/users' to any route in `users.router` - - - def lambda_handler(event: Dict, context: LambdaContext): - return app.resolve(event, context) + --8<-- "docs/examples/core/event_handler/api_gateway/app_route_prefix.py" ``` === "users.py" - ```python hl_lines="11 15" - from typing import Dict - - from aws_lambda_powertools import Logger - from aws_lambda_powertools.event_handler.api_gateway import Router - - logger = Logger(child=True) - router = Router() - USERS = {"user1": "details", "user2": "details", "user3": "details"} - - - @router.get("/") # /users, when we set the prefix in app.py - def get_users() -> Dict: - ... - - @router.get("/") - def get_user(username: str) -> Dict: - ... 
- - # many other related /users routing + ```python hl_lines="11 16" + --8<-- "docs/examples/core/event_handler/api_gateway/users_route_prefix.py" ``` #### Sample layout @@ -1132,7 +635,6 @@ This sample project contains a Users function with two distinct set of routes, ` === "Project layout" - ```python hl_lines="1 8 10 12-15" . ├── Pipfile # project app & dev dependencies; poetry, pipenv, etc. @@ -1164,121 +666,25 @@ This sample project contains a Users function with two distinct set of routes, ` === "template.yml" ```yaml hl_lines="22-23" - AWSTemplateFormatVersion: '2010-09-09' - Transform: AWS::Serverless-2016-10-31 - Description: Example service with multiple routes - Globals: - Function: - Timeout: 10 - MemorySize: 512 - Runtime: python3.9 - Tracing: Active - Architectures: - - x86_64 - Environment: - Variables: - LOG_LEVEL: INFO - POWERTOOLS_LOGGER_LOG_EVENT: true - POWERTOOLS_METRICS_NAMESPACE: MyServerlessApplication - POWERTOOLS_SERVICE_NAME: users - Resources: - UsersService: - Type: AWS::Serverless::Function - Properties: - Handler: users.main.lambda_handler - CodeUri: src - Layers: - # Latest version: https://awslabs.github.io/aws-lambda-powertools-python/latest/#lambda-layer - - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPython:4 - Events: - ByUser: - Type: Api - Properties: - Path: /users/{name} - Method: GET - AllUsers: - Type: Api - Properties: - Path: /users - Method: GET - HealthCheck: - Type: Api - Properties: - Path: /status - Method: GET - Outputs: - UsersApiEndpoint: - Description: "API Gateway endpoint URL for Prod environment for Users Function" - Value: !Sub "https://${ServerlessRestApi}.execute-api.${AWS::Region}.amazonaws.com/Prod" - AllUsersURL: - Description: "URL to fetch all registered users" - Value: !Sub "https://${ServerlessRestApi}.execute-api.${AWS::Region}.amazonaws.com/Prod/users" - ByUserURL: - Description: "URL to retrieve details by user" - Value: !Sub 
"https://${ServerlessRestApi}.execute-api.${AWS::Region}.amazonaws.com/Prod/users/test" - UsersServiceFunctionArn: - Description: "Users Lambda Function ARN" - Value: !GetAtt UsersService.Arn + --8<-- "docs/examples/core/event_handler/api_gateway/layout/template.yml" ``` === "src/users/main.py" ```python hl_lines="8 14-15" - from typing import Dict - - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.event_handler import APIGatewayRestResolver - from aws_lambda_powertools.logging.correlation_paths import APPLICATION_LOAD_BALANCER - from aws_lambda_powertools.utilities.typing import LambdaContext - - from .routers import health, users - - tracer = Tracer() - logger = Logger() - app = APIGatewayRestResolver() - - app.include_router(health.router) - app.include_router(users.router) - - - @logger.inject_lambda_context(correlation_id_path=API_GATEWAY_REST) - @tracer.capture_lambda_handler - def lambda_handler(event: Dict, context: LambdaContext): - return app.resolve(event, context) + --8<-- "docs/examples/core/event_handler/api_gateway/layout/main.py" ``` === "src/users/routers/health.py" ```python hl_lines="4 6-7 10" - from typing import Dict - - from aws_lambda_powertools import Logger - from aws_lambda_powertools.event_handler.api_gateway import Router - - router = Router() - logger = Logger(child=True) - - - @router.get("/status") - def health() -> Dict: - logger.debug("Health check called") - return {"status": "OK"} + --8<-- "docs/examples/core/event_handler/api_gateway/layout/health.py" ``` === "tests/functional/test_users.py" - ```python hl_lines="3" - import json - - from src.users import main # follows namespace package from root - - - def test_lambda_handler(apigw_event, lambda_context): - ret = main.lambda_handler(apigw_event, lambda_context) - expected = json.dumps({"message": "hello universe"}, separators=(",", ":")) - - assert ret["statusCode"] == 200 - assert ret["body"] == expected + ```python hl_lines="3" + --8<-- 
"docs/examples/core/event_handler/api_gateway/layout/test_users.py" ``` ### Considerations @@ -1338,53 +744,14 @@ You can test your routes by passing a proxy event request where `path` and `http === "test_app.py" - ```python hl_lines="18-24" - from dataclasses import dataclass - - import pytest - import app - - @pytest.fixture - def lambda_context(): - @dataclass - class LambdaContext: - function_name: str = "test" - memory_limit_in_mb: int = 128 - invoked_function_arn: str = "arn:aws:lambda:eu-west-1:809313241:function:test" - aws_request_id: str = "52fdfc07-2182-154f-163f-5f0f9a621d72" - - return LambdaContext() - - def test_lambda_handler(lambda_context): - minimal_event = { - "path": "/hello", - "httpMethod": "GET", - "requestContext": { # correlation ID - "requestId": "c6af9ac6-7b61-11e6-9a41-93e8deadbeef" - } - } - - app.lambda_handler(minimal_event, lambda_context) + ```python hl_lines="20-26" + --8<-- "docs/examples/core/event_handler/api_gateway/test_app.py" ``` === "app.py" ```python - from aws_lambda_powertools import Logger - from aws_lambda_powertools.logging import correlation_paths - from aws_lambda_powertools.event_handler import APIGatewayRestResolver - - logger = Logger() - app = APIGatewayRestResolver() # API Gateway REST API (v1) - - @app.get("/hello") - def get_hello_universe(): - return {"message": "hello universe"} - - # You can continue to use other utilities just as before - @logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) - def lambda_handler(event, context): - return app.resolve(event, context) + --8<-- "docs/examples/core/event_handler/api_gateway/app_test.py" ``` ## FAQ diff --git a/docs/core/event_handler/appsync.md b/docs/core/event_handler/appsync.md index 95457aa7736..bd90251a645 100644 --- a/docs/core/event_handler/appsync.md +++ b/docs/core/event_handler/appsync.md @@ -38,136 +38,8 @@ This is the sample infrastructure we are using for the initial examples with a A === "template.yml" - ```yaml 
hl_lines="37-42 50-55 61-62 78-91 96-120" - AWSTemplateFormatVersion: '2010-09-09' - Transform: AWS::Serverless-2016-10-31 - Description: Hello world Direct Lambda Resolver - - Globals: - Function: - Timeout: 5 - Runtime: python3.8 - Tracing: Active - Environment: - Variables: - # Powertools env vars: https://awslabs.github.io/aws-lambda-powertools-python/latest/#environment-variables - LOG_LEVEL: INFO - POWERTOOLS_LOGGER_SAMPLE_RATE: 0.1 - POWERTOOLS_LOGGER_LOG_EVENT: true - POWERTOOLS_SERVICE_NAME: sample_resolver - - Resources: - HelloWorldFunction: - Type: AWS::Serverless::Function - Properties: - Handler: app.lambda_handler - CodeUri: hello_world - Description: Sample Lambda Powertools Direct Lambda Resolver - Tags: - SOLUTION: LambdaPowertoolsPython - - # IAM Permissions and Roles - - AppSyncServiceRole: - Type: "AWS::IAM::Role" - Properties: - AssumeRolePolicyDocument: - Version: "2012-10-17" - Statement: - - - Effect: "Allow" - Principal: - Service: - - "appsync.amazonaws.com" - Action: - - "sts:AssumeRole" - - InvokeLambdaResolverPolicy: - Type: "AWS::IAM::Policy" - Properties: - PolicyName: "DirectAppSyncLambda" - PolicyDocument: - Version: "2012-10-17" - Statement: - - - Effect: "Allow" - Action: "lambda:invokeFunction" - Resource: - - !GetAtt HelloWorldFunction.Arn - Roles: - - !Ref AppSyncServiceRole - - # GraphQL API - - HelloWorldApi: - Type: "AWS::AppSync::GraphQLApi" - Properties: - Name: HelloWorldApi - AuthenticationType: "API_KEY" - XrayEnabled: true - - HelloWorldApiKey: - Type: AWS::AppSync::ApiKey - Properties: - ApiId: !GetAtt HelloWorldApi.ApiId - - HelloWorldApiSchema: - Type: "AWS::AppSync::GraphQLSchema" - Properties: - ApiId: !GetAtt HelloWorldApi.ApiId - Definition: | - schema { - query:Query - } - - type Query { - getTodo(id: ID!): Todo - listTodos: [Todo] - } - - type Todo { - id: ID! 
- title: String - description: String - done: Boolean - } - - # Lambda Direct Data Source and Resolver - - HelloWorldFunctionDataSource: - Type: "AWS::AppSync::DataSource" - Properties: - ApiId: !GetAtt HelloWorldApi.ApiId - Name: "HelloWorldLambdaDirectResolver" - Type: "AWS_LAMBDA" - ServiceRoleArn: !GetAtt AppSyncServiceRole.Arn - LambdaConfig: - LambdaFunctionArn: !GetAtt HelloWorldFunction.Arn - - ListTodosResolver: - Type: "AWS::AppSync::Resolver" - Properties: - ApiId: !GetAtt HelloWorldApi.ApiId - TypeName: "Query" - FieldName: "listTodos" - DataSourceName: !GetAtt HelloWorldFunctionDataSource.Name - - GetTodoResolver: - Type: "AWS::AppSync::Resolver" - Properties: - ApiId: !GetAtt HelloWorldApi.ApiId - TypeName: "Query" - FieldName: "getTodo" - DataSourceName: !GetAtt HelloWorldFunctionDataSource.Name - - - Outputs: - HelloWorldFunction: - Description: "Hello World Lambda Function ARN" - Value: !GetAtt HelloWorldFunction.Arn - - HelloWorldAPI: - Value: !GetAtt HelloWorldApi.Arn + ```yaml hl_lines="37-42 50-55 61-62 78-92 96-120" + --8<-- "docs/examples/core/event_handler/appsync/template.yml" ``` ### Resolver decorator @@ -181,54 +53,8 @@ Here's an example where we have two separate functions to resolve `getTodo` and === "app.py" - ```python hl_lines="3-5 9 31-32 39-40 47" - from aws_lambda_powertools import Logger, Tracer - - from aws_lambda_powertools.logging import correlation_paths - from aws_lambda_powertools.event_handler import AppSyncResolver - from aws_lambda_powertools.utilities.data_classes.appsync import scalar_types_utils - - tracer = Tracer(service="sample_resolver") - logger = Logger(service="sample_resolver") - app = AppSyncResolver() - - # Note that `creation_time` isn't available in the schema - # This utility also takes into account what info you make available at API level vs what's stored - TODOS = [ - { - "id": scalar_types_utils.make_id(), # type ID or String - "title": "First task", - "description": "String", - "done": False, - 
"creation_time": scalar_types_utils.aws_datetime(), # type AWSDateTime - }, - { - "id": scalar_types_utils.make_id(), - "title": "Second task", - "description": "String", - "done": True, - "creation_time": scalar_types_utils.aws_datetime(), - }, - ] - - - @app.resolver(type_name="Query", field_name="getTodo") - def get_todo(id: str = ""): - logger.info(f"Fetching Todo {id}") - todo = [todo for todo in TODOS if todo["id"] == id] - - return todo - - - @app.resolver(type_name="Query", field_name="listTodos") - def list_todos(): - return TODOS - - - @logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER) - @tracer.capture_lambda_handler - def lambda_handler(event, context): - return app.resolve(event, context) + ```python hl_lines="2-4 8 30-31 38-39 46" + --8<-- "docs/examples/core/event_handler/appsync/app_resolver_decorator.py" ``` === "schema.graphql" @@ -345,25 +171,8 @@ You can nest `app.resolver()` decorator multiple times when resolving fields wit === "nested_mappings.py" - ```python hl_lines="4 8 10-12 18" - from aws_lambda_powertools import Logger, Tracer - - from aws_lambda_powertools.logging import correlation_paths - from aws_lambda_powertools.event_handler import AppSyncResolver - - tracer = Tracer(service="sample_resolver") - logger = Logger(service="sample_resolver") - app = AppSyncResolver() - - @app.resolver(field_name="listLocations") - @app.resolver(field_name="locations") - def get_locations(name: str, description: str = ""): - return name + description - - @logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER) - @tracer.capture_lambda_handler - def lambda_handler(event, context): - return app.resolve(event, context) + ```python hl_lines="2 7 10-12 19" + --8<-- "docs/examples/core/event_handler/appsync/app_nested_mappings.py" ``` === "schema.graphql" @@ -396,28 +205,8 @@ You can nest `app.resolver()` decorator multiple times when resolving fields wit For Lambda Python3.8+ runtime, this 
utility supports async functions when you use in conjunction with `asyncio.run`. -```python hl_lines="5 9 11-13 21" title="Resolving GraphQL resolvers async" -import asyncio -from aws_lambda_powertools import Logger, Tracer - -from aws_lambda_powertools.logging import correlation_paths -from aws_lambda_powertools.event_handler import AppSyncResolver - -tracer = Tracer(service="sample_resolver") -logger = Logger(service="sample_resolver") -app = AppSyncResolver() - -@app.resolver(type_name="Query", field_name="listTodos") -async def list_todos(): - todos = await some_async_io_call() - return todos - -@logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER) -@tracer.capture_lambda_handler -def lambda_handler(event, context): - result = app.resolve(event, context) - - return asyncio.run(result) +```python hl_lines="4 9 12-14 23" title="Resolving GraphQL resolvers async" +--8<-- "docs/examples/core/event_handler/appsync/app_async_functions.py" ``` ### Amplify GraphQL Transformer @@ -463,53 +252,13 @@ Use the following code for `merchantInfo` and `searchMerchant` functions respect === "merchantInfo/src/app.py" - ```python hl_lines="4-5 9 11-12 15-16 23" - from aws_lambda_powertools import Logger, Tracer - - from aws_lambda_powertools.logging import correlation_paths - from aws_lambda_powertools.event_handler import AppSyncResolver - from aws_lambda_powertools.utilities.data_classes.appsync import scalar_types_utils - - tracer = Tracer(service="sample_graphql_transformer_resolver") - logger = Logger(service="sample_graphql_transformer_resolver") - app = AppSyncResolver() - - @app.resolver(type_name="Query", field_name="listLocations") - def list_locations(page: int = 0, size: int = 10): - return [{"id": 100, "name": "Smooth Grooves"}] - - @app.resolver(field_name="commonField") - def common_field(): - # Would match all fieldNames matching 'commonField' - return scalar_types_utils.make_id() - - @tracer.capture_lambda_handler - 
@logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER) - def lambda_handler(event, context): - app.resolve(event, context) + ```python hl_lines="2 4 8 11-12 16-17 25" + --8<-- "docs/examples/core/event_handler/appsync/app_merchant_info.py" ``` === "searchMerchant/src/app.py" - ```python hl_lines="1 4 6-7" - from aws_lambda_powertools.event_handler import AppSyncResolver - from aws_lambda_powertools.utilities.data_classes.appsync import scalar_types_utils - - app = AppSyncResolver() - - @app.resolver(type_name="Query", field_name="findMerchant") - def find_merchant(search: str): - return [ - { - "id": scalar_types_utils.make_id(), - "name": "Brewer Brewing", - "description": "Mike Brewer's IPA brewing place" - }, - { - "id": scalar_types_utils.make_id(), - "name": "Serverlessa's Bakery", - "description": "Lessa's sourdough place" - }, - ] + ```python hl_lines="1 4 7-8" + --8<-- "docs/examples/core/event_handler/appsync/app_merchant_search.py" ``` **Example AppSync GraphQL Transformer Function resolver events** @@ -604,34 +353,8 @@ You can subclass `AppSyncResolverEvent` to bring your own set of methods to hand === "custom_model.py" - ```python hl_lines="12-15 20 27" - from aws_lambda_powertools import Logger, Tracer - - from aws_lambda_powertools.logging import correlation_paths - from aws_lambda_powertools.event_handler import AppSyncResolver - from aws_lambda_powertools.utilities.data_classes.appsync_resolver_event import AppSyncResolverEvent - - tracer = Tracer(service="sample_resolver") - logger = Logger(service="sample_resolver") - app = AppSyncResolver() - - - class MyCustomModel(AppSyncResolverEvent): - @property - def country_viewer(self) -> str: - return self.request_headers.get("cloudfront-viewer-country") - - @app.resolver(field_name="listLocations") - @app.resolver(field_name="locations") - def get_locations(name: str, description: str = ""): - if app.current_event.country_viewer == "US": - ... 
- return name + description - - @logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER) - @tracer.capture_lambda_handler - def lambda_handler(event, context): - return app.resolve(event, context, data_model=MyCustomModel) + ```python hl_lines="11-14 20 28" + --8<-- "docs/examples/core/event_handler/appsync/app_custom_model.py" ``` === "schema.graphql" @@ -723,51 +446,16 @@ Let's assume you have `app.py` as your Lambda function entrypoint and routes in We import **Router** instead of **AppSyncResolver**; syntax wise is exactly the same. - ```python hl_lines="4 7 10 15" - from typing import Any, Dict, List - - from aws_lambda_powertools import Logger - from aws_lambda_powertools.event_handler.appsync import Router - - logger = Logger(child=True) - router = Router() - - - @router.resolver(type_name="Query", field_name="listLocations") - def list_locations(merchant_id: str) -> List[Dict[str, Any]]: - return [{"name": "Location name", "merchant_id": merchant_id}] - - - @router.resolver(type_name="Location", field_name="status") - def resolve_status(merchant_id: str) -> str: - logger.debug(f"Resolve status for merchant_id: {merchant_id}") - return "FOO" - ``` + ```python hl_lines="4 7 10 15" + --8<-- "docs/examples/core/event_handler/appsync/resolvers_location.py" + ``` === "app.py" We use `include_router` method and include all `location` operations registered in the `router` global object. 
- ```python hl_lines="8 13" - from typing import Dict - - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.event_handler import AppSyncResolver - from aws_lambda_powertools.logging.correlation_paths import APPSYNC_RESOLVER - from aws_lambda_powertools.utilities.typing import LambdaContext - - from resolvers import location - - tracer = Tracer() - logger = Logger() - app = AppSyncResolver() - app.include_router(location.router) - - - @tracer.capture_lambda_handler - @logger.inject_lambda_context(correlation_id_path=APPSYNC_RESOLVER) - def lambda_handler(event: Dict, context: LambdaContext): - app.resolve(event, context) + ```python hl_lines="3 13" + --8<-- "docs/examples/core/event_handler/appsync/app_router.py" ``` @@ -782,36 +470,13 @@ Here's an example of how you can test your synchronous resolvers: === "test_resolver.py" ```python - import json - import pytest - from pathlib import Path - - from src.index import app # import the instance of AppSyncResolver from your code - - def test_direct_resolver(): - # Load mock event from a file - json_file_path = Path("appSyncDirectResolver.json") - with open(json_file_path) as json_file: - mock_event = json.load(json_file) - - # Call the implicit handler - result = app(mock_event, {}) - - assert result == "created this value" + --8<-- "docs/examples/core/event_handler/appsync/test_resolver.py" ``` === "src/index.py" ```python - - from aws_lambda_powertools.event_handler import AppSyncResolver - - app = AppSyncResolver() - - @app.resolver(field_name="createSomething") - def create_something(): - return "created this value" - + --8<-- "docs/examples/core/event_handler/appsync/app_test.py" ``` === "appSyncDirectResolver.json" @@ -825,39 +490,13 @@ And an example for testing asynchronous resolvers. 
Note that this requires the ` === "test_async_resolver.py" ```python - import json - import pytest - from pathlib import Path - - from src.index import app # import the instance of AppSyncResolver from your code - - @pytest.mark.asyncio - async def test_direct_resolver(): - # Load mock event from a file - json_file_path = Path("appSyncDirectResolver.json") - with open(json_file_path) as json_file: - mock_event = json.load(json_file) - - # Call the implicit handler - result = await app(mock_event, {}) - - assert result == "created this value" + --8<-- "docs/examples/core/event_handler/appsync/test_async_resolver.py" ``` === "src/index.py" ```python - import asyncio - - from aws_lambda_powertools.event_handler import AppSyncResolver - - app = AppSyncResolver() - - @app.resolver(field_name="createSomething") - async def create_something_async(): - await asyncio.sleep(1) # Do async stuff - return "created this value" - + --8<-- "docs/examples/core/event_handler/appsync/app_async_test.py" ``` === "appSyncDirectResolver.json" diff --git a/docs/core/logger.md b/docs/core/logger.md index 0edc4aa3ba7..9747ee003d1 100644 --- a/docs/core/logger.md +++ b/docs/core/logger.md @@ -24,26 +24,16 @@ Setting | Description | Environment variable | Constructor parameter ???+ example **AWS Serverless Application Model (SAM)** -=== "template.yaml" - - ```yaml hl_lines="9 10" - Resources: - HelloWorldFunction: - Type: AWS::Serverless::Function - Properties: - Runtime: python3.8 - Environment: - Variables: - LOG_LEVEL: INFO - POWERTOOLS_SERVICE_NAME: example - ``` -=== "app.py" - - ```python hl_lines="2 4" - from aws_lambda_powertools import Logger - logger = Logger() # Sets service via env var - # OR logger = Logger(service="example") - ``` + === "template.yaml" + + ```yaml hl_lines="12 13" + --8<-- "docs/examples/core/logger/getting_started_template.yml" + ``` + === "app.py" + + ```python hl_lines="3-4" + --8<-- "docs/examples/core/logger/getting_started_app.py" + ``` ### Standard 
structured keys @@ -67,21 +57,8 @@ You can enrich your structured logs with key Lambda context information via `inj === "collect.py" - ```python hl_lines="5" - from aws_lambda_powertools import Logger - - logger = Logger(service="payment") - - @logger.inject_lambda_context - def handler(event, context): - logger.info("Collecting payment") - - # You can log entire objects too - logger.info({ - "operation": "collect_payment", - "charge_id": event['charge_id'] - }) - ... + ```python hl_lines="6" + --8<-- "docs/examples/core/logger/inject_lambda_context.py" ``` === "Example CloudWatch Logs excerpt" @@ -133,14 +110,8 @@ When debugging in non-production environments, you can instruct Logger to log th ???+ warning This is disabled by default to prevent sensitive info being logged -```python hl_lines="5" title="Logging incoming event" -from aws_lambda_powertools import Logger - -logger = Logger(service="payment") - -@logger.inject_lambda_context(log_event=True) -def handler(event, context): - ... 
+```python hl_lines="6" title="Logging incoming event" +--8<-- "docs/examples/core/logger/inject_lambda_context_log_event.py" ``` #### Setting a Correlation ID @@ -152,15 +123,8 @@ You can set a Correlation ID using `correlation_id_path` param by passing a [JME === "collect.py" - ```python hl_lines="5" - from aws_lambda_powertools import Logger - - logger = Logger(service="payment") - - @logger.inject_lambda_context(correlation_id_path="headers.my_request_id_header") - def handler(event, context): - logger.debug(f"Correlation ID => {logger.get_correlation_id()}") - logger.info("Collecting payment") + ```python hl_lines="6" + --8<-- "docs/examples/core/logger/inject_lambda_context_correlation_id_path.py" ``` === "Example Event" @@ -195,16 +159,8 @@ We provide [built-in JMESPath expressions](#built-in-correlation-id-expressions) === "collect.py" - ```python hl_lines="2 6" - from aws_lambda_powertools import Logger - from aws_lambda_powertools.logging import correlation_paths - - logger = Logger(service="payment") - - @logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) - def handler(event, context): - logger.debug(f"Correlation ID => {logger.get_correlation_id()}") - logger.info("Collecting payment") + ```python hl_lines="2 7" + --8<-- "docs/examples/core/logger/inject_lambda_context_correlation_paths.py" ``` === "Example Event" @@ -254,18 +210,8 @@ You can append your own keys to your existing Logger via `append_keys(**addition === "collect.py" - ```python hl_lines="9" - from aws_lambda_powertools import Logger - - logger = Logger(service="payment") - - def handler(event, context): - order_id = event.get("order_id") - - # this will ensure order_id key always has the latest value before logging - logger.append_keys(order_id=order_id) - - logger.info("Collecting payment") + ```python hl_lines="10" + --8<-- "docs/examples/core/logger/logger_append_keys.py" ``` === "Example CloudWatch Logs excerpt" @@ -297,12 +243,7 @@ It accepts any 
dictionary, and all keyword arguments will be added as part of th === "extra_parameter.py" ```python hl_lines="6" - from aws_lambda_powertools import Logger - - logger = Logger(service="payment") - - fields = { "request_id": "1123" } - logger.info("Collecting payment", extra=fields) + --8<-- "docs/examples/core/logger/logger_extra_parameter.py" ``` === "Example CloudWatch Logs excerpt" @@ -323,14 +264,8 @@ You can set a correlation_id to your existing Logger via `set_correlation_id(val === "collect.py" - ```python hl_lines="6" - from aws_lambda_powertools import Logger - - logger = Logger(service="payment") - - def handler(event, context): - logger.set_correlation_id(event["requestContext"]["requestId"]) - logger.info("Collecting payment") + ```python hl_lines="7" + --8<-- "docs/examples/core/logger/logger_set_correlation_id.py" ``` === "Example Event" @@ -338,7 +273,7 @@ You can set a correlation_id to your existing Logger via `set_correlation_id(val ```json hl_lines="3" { "requestContext": { - "requestId": "correlation_id_value" + "requestId": "correlation_id_value" } } ``` @@ -360,23 +295,15 @@ Alternatively, you can combine [Data Classes utility](../utilities/data_classes. === "collect.py" - ```python hl_lines="2 7-8" - from aws_lambda_powertools import Logger - from aws_lambda_powertools.utilities.data_classes import APIGatewayProxyEvent - - logger = Logger(service="payment") - - def handler(event, context): - event = APIGatewayProxyEvent(event) - logger.set_correlation_id(event.request_context.request_id) - logger.info("Collecting payment") + ```python hl_lines="2 8-9" + --8<-- "docs/examples/core/logger/logger_set_correlation_id_data_class.py" ``` === "Example Event" ```json hl_lines="3" { "requestContext": { - "requestId": "correlation_id_value" + "requestId": "correlation_id_value" } } ``` @@ -401,17 +328,8 @@ You can remove any additional key from Logger state using `remove_keys`. 
=== "collect.py" - ```python hl_lines="9" - from aws_lambda_powertools import Logger - - logger = Logger(service="payment") - - def handler(event, context): - logger.append_keys(sample_key="value") - logger.info("Collecting payment") - - logger.remove_keys(["sample_key"]) - logger.info("Collecting payment without sample key") + ```python hl_lines="10" + --8<-- "docs/examples/core/logger/logger_remove_keys.py" ``` === "Example CloudWatch Logs excerpt" @@ -450,19 +368,8 @@ Logger is commonly initialized in the global scope. Due to [Lambda Execution Con === "collect.py" - ```python hl_lines="5 8" - from aws_lambda_powertools import Logger - - logger = Logger(service="payment") - - @logger.inject_lambda_context(clear_state=True) - def handler(event, context): - if event.get("special_key"): - # Should only be available in the first request log - # as the second request doesn't contain `special_key` - logger.append_keys(debugging_key="value") - - logger.info("Collecting payment") + ```python hl_lines="6 9-11" + --8<-- "docs/examples/core/logger/inject_lambda_context_clear_state.py" ``` === "#1 request" @@ -510,14 +417,7 @@ Use `logger.exception` method to log contextual information about exceptions. Lo === "collect.py" ```python hl_lines="8" - from aws_lambda_powertools import Logger - - logger = Logger(service="payment") - - try: - raise ValueError("something went wrong") - except Exception: - logger.exception("Received an exception") + --8<-- "docs/examples/core/logger/logger_exception.py" ``` === "Example CloudWatch Logs excerpt" @@ -557,26 +457,14 @@ Logger supports inheritance via `child` parameter. This allows you to create mul === "collect.py" - ```python hl_lines="1 7" - import shared # Creates a child logger named "payment.shared" - from aws_lambda_powertools import Logger - - logger = Logger() # POWERTOOLS_SERVICE_NAME: "payment" - - def handler(event, context): - shared.inject_payment_id(event) - ... 
+ ```python hl_lines="1 9" + --8<-- "docs/examples/core/logger/shared_logger_app.py" ``` === "shared.py" - ```python hl_lines="6" - from aws_lambda_powertools import Logger - - logger = Logger(child=True) # POWERTOOLS_SERVICE_NAME: "payment" - - def inject_payment_id(event): - logger.structure_logs(append=True, payment_id=event.get("payment_id")) + ```python hl_lines="7" + --8<-- "docs/examples/core/logger/shared_logger_child.py" ``` In this example, `Logger` will create a parent logger named `payment` and a child logger named `payment.shared`. Changes in either parent or child logger will be propagated bi-directionally. @@ -602,15 +490,8 @@ Sampling decision happens at the Logger initialization. This means sampling may === "collect.py" - ```python hl_lines="4 7" - from aws_lambda_powertools import Logger - - # Sample 10% of debug logs e.g. 0.1 - logger = Logger(service="payment", sample_rate=0.1) - - def handler(event, context): - logger.debug("Verifying whether order_id is present") - logger.info("Collecting payment") + ```python hl_lines="4 8" + --8<-- "docs/examples/core/logger/logger_sample_rate.py" ``` === "Example CloudWatch Logs excerpt" @@ -661,12 +542,8 @@ Parameter | Description | Default **`log_record_order`** | set order of log keys when logging | `["level", "location", "message", "timestamp"]` **`kwargs`** | key-value to be included in log messages | `None` -```python hl_lines="2 4-5" title="Pre-configuring Lambda Powertools Formatter" -from aws_lambda_powertools import Logger -from aws_lambda_powertools.logging.formatter import LambdaPowertoolsFormatter - -formatter = LambdaPowertoolsFormatter(utc=True, log_record_order=["message"]) -logger = Logger(service="example", logger_formatter=formatter) +```python hl_lines="2 4-8" title="Pre-configuring Lambda Powertools Formatter" +--8<-- "docs/examples/core/logger/logging_formatter.py" ``` ### Migrating from other Loggers @@ -692,32 +569,14 @@ For child Loggers, we introspect the name of your module where 
`Logger(child=Tru === "incorrect_logger_inheritance.py" - ```python hl_lines="4 10" - import my_module - from aws_lambda_powertools import Logger - - logger = Logger(service="payment") - ... - - # my_module.py - from aws_lambda_powertools import Logger - - logger = Logger(child=True) + ```python hl_lines="5 11" + --8<-- "docs/examples/core/logger/incorrect_logger_inheritance.py" ``` === "correct_logger_inheritance.py" - ```python hl_lines="4 10" - import my_module - from aws_lambda_powertools import Logger - - logger = Logger(service="payment") - ... - - # my_module.py - from aws_lambda_powertools import Logger - - logger = Logger(service="payment", child=True) + ```python hl_lines="5 11" + --8<-- "docs/examples/core/logger/correct_logger_inheritance.py" ``` In this case, Logger will register a Logger named `payment`, and a Logger named `service_undefined`. The latter isn't inheriting from the parent, and will have no handler, resulting in no message being logged to standard output. @@ -736,21 +595,10 @@ You might want to continue to use the same date formatting style, or override `l Logger allows you to either change the format or suppress the following keys altogether at the initialization: `location`, `timestamp`, `level`, `xray_trace_id`. 
- === "lambda_handler.py" - ```python hl_lines="7 10" - from aws_lambda_powertools import Logger - - date_format = "%m/%d/%Y %I:%M:%S %p" - location_format = "[%(funcName)s] %(module)s" - - # override location and timestamp format - logger = Logger(service="payment", location=location_format, datefmt=date_format) - # suppress the location key with a None value - logger_two = Logger(service="payment", location=None) - - logger.info("Collecting payment") + ```python hl_lines="3-4 7-11 14" + --8<-- "docs/examples/core/logger/overriding_log_records.py" ``` === "Example CloudWatch Logs excerpt" @@ -770,17 +618,8 @@ You can change the order of [standard Logger keys](#standard-structured-keys) or === "lambda_handler.py" - ```python hl_lines="4 7" - from aws_lambda_powertools import Logger - - # make message as the first key - logger = Logger(service="payment", log_record_order=["message"]) - - # make request_id that will be added later as the first key - # Logger(service="payment", log_record_order=["request_id"]) - - # Default key sorting order when omit - # Logger(service="payment", log_record_order=["level","location","message","timestamp"]) + ```python hl_lines="4 7 10" + --8<-- "docs/examples/core/logger/logger_log_record_order.py" ``` === "Example CloudWatch Logs excerpt" @@ -800,13 +639,7 @@ You can change the order of [standard Logger keys](#standard-structured-keys) or By default, this Logger and standard logging library emits records using local time timestamp. You can override this behaviour via `utc` parameter: ```python hl_lines="6" title="Setting UTC timestamp by default" -from aws_lambda_powertools import Logger - -logger = Logger(service="payment") -logger.info("Local time") - -logger_in_utc = Logger(service="payment", utc=True) -logger_in_utc.info("GMT time zone") +--8<-- "docs/examples/core/logger/logger_utc.py" ``` #### Custom function for unserializable values @@ -815,19 +648,8 @@ By default, Logger uses `str` to handle values non-serializable by JSON. 
You can === "collect.py" - ```python hl_lines="3-4 9 12" - from aws_lambda_powertools import Logger - - def custom_json_default(value): - return f"" - - class Unserializable: - pass - - logger = Logger(service="payment", json_default=custom_json_default) - - def handler(event, context): - logger.info(Unserializable()) + ```python hl_lines="4-5 12 16" + --8<-- "docs/examples/core/logger/logger_json_default.py" ``` === "Example CloudWatch Logs excerpt" @@ -845,17 +667,8 @@ By default, Logger uses `str` to handle values non-serializable by JSON. You can By default, Logger uses StreamHandler and logs to standard output. You can override this behaviour via `logger_handler` parameter: -```python hl_lines="3-4 9 12" title="Configure Logger to output to a file" -import logging -from pathlib import Path - -from aws_lambda_powertools import Logger - -log_file = Path("/tmp/log.json") -log_file_handler = logging.FileHandler(filename=log_file) -logger = Logger(service="payment", logger_handler=log_file_handler) - -logger.info("Collecting payment") +```python hl_lines="6-8" title="Configure Logger to output to a file" +--8<-- "docs/examples/core/logger/logger_logger_handler.py" ``` #### Bring your own formatter @@ -869,31 +682,20 @@ For these, you can override the `serialize` method from [LambdaPowertoolsFormatt === "custom_formatter.py" - ```python hl_lines="6-7 12" - from aws_lambda_powertools import Logger - from aws_lambda_powertools.logging.formatter import LambdaPowertoolsFormatter - - from typing import Dict - - class CustomFormatter(LambdaPowertoolsFormatter): - def serialize(self, log: Dict) -> str: - """Serialize final structured log dict to JSON str""" - log["event"] = log.pop("message") # rename message key to event - return self.json_serializer(log) # use configured json serializer - - logger = Logger(service="example", logger_formatter=CustomFormatter()) - logger.info("hello") + ```python hl_lines="7-8 14" + --8<-- 
"docs/examples/core/logger/logger_logger_formatter.py" ``` === "Example CloudWatch Logs excerpt" - ```json hl_lines="5" - { - "level": "INFO", - "location": ":16", - "timestamp": "2021-12-30 13:41:53,413+0100", - "event": "hello" - } - ``` + + ```json hl_lines="5" + { + "level": "INFO", + "location": ":16", + "timestamp": "2021-12-30 13:41:53,413+0100", + "event": "hello" + } + ``` The `log` argument is the final log record containing [our standard keys](#standard-structured-keys), optionally [Lambda context keys](#capturing-lambda-context-info), and any custom key you might have added via [append_keys](#append_keys-method) or the [extra parameter](#extra-parameter). @@ -902,49 +704,10 @@ For exceptional cases where you want to completely replace our formatter logic, ???+ warning You will need to implement `append_keys`, `clear_state`, override `format`, and optionally `remove_keys` to keep the same feature set Powertools Logger provides. This also means keeping state of logging keys added. 
- === "collect.py" - ```python hl_lines="5 7 9-10 13 17 21 24 35" - import logging - from typing import Iterable, List, Optional - - from aws_lambda_powertools import Logger - from aws_lambda_powertools.logging.formatter import BasePowertoolsFormatter - - class CustomFormatter(BasePowertoolsFormatter): - def __init__(self, log_record_order: Optional[List[str]], *args, **kwargs): - self.log_record_order = log_record_order or ["level", "location", "message", "timestamp"] - self.log_format = dict.fromkeys(self.log_record_order) - super().__init__(*args, **kwargs) - - def append_keys(self, **additional_keys): - # also used by `inject_lambda_context` decorator - self.log_format.update(additional_keys) - - def remove_keys(self, keys: Iterable[str]): - for key in keys: - self.log_format.pop(key, None) - - def clear_state(self): - self.log_format = dict.fromkeys(self.log_record_order) - - def format(self, record: logging.LogRecord) -> str: # noqa: A003 - """Format logging record as structured JSON str""" - return json.dumps( - { - "event": super().format(record), - "timestamp": self.formatTime(record), - "my_default_key": "test", - **self.log_format, - } - ) - - logger = Logger(service="payment", logger_formatter=CustomFormatter()) - - @logger.inject_lambda_context - def handler(event, context): - logger.info("Collecting payment") + ```python hl_lines="5 8 10-11 14 18 22 25 37" + --8<-- "docs/examples/core/logger/logger_logger_formatter_base_powertools_formatter.py" ``` === "Example CloudWatch Logs excerpt" @@ -967,21 +730,8 @@ By default, Logger uses `json.dumps` and `json.loads` as serializer and deserial As parameters don't always translate well between them, you can pass any callable that receives a `Dict` and return a `str`: -```python hl_lines="1 5-6 9-10" title="Using Rust orjson library as serializer" -import orjson - -from aws_lambda_powertools import Logger - -custom_serializer = orjson.dumps -custom_deserializer = orjson.loads - -logger = 
Logger(service="payment", - json_serializer=custom_serializer, - json_deserializer=custom_deserializer -) - -# when using parameters, you can pass a partial -# custom_serializer=functools.partial(orjson.dumps, option=orjson.OPT_SERIALIZE_NUMPY) +```python hl_lines="1 5-6 10-11" title="Using Rust orjson library as serializer" +--8<-- "docs/examples/core/logger/logger_json_serializer.py" ``` ## Testing your code @@ -996,48 +746,12 @@ This is a Pytest sample that provides the minimum information necessary for Logg Note that dataclasses are available in Python 3.7+ only. ```python - from dataclasses import dataclass - - import pytest - - @pytest.fixture - def lambda_context(): - @dataclass - class LambdaContext: - function_name: str = "test" - memory_limit_in_mb: int = 128 - invoked_function_arn: str = "arn:aws:lambda:eu-west-1:809313241:function:test" - aws_request_id: str = "52fdfc07-2182-154f-163f-5f0f9a621d72" - - return LambdaContext() - - def test_lambda_handler(lambda_context): - test_event = {'test': 'event'} - your_lambda_handler(test_event, lambda_context) # this will now have a Context object populated + --8<-- "docs/examples/core/logger/fake_lambda_context_for_logger.py" ``` === "fake_lambda_context_for_logger_py36.py" ```python - from collections import namedtuple - - import pytest - - @pytest.fixture - def lambda_context(): - lambda_context = { - "function_name": "test", - "memory_limit_in_mb": 128, - "invoked_function_arn": "arn:aws:lambda:eu-west-1:809313241:function:test", - "aws_request_id": "52fdfc07-2182-154f-163f-5f0f9a621d72", - } - - return namedtuple("LambdaContext", lambda_context.keys())(*lambda_context.values()) - - def test_lambda_handler(lambda_context): - test_event = {'test': 'event'} - - # this will now have a Context object populated - your_lambda_handler(test_event, lambda_context) + --8<-- "docs/examples/core/logger/fake_lambda_context_for_logger_py36.py" ``` ???+ tip @@ -1061,42 +775,16 @@ POWERTOOLS_LOG_DEDUPLICATION_DISABLED="1" 
pytest -o log_cli=1 You can enable the `botocore` and `boto3` logs by using the `set_stream_logger` method, this method will add a stream handler for the given name and level to the logging module. By default, this logs all boto3 messages to stdout. -```python hl_lines="6-7" title="Enabling AWS SDK logging" -from typing import Dict, List -from aws_lambda_powertools.utilities.typing import LambdaContext -from aws_lambda_powertools import Logger - -import boto3 -boto3.set_stream_logger() -boto3.set_stream_logger('botocore') - -logger = Logger() -client = boto3.client('s3') - - -def handler(event: Dict, context: LambdaContext) -> List: - response = client.list_buckets() - - return response.get("Buckets", []) +```python hl_lines="7-8" title="Enabling AWS SDK logging" +--8<-- "docs/examples/core/logger/faq_enable_boto3_logger.py" ``` **How can I enable powertools logging for imported libraries?** You can copy the Logger setup to all or sub-sets of registered external loggers. Use the `copy_config_to_registered_logger` method to do this. By default all registered loggers will be modified. You can change this behaviour by providing `include` and `exclude` attributes. You can also provide optional `log_level` attribute external loggers will be configured with. - ```python hl_lines="10" title="Cloning Logger config to all other registered standard loggers" -import logging - -from aws_lambda_powertools import Logger -from aws_lambda_powertools.logging import utils - -logger = Logger() - -external_logger = logging.logger() - -utils.copy_config_to_registered_loggers(source_logger=logger) -external_logger.info("test message") +--8<-- "docs/examples/core/logger/faq_utils_copy_config_to_registered_loggers.py" ``` **What's the difference between `append_keys` and `extra`?** @@ -1107,21 +795,8 @@ Here's an example where we persist `payment_id` not `request_id`. 
Note that `pay === "lambda_handler.py" - ```python hl_lines="6 10" - from aws_lambda_powertools import Logger - - logger = Logger(service="payment") - - def handler(event, context): - logger.append_keys(payment_id="123456789") - - try: - booking_id = book_flight() - logger.info("Flight booked successfully", extra={ "booking_id": booking_id}) - except BookingReservationError: - ... - - logger.info("goodbye") + ```python hl_lines="7 11" + --8<-- "docs/examples/core/logger/faq_append_keys_vs_extra.py" ``` === "Example CloudWatch Logs excerpt" diff --git a/docs/core/metrics.md b/docs/core/metrics.md index 99ee17106b3..ba7639e3fa3 100644 --- a/docs/core/metrics.md +++ b/docs/core/metrics.md @@ -41,31 +41,17 @@ Setting | Description | Environment variable | Constructor parameter ???+ example **AWS Serverless Application Model (SAM)** -=== "template.yml" - - ```yaml hl_lines="9 10" - Resources: - HelloWorldFunction: - Type: AWS::Serverless::Function - Properties: - Runtime: python3.8 - Environment: - Variables: - POWERTOOLS_SERVICE_NAME: payment - POWERTOOLS_METRICS_NAMESPACE: ServerlessAirline - ``` + === "template.yml" -=== "app.py" - - ```python hl_lines="4 6" - from aws_lambda_powertools import Metrics - from aws_lambda_powertools.metrics import MetricUnit + ```yaml hl_lines="12-13" + --8<-- "docs/examples/core/metrics/template.yml" + ``` - metrics = Metrics() # Sets metric namespace and service via env var - # OR - metrics = Metrics(namespace="ServerlessAirline", service="orders") # Sets metric namespace, and service as a metric dimension - ``` + === "app.py" + ```python hl_lines="4 6-8" + --8<-- "docs/examples/core/metrics/example_app.py" + ``` ### Creating metrics @@ -76,28 +62,13 @@ You can create metrics using `add_metric`, and you can create dimensions for all === "Metrics" - ```python hl_lines="8" - from aws_lambda_powertools import Metrics - from aws_lambda_powertools.metrics import MetricUnit - - metrics = Metrics(namespace="ExampleApplication", 
service="booking") - - @metrics.log_metrics - def lambda_handler(evt, ctx): - metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1) + ```python hl_lines="9" + --8<-- "docs/examples/core/metrics/metrics_app.py" ``` === "Metrics with custom dimensions" - ```python hl_lines="8-9" - from aws_lambda_powertools import Metrics - from aws_lambda_powertools.metrics import MetricUnit - - metrics = Metrics(namespace="ExampleApplication", service="booking") - - @metrics.log_metrics - def lambda_handler(evt, ctx): - metrics.add_dimension(name="environment", value="prod") - metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1) + ```python hl_lines="9-10" + --8<-- "docs/examples/core/metrics/metrics_custom_dimensions_app.py" ``` ???+ tip "Tip: Autocomplete Metric Units" @@ -118,28 +89,12 @@ If you'd like to remove them at some point, you can use `clear_default_dimension === "set_default_dimensions method" ```python hl_lines="5" - from aws_lambda_powertools import Metrics - from aws_lambda_powertools.metrics import MetricUnit - - metrics = Metrics(namespace="ExampleApplication", service="booking") - metrics.set_default_dimensions(environment="prod", another="one") - - @metrics.log_metrics - def lambda_handler(evt, ctx): - metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1) + --8<-- "docs/examples/core/metrics/set_default_dimensions.py" ``` === "with log_metrics decorator" - ```python hl_lines="5 7" - from aws_lambda_powertools import Metrics - from aws_lambda_powertools.metrics import MetricUnit - - metrics = Metrics(namespace="ExampleApplication", service="booking") - DEFAULT_DIMENSIONS = {"environment": "prod", "another": "one"} - - @metrics.log_metrics(default_dimensions=DEFAULT_DIMENSIONS) - def lambda_handler(evt, ctx): - metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1) + ```python hl_lines="5 8" + --8<-- "docs/examples/core/metrics/log_metrics_default_dimensions.py" ``` ### 
Flushing metrics @@ -150,15 +105,8 @@ This decorator also **validates**, **serializes**, and **flushes** all your metr === "app.py" - ```python hl_lines="6" - from aws_lambda_powertools import Metrics - from aws_lambda_powertools.metrics import MetricUnit - - metrics = Metrics(namespace="ExampleApplication", service="ExampleService") - - @metrics.log_metrics - def lambda_handler(evt, ctx): - metrics.add_metric(name="BookingConfirmation", unit=MetricUnit.Count, value=1) + ```python hl_lines="7" + --8<-- "docs/examples/core/metrics/flush_metrics.py" ``` === "Example CloudWatch Logs excerpt" @@ -199,14 +147,8 @@ This decorator also **validates**, **serializes**, and **flushes** all your metr If you want to ensure at least one metric is always emitted, you can pass `raise_on_empty_metrics` to the **log_metrics** decorator: -```python hl_lines="5" title="Raising SchemaValidationError exception if no metrics are added" -from aws_lambda_powertools.metrics import Metrics - -metrics = Metrics() - -@metrics.log_metrics(raise_on_empty_metrics=True) -def lambda_handler(evt, ctx): - ... +```python hl_lines="6" title="Raising SchemaValidationError exception if no metrics are added" +--8<-- "docs/examples/core/metrics/log_metrics_raise_on_empty_metrics.py" ``` ???+ tip "Suppressing warning messages on empty metrics" @@ -216,31 +158,16 @@ def lambda_handler(evt, ctx): When using multiple middlewares, use `log_metrics` as your **last decorator** wrapping all subsequent ones to prevent early Metric validations when code hasn't been run yet. 
-```python hl_lines="7-8" title="Example with multiple decorators" -from aws_lambda_powertools import Metrics, Tracer -from aws_lambda_powertools.metrics import MetricUnit - -tracer = Tracer(service="booking") -metrics = Metrics(namespace="ExampleApplication", service="booking") - -@metrics.log_metrics -@tracer.capture_lambda_handler -def lambda_handler(evt, ctx): - metrics.add_metric(name="BookingConfirmation", unit=MetricUnit.Count, value=1) +```python hl_lines="8-9" title="Example with multiple decorators" +--8<-- "docs/examples/core/metrics/log_metrics_multiple_decorators.py" ``` ### Capturing cold start metric You can optionally capture cold start metrics with `log_metrics` decorator via `capture_cold_start_metric` param. -```python hl_lines="5" title="Generating function cold start metric" -from aws_lambda_powertools import Metrics - -metrics = Metrics(service="ExampleService") - -@metrics.log_metrics(capture_cold_start_metric=True) -def lambda_handler(evt, ctx): - ... +```python hl_lines="6" title="Generating function cold start metric" +--8<-- "docs/examples/core/metrics/log_metrics_capture_cold_start_metric.py" ``` If it's a cold start invocation, this feature will: @@ -264,16 +191,8 @@ You can add high-cardinality data as part of your Metrics log with `add_metadata === "app.py" - ```python hl_lines="9" - from aws_lambda_powertools import Metrics - from aws_lambda_powertools.metrics import MetricUnit - - metrics = Metrics(namespace="ExampleApplication", service="booking") - - @metrics.log_metrics - def lambda_handler(evt, ctx): - metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1) - metrics.add_metadata(key="booking_id", value="booking_uuid") + ```python hl_lines="10" + --8<-- "docs/examples/core/metrics/add_metadata.py" ``` === "Example CloudWatch Logs excerpt" @@ -315,14 +234,7 @@ CloudWatch EMF uses the same dimensions across all your metrics. 
Use `single_met **unique metric = (metric_name + dimension_name + dimension_value)** ```python hl_lines="6-7" title="Generating an EMF blob with a single metric" -from aws_lambda_powertools import single_metric -from aws_lambda_powertools.metrics import MetricUnit - - -def lambda_handler(evt, ctx): - with single_metric(name="ColdStart", unit=MetricUnit.Count, value=1, namespace="ExampleApplication") as metric: - metric.add_dimension(name="function_context", value="$LATEST") - ... +--8<-- "docs/examples/core/metrics/single_metric.py" ``` ### Flushing metrics manually @@ -332,18 +244,8 @@ If you prefer not to use `log_metrics` because you might want to encapsulate add ???+ warning Metrics, dimensions and namespace validation still applies -```python hl_lines="9-11" title="Manually flushing and clearing metrics from memory" -import json -from aws_lambda_powertools import Metrics -from aws_lambda_powertools.metrics import MetricUnit - -metrics = Metrics(namespace="ExampleApplication", service="booking") - -def lambda_handler(evt, ctx): - metrics.add_metric(name="ColdStart", unit=MetricUnit.Count, value=1) - your_metrics_object = metrics.serialize_metric_set() - metrics.clear_metrics() - print(json.dumps(your_metrics_object)) +```python hl_lines="11-13" title="Manually flushing and clearing metrics from memory" +--8<-- "docs/examples/core/metrics/flush_metrics_manually.py" ``` ## Testing your code @@ -366,14 +268,7 @@ POWERTOOLS_SERVICE_NAME="Example" POWERTOOLS_METRICS_NAMESPACE="Application" pyt `Metrics` keep metrics in memory across multiple instances. If you need to test this behaviour, you can use the following Pytest fixture to ensure metrics are reset incl. 
cold start: ```python title="Clearing metrics between tests" -@pytest.fixture(scope="function", autouse=True) -def reset_metric_set(): - # Clear out every metric data prior to every test - metrics = Metrics() - metrics.clear_metrics() - metrics_global.is_cold_start = True # ensure each test has cold start - metrics.clear_default_dimensions() # remove persisted default dimensions, if any - yield +--8<-- "docs/examples/core/metrics/clear_metrics_between_tests.py" ``` ### Functional testing @@ -382,68 +277,14 @@ As metrics are logged to standard output, you can read standard output and asser === "Assert single EMF blob with pytest.py" - ```python hl_lines="6 9-10 23-34" - from aws_lambda_powertools import Metrics - from aws_lambda_powertools.metrics import MetricUnit - - import json - - def test_log_metrics(capsys): - # GIVEN Metrics is initialized - metrics = Metrics(namespace="ServerlessAirline") - - # WHEN we utilize log_metrics to serialize - # and flush all metrics at the end of a function execution - @metrics.log_metrics - def lambda_handler(evt, ctx): - metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1) - metrics.add_dimension(name="environment", value="prod") - - lambda_handler({}, {}) - log = capsys.readouterr().out.strip() # remove any extra line - metrics_output = json.loads(log) # deserialize JSON str - - # THEN we should have no exceptions - # and a valid EMF object should be flushed correctly - assert "SuccessfulBooking" in log # basic string assertion in JSON str - assert "SuccessfulBooking" in metrics_output["_aws"]["CloudWatchMetrics"][0]["Metrics"][0]["Name"] + ```python hl_lines="7 15-16 24-25" + --8<-- "docs/examples/core/metrics/functional_testing.py" ``` === "Assert multiple EMF blobs with pytest" - ```python hl_lines="8-9 11 21-23 25 29-30 32" - from aws_lambda_powertools import Metrics - from aws_lambda_powertools.metrics import MetricUnit - - from collections import namedtuple - - import json - - def 
capture_metrics_output_multiple_emf_objects(capsys): - return [json.loads(line.strip()) for line in capsys.readouterr().out.split("\n") if line] - - def test_log_metrics(capsys): - # GIVEN Metrics is initialized - metrics = Metrics(namespace="ServerlessAirline") - - # WHEN log_metrics is used with capture_cold_start_metric - @metrics.log_metrics(capture_cold_start_metric=True) - def lambda_handler(evt, ctx): - metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1) - metrics.add_dimension(name="environment", value="prod") - - # log_metrics uses function_name property from context to add as a dimension for cold start metric - LambdaContext = namedtuple("LambdaContext", "function_name") - lambda_handler({}, LambdaContext("example_fn") - - cold_start_blob, custom_metrics_blob = capture_metrics_output_multiple_emf_objects(capsys) - - # THEN ColdStart metric and function_name dimension should be logged - # in a separate EMF blob than the application metrics - assert cold_start_blob["ColdStart"] == [1.0] - assert cold_start_blob["function_name"] == "example_fn" - - assert "SuccessfulBooking" in custom_metrics_blob # as per previous example + ```python hl_lines="8-9 12 22-24 26 30-31 33" + --8<-- "docs/examples/core/metrics/functional_testing_multiple_blobs.py" ``` ???+ tip diff --git a/docs/core/tracer.md b/docs/core/tracer.md index 363611bbbc0..e9bb3ed400c 100644 --- a/docs/core/tracer.md +++ b/docs/core/tracer.md @@ -3,7 +3,7 @@ title: Tracer description: Core utility --- -Tracer is an opinionated thin wrapper for [AWS X-Ray Python SDK](https://github.com/aws/aws-xray-sdk-python/). +Tracer is an opinionated thin wrapper for [AWS X-Ray Python SDK](https://github.com/aws/aws-xray-sdk-python/){target="_blank"}. ![Tracer showcase](../media/tracer_utility_showcase.png) @@ -18,35 +18,18 @@ Tracer is an opinionated thin wrapper for [AWS X-Ray Python SDK](https://github. 
### Permissions -Before your use this utility, your AWS Lambda function [must have permissions](https://docs.aws.amazon.com/lambda/latest/dg/services-xray.html#services-xray-permissions) to send traces to AWS X-Ray. - -```yaml hl_lines="6 9" title="AWS Serverless Application Model (SAM) example" -Resources: - HelloWorldFunction: - Type: AWS::Serverless::Function - Properties: - Runtime: python3.8 - Tracing: Active - Environment: - Variables: - POWERTOOLS_SERVICE_NAME: example +Before your use this utility, your AWS Lambda function [must have permissions](https://docs.aws.amazon.com/lambda/latest/dg/services-xray.html#services-xray-permissions){target="_blank"} to send traces to AWS X-Ray. + +```yaml hl_lines="10 13" title="AWS Serverless Application Model (SAM) example" +--8<-- "docs/examples/core/tracer/template.yml" ``` ### Lambda handler You can quickly start by initializing `Tracer` and use `capture_lambda_handler` decorator for your Lambda handler. -```python hl_lines="1 3 6" title="Tracing Lambda handler with capture_lambda_handler" -from aws_lambda_powertools import Tracer - -tracer = Tracer() # Sets service via env var -# OR tracer = Tracer(service="example") - -@tracer.capture_lambda_handler -def handler(event, context): - charge_id = event.get('charge_id') - payment = collect_payment(charge_id) - ... +```python hl_lines="1 3 7" title="Tracing Lambda handler with capture_lambda_handler" +--8<-- "docs/examples/core/tracer/capture_lambda_handler.py" ``` `capture_lambda_handler` performs these additional tasks to ease operations: @@ -57,41 +40,24 @@ def handler(event, context): ### Annotations & Metadata -**Annotations** are key-values associated with traces and indexed by AWS X-Ray. You can use them to filter traces and to create [Trace Groups](https://aws.amazon.com/about-aws/whats-new/2018/11/aws-xray-adds-the-ability-to-group-traces/) to slice and dice your transactions. +**Annotations** are key-values associated with traces and indexed by AWS X-Ray. 
You can use them to filter traces and to create [Trace Groups](https://aws.amazon.com/about-aws/whats-new/2018/11/aws-xray-adds-the-ability-to-group-traces/){target="_blank"} to slice and dice your transactions. -```python hl_lines="7" title="Adding annotations with put_annotation method" -from aws_lambda_powertools import Tracer -tracer = Tracer() - -@tracer.capture_lambda_handler -def handler(event, context): - ... - tracer.put_annotation(key="PaymentStatus", value="SUCCESS") +```python hl_lines="9" title="Adding annotations with put_annotation method" +--8<-- "docs/examples/core/tracer/put_annotation.py" ``` **Metadata** are key-values also associated with traces but not indexed by AWS X-Ray. You can use them to add additional context for an operation using any native object. -```python hl_lines="8" title="Adding arbitrary metadata with put_metadata method" -from aws_lambda_powertools import Tracer -tracer = Tracer() - -@tracer.capture_lambda_handler -def handler(event, context): - ... - ret = some_logic() - tracer.put_metadata(key="payment_response", value=ret) +```python hl_lines="10" title="Adding arbitrary metadata with put_metadata method" +--8<-- "docs/examples/core/tracer/put_metadata.py" ``` ### Synchronous functions You can trace synchronous functions using the `capture_method` decorator. -```python hl_lines="7 13" title="Tracing an arbitrary function with capture_method" -@tracer.capture_method -def collect_payment(charge_id): - ret = requests.post(PAYMENT_ENDPOINT) # logic - tracer.put_annotation("PAYMENT_STATUS", "SUCCESS") # custom annotation - return ret +```python hl_lines="6 9" title="Tracing an arbitrary function with capture_method" +--8<-- "docs/examples/core/tracer/capture_method_sync.py" ``` ???+ note "Note: Function responses are auto-captured and stored as JSON, by default." @@ -101,7 +67,6 @@ def collect_payment(charge_id): The serialization is performed by aws-xray-sdk via `jsonpickle` module. 
This can cause side effects for file-like objects like boto S3 `StreamingBody`, where its response will be read only once during serialization. - ### Asynchronous and generator functions ???+ warning @@ -111,47 +76,20 @@ You can trace asynchronous functions and generator functions (including context === "Async" - ```python hl_lines="7" - import asyncio - import contextlib - from aws_lambda_powertools import Tracer - - tracer = Tracer() - - @tracer.capture_method - async def collect_payment(): - ... + ```python hl_lines="9" + --8<-- "docs/examples/core/tracer/capture_method_async.py" ``` === "Context manager" - ```python hl_lines="7-8" - import asyncio - import contextlib - from aws_lambda_powertools import Tracer - - tracer = Tracer() - - @contextlib.contextmanager - @tracer.capture_method - def collect_payment_ctxman(): - yield result - ... + ```python hl_lines="9-10" + --8<-- "docs/examples/core/tracer/capture_method_context_manager.py" ``` === "Generators" ```python hl_lines="9" - import asyncio - import contextlib - from aws_lambda_powertools import Tracer - - tracer = Tracer() - - @tracer.capture_method - def collect_payment_gen(): - yield result - ... 
+ --8<-- "docs/examples/core/tracer/capture_method_generators.py" ``` ## Advanced @@ -163,13 +101,7 @@ Tracer automatically patches all [supported libraries by X-Ray](https://docs.aws If you're looking to shave a few microseconds, or milliseconds depending on your function memory configuration, you can patch specific modules using `patch_modules` param: ```python hl_lines="7" title="Example of explicitly patching boto3 and requests only" -import boto3 -import requests - -from aws_lambda_powertools import Tracer - -modules_to_be_patched = ["boto3", "requests"] -tracer = Tracer(patch_modules=modules_to_be_patched) +--8<-- "docs/examples/core/tracer/patch_modules.py" ``` ### Disabling response auto-capture @@ -183,27 +115,13 @@ Use **`capture_response=False`** parameter in both `capture_lambda_handler` and === "sensitive_data_scenario.py" - ```python hl_lines="3 7" - from aws_lambda_powertools import Tracer - - @tracer.capture_method(capture_response=False) - def fetch_sensitive_information(): - return "sensitive_information" - - @tracer.capture_lambda_handler(capture_response=False) - def handler(event, context): - sensitive_information = fetch_sensitive_information() + ```python hl_lines="6 11" + --8<-- "docs/examples/core/tracer/sensitive_data_scenario.py" ``` === "streaming_object_scenario.py" - ```python hl_lines="3" - from aws_lambda_powertools import Tracer - - @tracer.capture_method(capture_response=False) - def get_s3_object(bucket_name, object_key): - s3 = boto3.client("s3") - s3_object = get_object(Bucket=bucket_name, Key=object_key) - return s3_object + ```python hl_lines="8" + --8<-- "docs/examples/core/tracer/streaming_object_scenario.py" ``` ### Disabling exception auto-capture @@ -213,12 +131,8 @@ Use **`capture_error=False`** parameter in both `capture_lambda_handler` and `ca ???+ info Useful when returning sensitive information in exceptions/stack traces you don't control -```python hl_lines="3 5" title="Disabling exception auto-capture for tracing 
metadata" -from aws_lambda_powertools import Tracer - -@tracer.capture_lambda_handler(capture_error=False) -def handler(event, context): - raise ValueError("some sensitive info in the stack trace...") +```python hl_lines="6 8" title="Disabling exception auto-capture for tracing metadata" +--8<-- "docs/examples/core/tracer/capture_error_disable.py" ``` ### Ignoring certain HTTP endpoints @@ -227,93 +141,40 @@ You might have endpoints you don't want requests to be traced, perhaps due to th You can use `ignore_endpoint` method with the hostname and/or URLs you'd like it to be ignored - globs (`*`) are allowed. -```python title="Ignoring certain HTTP endpoints from being traced" -from aws_lambda_powertools import Tracer - -tracer = Tracer() -# ignore all calls to `ec2.amazon.com` -tracer.ignore_endpoint(hostname="ec2.amazon.com") -# ignore calls to `*.sensitive.com/password` and `*.sensitive.com/credit-card` -tracer.ignore_endpoint(hostname="*.sensitive.com", urls=["/password", "/credit-card"]) - - -def ec2_api_calls(): - return "suppress_api_responses" - -@tracer.capture_lambda_handler -def handler(event, context): - for x in long_list: - ec2_api_calls() +```python hl_lines="5 7" title="Ignoring certain HTTP endpoints from being traced" +--8<-- "docs/examples/core/tracer/ignore_endpoint.py" ``` - ### Tracing aiohttp requests ???+ info This snippet assumes you have aiohttp as a dependency -You can use `aiohttp_trace_config` function to create a valid [aiohttp trace_config object](https://docs.aiohttp.org/en/stable/tracing_reference.html). This is necessary since X-Ray utilizes aiohttp trace hooks to capture requests end-to-end. +You can use `aiohttp_trace_config` function to create a valid [aiohttp trace_config object](https://docs.aiohttp.org/en/stable/tracing_reference.html){target="_blank"}. This is necessary since X-Ray utilizes aiohttp trace hooks to capture requests end-to-end. 
-```python hl_lines="5 10" title="Tracing aiohttp requests" -import asyncio -import aiohttp - -from aws_lambda_powertools import Tracer -from aws_lambda_powertools.tracing import aiohttp_trace_config - -tracer = Tracer() - -async def aiohttp_task(): - async with aiohttp.ClientSession(trace_configs=[aiohttp_trace_config()]) as session: - async with session.get("https://httpbin.org/json") as resp: - resp = await resp.json() - return resp +```python hl_lines="6 12" title="Tracing aiohttp requests" +--8<-- "docs/examples/core/tracer/aiohttp_trace_config.py" ``` ### Escape hatch mechanism You can use `tracer.provider` attribute to access all methods provided by AWS X-Ray `xray_recorder` object. -This is useful when you need a feature available in X-Ray that is not available in the Tracer utility, for example [thread-safe](https://github.com/aws/aws-xray-sdk-python/#user-content-trace-threadpoolexecutor), or [context managers](https://github.com/aws/aws-xray-sdk-python/#user-content-start-a-custom-segmentsubsegment). - -```python hl_lines="7" title="Tracing a code block with in_subsegment escape hatch" -from aws_lambda_powertools import Tracer +This is useful when you need a feature available in X-Ray that is not available in the Tracer utility, for example [thread-safe](https://github.com/aws/aws-xray-sdk-python/#user-content-trace-threadpoolexecutor){target="_blank"}, or [context managers](https://github.com/aws/aws-xray-sdk-python/#user-content-start-a-custom-segmentsubsegment){target="_blank"}. 
-tracer = Tracer() - -@tracer.capture_lambda_handler -def handler(event, context): - with tracer.provider.in_subsegment('## custom subsegment') as subsegment: - ret = some_work() - subsegment.put_metadata('response', ret) +```python hl_lines="8" title="Tracing a code block with in_subsegment escape hatch" +--8<-- "docs/examples/core/tracer/tracer_provider_escape_hatches.py" ``` ### Concurrent asynchronous functions ???+ warning - [X-Ray SDK will raise an exception](https://github.com/aws/aws-xray-sdk-python/issues/164) when async functions are run and traced concurrently + [X-Ray SDK will raise an exception](https://github.com/aws/aws-xray-sdk-python/issues/164){target="_blank"} when async functions are run and traced concurrently A safe workaround mechanism is to use `in_subsegment_async` available via Tracer escape hatch (`tracer.provider`). -```python hl_lines="6 7 12 15 17" title="Workaround to safely trace async concurrent functions" -import asyncio - -from aws_lambda_powertools import Tracer -tracer = Tracer() - -async def another_async_task(): - async with tracer.provider.in_subsegment_async("## another_async_task") as subsegment: - subsegment.put_annotation(key="key", value="value") - subsegment.put_metadata(key="key", value="value", namespace="namespace") - ... - -async def another_async_task_2(): - ... - -@tracer.capture_method -async def collect_payment(charge_id): - asyncio.gather(another_async_task(), another_async_task_2()) - ... +```python hl_lines="8-9 15 19 21" title="Workaround to safely trace async concurrent functions" +--8<-- "docs/examples/core/tracer/concurrent_asynchronous_functions.py" ``` ### Reusing Tracer across your code @@ -329,28 +190,14 @@ Tracer keeps a copy of its configuration after the first initialization. 
This is === "handler.py" - ```python hl_lines="2 4 9" - from aws_lambda_powertools import Tracer - from payment import collect_payment - - tracer = Tracer(service="payment") - - @tracer.capture_lambda_handler - def handler(event, context): - charge_id = event.get('charge_id') - payment = collect_payment(charge_id) + ```python hl_lines="1 5 11" + --8<-- "docs/examples/core/tracer/reuse_handler.py" ``` === "payment.py" A new instance of Tracer will be created but will reuse the previous Tracer instance configuration, similar to a Singleton. - ```python hl_lines="3 5" - from aws_lambda_powertools import Tracer - - tracer = Tracer(service="payment") - - @tracer.capture_method - def collect_payment(charge_id: str): - ... + ```python hl_lines="3 6" + --8<-- "docs/examples/core/tracer/reuse_payment.py" ``` ## Testing your code @@ -361,4 +208,4 @@ Tracer is disabled by default when not running in the AWS Lambda environment - T * Use annotations on key operations to slice and dice traces, create unique views, and create metrics from it via Trace Groups * Use a namespace when adding metadata to group data more easily -* Annotations and metadata are added to the current subsegment opened. If you want them in a specific subsegment, use a [context manager](https://github.com/aws/aws-xray-sdk-python/#start-a-custom-segmentsubsegment) via the escape hatch mechanism +* Annotations and metadata are added to the current subsegment opened. 
If you want them in a specific subsegment, use a [context manager](https://github.com/aws/aws-xray-sdk-python/#start-a-custom-segmentsubsegment){target="_blank"} via the escape hatch mechanism diff --git a/docs/examples/core/event_handler/api_gateway/app_alb.py b/docs/examples/core/event_handler/api_gateway/app_alb.py new file mode 100644 index 00000000000..7083e8fc6be --- /dev/null +++ b/docs/examples/core/event_handler/api_gateway/app_alb.py @@ -0,0 +1,20 @@ +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import ALBResolver +from aws_lambda_powertools.logging import correlation_paths + +tracer = Tracer() +logger = Logger() +app = ALBResolver() + + +@app.get("/hello") +@tracer.capture_method +def get_hello_universe(): + return {"message": "hello universe"} + + +# You can continue to use other utilities just as before +@logger.inject_lambda_context(correlation_id_path=correlation_paths.APPLICATION_LOAD_BALANCER) +@tracer.capture_lambda_handler +def lambda_handler(event, context): + return app.resolve(event, context) diff --git a/docs/examples/core/event_handler/api_gateway/app_binary.py b/docs/examples/core/event_handler/api_gateway/app_binary.py new file mode 100644 index 00000000000..e9d79de33fb --- /dev/null +++ b/docs/examples/core/event_handler/api_gateway/app_binary.py @@ -0,0 +1,16 @@ +import os +from pathlib import Path + +from aws_lambda_powertools.event_handler.api_gateway import APIGatewayRestResolver, Response + +app = APIGatewayRestResolver() +logo_file: bytes = Path(os.getenv("LAMBDA_TASK_ROOT") + "/logo.svg").read_bytes() + + +@app.get("/logo") +def get_logo(): + return Response(status_code=200, content_type="image/svg+xml", body=logo_file) + + +def lambda_handler(event, context): + return app.resolve(event, context) diff --git a/docs/examples/core/event_handler/api_gateway/app_catch_all_routes.py b/docs/examples/core/event_handler/api_gateway/app_catch_all_routes.py new file mode 100644 index 
00000000000..2448ce95c3d
--- /dev/null
+++ b/docs/examples/core/event_handler/api_gateway/app_catch_all_routes.py
@@ -0,0 +1,12 @@
+from aws_lambda_powertools.event_handler import APIGatewayRestResolver
+
+app = APIGatewayRestResolver()
+
+
+@app.get(".+")
+def catch_any_route_after_any():
+    return {"path_received": app.current_event.path}
+
+
+def lambda_handler(event, context):
+    return app.resolve(event, context)
diff --git a/docs/examples/core/event_handler/api_gateway/app_compress.py b/docs/examples/core/event_handler/api_gateway/app_compress.py
new file mode 100644
index 00000000000..8c51ab035ba
--- /dev/null
+++ b/docs/examples/core/event_handler/api_gateway/app_compress.py
@@ -0,0 +1,12 @@
+from aws_lambda_powertools.event_handler import APIGatewayRestResolver
+
+app = APIGatewayRestResolver()
+
+
+@app.get("/hello", compress=True)
+def get_hello_you():
+    return {"message": "hello universe"}
+
+
+def lambda_handler(event, context):
+    return app.resolve(event, context)
diff --git a/docs/examples/core/event_handler/api_gateway/app_cors.py b/docs/examples/core/event_handler/api_gateway/app_cors.py
new file mode 100644
index 00000000000..47de8e74452
--- /dev/null
+++ b/docs/examples/core/event_handler/api_gateway/app_cors.py
@@ -0,0 +1,28 @@
+from aws_lambda_powertools import Logger, Tracer
+from aws_lambda_powertools.event_handler.api_gateway import APIGatewayRestResolver, CORSConfig
+from aws_lambda_powertools.logging import correlation_paths
+
+tracer = Tracer()
+logger = Logger()
+
+cors_config = CORSConfig(allow_origin="https://example.com", max_age=300)
+app = APIGatewayRestResolver(cors=cors_config)
+
+
+@app.get("/hello/<name>")
+@tracer.capture_method
+def get_hello_you(name):
+    return {"message": f"hello {name}"}
+
+
+@app.get("/hello", cors=False)  # optionally exclude CORS from response, if needed
+@tracer.capture_method
+def get_hello_no_cors_needed():
+    return {"message": "hello, no CORS needed for this path ;)"}
+
+
+# You can continue to use other
utilities just as before +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) +@tracer.capture_lambda_handler +def lambda_handler(event, context): + return app.resolve(event, context) diff --git a/docs/examples/core/event_handler/api_gateway/app_custom_domain.py b/docs/examples/core/event_handler/api_gateway/app_custom_domain.py new file mode 100644 index 00000000000..3aeece271a1 --- /dev/null +++ b/docs/examples/core/event_handler/api_gateway/app_custom_domain.py @@ -0,0 +1,19 @@ +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.logging import correlation_paths + +tracer = Tracer() +logger = Logger() +app = APIGatewayRestResolver(strip_prefixes=["/payment"]) + + +@app.get("/subscriptions/<subscription>") +@tracer.capture_method +def get_subscription(subscription): + return {"subscription_id": subscription} + + +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) +@tracer.capture_lambda_handler +def lambda_handler(event, context): + return app.resolve(event, context) diff --git a/docs/examples/core/event_handler/api_gateway/app_custom_serializer.py b/docs/examples/core/event_handler/api_gateway/app_custom_serializer.py new file mode 100644 index 00000000000..a4b5ae029f5 --- /dev/null +++ b/docs/examples/core/event_handler/api_gateway/app_custom_serializer.py @@ -0,0 +1,44 @@ +import json +from enum import Enum +from json import JSONEncoder +from typing import Dict + +from aws_lambda_powertools.event_handler import APIGatewayRestResolver + + +class CustomEncoder(JSONEncoder): + """Your custom json encoder""" + + def default(self, obj): + if isinstance(obj, Enum): + return obj.value + try: + iterable = iter(obj) + except TypeError: + pass + else: + return sorted(iterable) + return JSONEncoder.default(self, obj) + + +def custom_serializer(obj) -> str: + """Your custom serializer function APIGatewayRestResolver will 
use""" + return json.dumps(obj, cls=CustomEncoder) + + +# Assigning your custom serializer +app = APIGatewayRestResolver(serializer=custom_serializer) + + +class Color(Enum): + RED = 1 + BLUE = 2 + + +@app.get("/colors") +def get_color() -> Dict: + return { + # Color.RED will be serialized to 1 as expected now + "color": Color.RED, + "variations": {"light", "dark"}, + } diff --git a/docs/examples/core/event_handler/api_gateway/app_debug.py b/docs/examples/core/event_handler/api_gateway/app_debug.py new file mode 100644 index 00000000000..91c189575e2 --- /dev/null +++ b/docs/examples/core/event_handler/api_gateway/app_debug.py @@ -0,0 +1,12 @@ +from aws_lambda_powertools.event_handler import APIGatewayRestResolver + +app = APIGatewayRestResolver(debug=True) + + +@app.get("/hello") +def get_hello_universe(): + return {"message": "hello universe"} + + +def lambda_handler(event, context): + return app.resolve(event, context) diff --git a/docs/examples/core/event_handler/api_gateway/app_dynamic_routes.py b/docs/examples/core/event_handler/api_gateway/app_dynamic_routes.py new file mode 100644 index 00000000000..ec57a0c01e2 --- /dev/null +++ b/docs/examples/core/event_handler/api_gateway/app_dynamic_routes.py @@ -0,0 +1,20 @@ +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.logging import correlation_paths + +tracer = Tracer() +logger = Logger() +app = APIGatewayRestResolver() + + +@app.get("/hello/<name>") +@tracer.capture_method +def get_hello_you(name): + return {"message": f"hello {name}"} + + +# You can continue to use other utilities just as before +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) +@tracer.capture_lambda_handler +def lambda_handler(event, context): + return app.resolve(event, context) diff --git a/docs/examples/core/event_handler/api_gateway/app_exception_handler.py 
b/docs/examples/core/event_handler/api_gateway/app_exception_handler.py new file mode 100644 index 00000000000..d13f1b3d75e --- /dev/null +++ b/docs/examples/core/event_handler/api_gateway/app_exception_handler.py @@ -0,0 +1,35 @@ +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import content_types +from aws_lambda_powertools.event_handler.api_gateway import APIGatewayRestResolver, Response +from aws_lambda_powertools.logging import correlation_paths + +tracer = Tracer() +logger = Logger() +app = APIGatewayRestResolver() + + +@app.exception_handler(ValueError) +def handle_value_error(ex: ValueError): + metadata = {"path": app.current_event.path} + logger.error(f"Malformed request: {ex}", extra=metadata) + + return Response( + status_code=400, + content_type=content_types.TEXT_PLAIN, + body="Invalid request", + ) + + +@app.get("/hello") +@tracer.capture_method +def hello_name(): + name = app.current_event.get_query_string_value(name="name") + if name is None: + raise ValueError("name query string must be present") + return {"message": f"hello {name}"} + + +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) +@tracer.capture_lambda_handler +def lambda_handler(event, context): + return app.resolve(event, context) diff --git a/docs/examples/core/event_handler/api_gateway/app_headers.py b/docs/examples/core/event_handler/api_gateway/app_headers.py new file mode 100644 index 00000000000..fad6df20e26 --- /dev/null +++ b/docs/examples/core/event_handler/api_gateway/app_headers.py @@ -0,0 +1,15 @@ +from aws_lambda_powertools.event_handler import APIGatewayRestResolver + +app = APIGatewayRestResolver() + + +@app.get("/hello") +def get_hello_you(): + headers_as_dict = app.current_event.headers + name = app.current_event.get_header_value(name="X-Name", default_value="") + + return {"message": f"hello {name}"} + + +def lambda_handler(event, context): + return app.resolve(event, context) diff --git 
a/docs/examples/core/event_handler/api_gateway/app_http_api.py b/docs/examples/core/event_handler/api_gateway/app_http_api.py new file mode 100644 index 00000000000..6a86b2a4b9f --- /dev/null +++ b/docs/examples/core/event_handler/api_gateway/app_http_api.py @@ -0,0 +1,20 @@ +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import APIGatewayHttpResolver +from aws_lambda_powertools.logging import correlation_paths + +tracer = Tracer() +logger = Logger() +app = APIGatewayHttpResolver() + + +@app.get("/hello") +@tracer.capture_method +def get_hello_universe(): + return {"message": "hello universe"} + + +# You can continue to use other utilities just as before +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_HTTP) +@tracer.capture_lambda_handler +def lambda_handler(event, context): + return app.resolve(event, context) diff --git a/docs/examples/core/event_handler/api_gateway/app_http_errors.py b/docs/examples/core/event_handler/api_gateway/app_http_errors.py new file mode 100644 index 00000000000..806cb0bbc3b --- /dev/null +++ b/docs/examples/core/event_handler/api_gateway/app_http_errors.py @@ -0,0 +1,51 @@ +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.event_handler.exceptions import ( + BadRequestError, + InternalServerError, + NotFoundError, + ServiceError, + UnauthorizedError, +) +from aws_lambda_powertools.logging import correlation_paths + +tracer = Tracer() +logger = Logger() + +app = APIGatewayRestResolver() + + +@app.get(rule="/bad-request-error") +def bad_request_error(): + # HTTP 400 + raise BadRequestError("Missing required parameter") + + +@app.get(rule="/unauthorized-error") +def unauthorized_error(): + # HTTP 401 + raise UnauthorizedError("Unauthorized") + + +@app.get(rule="/not-found-error") +def not_found_error(): + # HTTP 404 + raise NotFoundError + + 
+@app.get(rule="/internal-server-error") +def internal_server_error(): + # HTTP 500 + raise InternalServerError("Internal server error") + + +@app.get(rule="/service-error", cors=True) +def service_error(): + raise ServiceError(502, "Something went wrong!") + # alternatively + # from http import HTTPStatus + # raise ServiceError(HTTPStatus.BAD_GATEWAY.value, "Something went wrong) + + +def handler(event, context): + return app.resolve(event, context) diff --git a/docs/examples/core/event_handler/api_gateway/app_http_methods.py b/docs/examples/core/event_handler/api_gateway/app_http_methods.py new file mode 100644 index 00000000000..124881939d8 --- /dev/null +++ b/docs/examples/core/event_handler/api_gateway/app_http_methods.py @@ -0,0 +1,22 @@ +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.logging import correlation_paths + +tracer = Tracer() +logger = Logger() +app = APIGatewayRestResolver() + + +# Only POST HTTP requests to the path /hello will route to this function +@app.post("/hello") +@tracer.capture_method +def get_hello_you(): + name = app.current_event.json_body.get("name") + return {"message": f"hello {name}"} + + +# You can continue to use other utilities just as before +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) +@tracer.capture_lambda_handler +def lambda_handler(event, context): + return app.resolve(event, context) diff --git a/docs/examples/core/event_handler/api_gateway/app_multi_http_methods.py b/docs/examples/core/event_handler/api_gateway/app_multi_http_methods.py new file mode 100644 index 00000000000..e8a5d2c1aa1 --- /dev/null +++ b/docs/examples/core/event_handler/api_gateway/app_multi_http_methods.py @@ -0,0 +1,22 @@ +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.logging import correlation_paths + 
+tracer = Tracer() +logger = Logger() +app = APIGatewayRestResolver() + + +# PUT and POST HTTP requests to the path /hello will route to this function +@app.route("/hello", method=["PUT", "POST"]) +@tracer.capture_method +def get_hello_you(): + name = app.current_event.json_body.get("name") + return {"message": f"hello {name}"} + + +# You can continue to use other utilities just as before +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) +@tracer.capture_lambda_handler +def lambda_handler(event, context): + return app.resolve(event, context) diff --git a/docs/examples/core/event_handler/api_gateway/app_nested_routes.py b/docs/examples/core/event_handler/api_gateway/app_nested_routes.py new file mode 100644 index 00000000000..7bf5ecffb1d --- /dev/null +++ b/docs/examples/core/event_handler/api_gateway/app_nested_routes.py @@ -0,0 +1,20 @@ +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.logging import correlation_paths + +tracer = Tracer() +logger = Logger() +app = APIGatewayRestResolver() + + +@app.get("/<message>/<name>") +@tracer.capture_method +def get_message(message, name): + return {"message": f"{message}, {name}"} + + +# You can continue to use other utilities just as before +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) +@tracer.capture_lambda_handler +def lambda_handler(event, context): + return app.resolve(event, context) diff --git a/docs/examples/core/event_handler/api_gateway/app_not_found.py b/docs/examples/core/event_handler/api_gateway/app_not_found.py new file mode 100644 index 00000000000..858515afa15 --- /dev/null +++ b/docs/examples/core/event_handler/api_gateway/app_not_found.py @@ -0,0 +1,33 @@ +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import content_types +from aws_lambda_powertools.event_handler.api_gateway import 
APIGatewayRestResolver, Response +from aws_lambda_powertools.event_handler.exceptions import NotFoundError +from aws_lambda_powertools.logging import correlation_paths + +tracer = Tracer() +logger = Logger() +app = APIGatewayRestResolver() + + +@app.not_found +@tracer.capture_method +def handle_not_found_errors(exc: NotFoundError) -> Response: + # Return 418 upon 404 errors + logger.info(f"Not found route: {app.current_event.path}") + return Response( + status_code=418, + content_type=content_types.TEXT_PLAIN, + body="I'm a teapot!", + ) + + +@app.get("/catch/me/if/you/can") +@tracer.capture_method +def catch_me_if_you_can(): + return {"message": "oh hey"} + + +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) +@tracer.capture_lambda_handler +def lambda_handler(event, context): + return app.resolve(event, context) diff --git a/docs/examples/core/event_handler/api_gateway/app_query_string.py b/docs/examples/core/event_handler/api_gateway/app_query_string.py new file mode 100644 index 00000000000..d5ebd87a450 --- /dev/null +++ b/docs/examples/core/event_handler/api_gateway/app_query_string.py @@ -0,0 +1,17 @@ +from aws_lambda_powertools.event_handler import APIGatewayRestResolver + +app = APIGatewayRestResolver() + + +@app.get("/hello") +def get_hello_you(): + query_strings_as_dict = app.current_event.query_string_parameters + json_payload = app.current_event.json_body + payload = app.current_event.body + + name = app.current_event.get_query_string_value(name="name", default_value="") + return {"message": f"hello {name}"} + + +def lambda_handler(event, context): + return app.resolve(event, context) diff --git a/docs/examples/core/event_handler/api_gateway/app_response.py b/docs/examples/core/event_handler/api_gateway/app_response.py new file mode 100644 index 00000000000..d71c99cb172 --- /dev/null +++ b/docs/examples/core/event_handler/api_gateway/app_response.py @@ -0,0 +1,22 @@ +import json + +from 
aws_lambda_powertools.event_handler.api_gateway import APIGatewayRestResolver, Response + +app = APIGatewayRestResolver() + + +@app.get("/hello") +def get_hello_you(): + payload = json.dumps({"message": "I'm a teapot"}) + custom_headers = {"X-Custom": "X-Value"} + + return Response( + status_code=418, + content_type="application/json", + body=payload, + headers=custom_headers, + ) + + +def lambda_handler(event, context): + return app.resolve(event, context) diff --git a/docs/examples/core/event_handler/api_gateway/app_rest_api.py b/docs/examples/core/event_handler/api_gateway/app_rest_api.py new file mode 100644 index 00000000000..edb981bd833 --- /dev/null +++ b/docs/examples/core/event_handler/api_gateway/app_rest_api.py @@ -0,0 +1,20 @@ +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.logging import correlation_paths + +tracer = Tracer() +logger = Logger() +app = APIGatewayRestResolver() + + +@app.get("/hello") +@tracer.capture_method +def get_hello_universe(): + return {"message": "hello universe"} + + +# You can continue to use other utilities just as before +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) +@tracer.capture_lambda_handler +def lambda_handler(event, context): + return app.resolve(event, context) diff --git a/docs/examples/core/event_handler/api_gateway/app_route_prefix.py b/docs/examples/core/event_handler/api_gateway/app_route_prefix.py new file mode 100644 index 00000000000..ea329173683 --- /dev/null +++ b/docs/examples/core/event_handler/api_gateway/app_route_prefix.py @@ -0,0 +1,13 @@ +from typing import Dict + +import users + +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.utilities.typing import LambdaContext + +app = APIGatewayRestResolver() +app.include_router(users.router, prefix="/users") # prefix '/users' to any route in `users.router` + + +def 
lambda_handler(event: Dict, context: LambdaContext): + return app.resolve(event, context) diff --git a/docs/examples/core/event_handler/api_gateway/app_split_routes.py b/docs/examples/core/event_handler/api_gateway/app_split_routes.py new file mode 100644 index 00000000000..855462f78ca --- /dev/null +++ b/docs/examples/core/event_handler/api_gateway/app_split_routes.py @@ -0,0 +1,15 @@ +from typing import Dict + +import users + +from aws_lambda_powertools import Logger +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.utilities.typing import LambdaContext + +logger = Logger() +app = APIGatewayRestResolver() +app.include_router(users.router) + + +def lambda_handler(event: Dict, context: LambdaContext): + return app.resolve(event, context) diff --git a/docs/examples/core/event_handler/api_gateway/app_test.py b/docs/examples/core/event_handler/api_gateway/app_test.py new file mode 100644 index 00000000000..dcf506c8239 --- /dev/null +++ b/docs/examples/core/event_handler/api_gateway/app_test.py @@ -0,0 +1,17 @@ +from aws_lambda_powertools import Logger +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.logging import correlation_paths + +logger = Logger() +app = APIGatewayRestResolver() # API Gateway REST API (v1) + + +@app.get("/hello") +def get_hello_universe(): + return {"message": "hello universe"} + + +# You can continue to use other utilities just as before +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) +def lambda_handler(event, context): + return app.resolve(event, context) diff --git a/docs/examples/core/event_handler/api_gateway/layout/health.py b/docs/examples/core/event_handler/api_gateway/layout/health.py new file mode 100644 index 00000000000..66778a56b24 --- /dev/null +++ b/docs/examples/core/event_handler/api_gateway/layout/health.py @@ -0,0 +1,13 @@ +from typing import Dict + +from aws_lambda_powertools import 
Logger +from aws_lambda_powertools.event_handler.api_gateway import Router + +router = Router() +logger = Logger(child=True) + + +@router.get("/status") +def health() -> Dict: + logger.debug("Health check called") + return {"status": "OK"} diff --git a/docs/examples/core/event_handler/api_gateway/layout/main.py b/docs/examples/core/event_handler/api_gateway/layout/main.py new file mode 100644 index 00000000000..6b2d40edb8a --- /dev/null +++ b/docs/examples/core/event_handler/api_gateway/layout/main.py @@ -0,0 +1,21 @@ +from typing import Dict + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.logging.correlation_paths import API_GATEWAY_REST +from aws_lambda_powertools.utilities.typing import LambdaContext + +from .routers import health, users + +tracer = Tracer() +logger = Logger() +app = APIGatewayRestResolver() + +app.include_router(health.router) +app.include_router(users.router) + + +@logger.inject_lambda_context(correlation_id_path=API_GATEWAY_REST) +@tracer.capture_lambda_handler +def lambda_handler(event: Dict, context: LambdaContext): + return app.resolve(event, context) diff --git a/docs/examples/core/event_handler/api_gateway/layout/template.yml b/docs/examples/core/event_handler/api_gateway/layout/template.yml new file mode 100644 index 00000000000..cca998f5aa0 --- /dev/null +++ b/docs/examples/core/event_handler/api_gateway/layout/template.yml @@ -0,0 +1,55 @@ +AWSTemplateFormatVersion: '2010-09-09' +Transform: AWS::Serverless-2016-10-31 +Description: Example service with multiple routes +Globals: + Function: + Timeout: 10 + MemorySize: 512 + Runtime: python3.9 + Tracing: Active + Architectures: + - x86_64 + Environment: + Variables: + LOG_LEVEL: INFO + POWERTOOLS_LOGGER_LOG_EVENT: true + POWERTOOLS_METRICS_NAMESPACE: MyServerlessApplication + POWERTOOLS_SERVICE_NAME: users +Resources: + UsersService: + Type: AWS::Serverless::Function + 
Properties: + Handler: users.main.lambda_handler + CodeUri: src + Layers: + # Latest version: https://awslabs.github.io/aws-lambda-powertools-python/latest/#lambda-layer + - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPython:17 + Events: + ByUser: + Type: Api + Properties: + Path: /users/{name} + Method: GET + AllUsers: + Type: Api + Properties: + Path: /users + Method: GET + HealthCheck: + Type: Api + Properties: + Path: /status + Method: GET +Outputs: + UsersApiEndpoint: + Description: "API Gateway endpoint URL for Prod environment for Users Function" + Value: !Sub "https://${ServerlessRestApi}.execute-api.${AWS::Region}.amazonaws.com/Prod" + AllUsersURL: + Description: "URL to fetch all registered users" + Value: !Sub "https://${ServerlessRestApi}.execute-api.${AWS::Region}.amazonaws.com/Prod/users" + ByUserURL: + Description: "URL to retrieve details by user" + Value: !Sub "https://${ServerlessRestApi}.execute-api.${AWS::Region}.amazonaws.com/Prod/users/test" + UsersServiceFunctionArn: + Description: "Users Lambda Function ARN" + Value: !GetAtt UsersService.Arn diff --git a/docs/examples/core/event_handler/api_gateway/layout/test_users.py b/docs/examples/core/event_handler/api_gateway/layout/test_users.py new file mode 100644 index 00000000000..226c78580ac --- /dev/null +++ b/docs/examples/core/event_handler/api_gateway/layout/test_users.py @@ -0,0 +1,11 @@ +import json + +from src.users import main # follows namespace package from root + + +def test_lambda_handler(apigw_event, lambda_context): + ret = main.lambda_handler(apigw_event, lambda_context) + expected = json.dumps({"message": "hello universe"}, separators=(",", ":")) + + assert ret["statusCode"] == 200 + assert ret["body"] == expected diff --git a/docs/examples/core/event_handler/api_gateway/template.yml b/docs/examples/core/event_handler/api_gateway/template.yml new file mode 100644 index 00000000000..cabae51bda6 --- /dev/null +++ 
b/docs/examples/core/event_handler/api_gateway/template.yml @@ -0,0 +1,38 @@ +AWSTemplateFormatVersion: '2010-09-09' +Transform: AWS::Serverless-2016-10-31 +Description: Hello world event handler API Gateway + +Globals: + Api: + TracingEnabled: true + Cors: # see CORS section + AllowOrigin: "'https://example.com'" + AllowHeaders: "'Content-Type,Authorization,X-Amz-Date'" + MaxAge: "'300'" + BinaryMediaTypes: # see Binary responses section + - '*~1*' # converts to */* for any binary type + Function: + Timeout: 5 + Runtime: python3.9 + Tracing: Active + Environment: + Variables: + LOG_LEVEL: INFO + POWERTOOLS_LOGGER_SAMPLE_RATE: 0.1 + POWERTOOLS_LOGGER_LOG_EVENT: true + POWERTOOLS_METRICS_NAMESPACE: MyServerlessApplication + POWERTOOLS_SERVICE_NAME: my_api-service + +Resources: + ApiFunction: + Type: AWS::Serverless::Function + Properties: + Handler: app.lambda_handler + CodeUri: api_handler/ + Description: API handler function + Events: + ApiEvent: + Type: Api + Properties: + Path: /{proxy+} # Send requests on any path to the lambda function + Method: ANY # Send requests using any http method to the lambda function diff --git a/docs/examples/core/event_handler/api_gateway/test_app.py b/docs/examples/core/event_handler/api_gateway/test_app.py new file mode 100644 index 00000000000..cd789b8940f --- /dev/null +++ b/docs/examples/core/event_handler/api_gateway/test_app.py @@ -0,0 +1,28 @@ +from dataclasses import dataclass + +import app +import pytest + + +@pytest.fixture +def lambda_context(): + @dataclass + class LambdaContext: + function_name: str = "test" + memory_limit_in_mb: int = 128 + invoked_function_arn: str = "arn:aws:lambda:eu-west-1:809313241:function:test" + aws_request_id: str = "52fdfc07-2182-154f-163f-5f0f9a621d72" + + return LambdaContext() + + +def test_lambda_handler(lambda_context): + minimal_event = { + "path": "/hello", + "httpMethod": "GET", + "requestContext": { + "requestId": "c6af9ac6-7b61-11e6-9a41-93e8deadbeef", # correlation ID + }, + } + + 
app.lambda_handler(minimal_event, lambda_context) diff --git a/docs/examples/core/event_handler/api_gateway/users_route_prefix.py b/docs/examples/core/event_handler/api_gateway/users_route_prefix.py new file mode 100644 index 00000000000..8eac2e6c116 --- /dev/null +++ b/docs/examples/core/event_handler/api_gateway/users_route_prefix.py @@ -0,0 +1,21 @@ +from typing import Dict + +from aws_lambda_powertools import Logger +from aws_lambda_powertools.event_handler.api_gateway import Router + +logger = Logger(child=True) +router = Router() +USERS = {"user1": "details", "user2": "details", "user3": "details"} + + +@router.get("/") # /users, when we set the prefix in app.py +def get_users() -> Dict: + ... + + +@router.get("/<username>") +def get_user(username: str) -> Dict: + ... + + +# many other related /users routing diff --git a/docs/examples/core/event_handler/api_gateway/users_split_routes.py b/docs/examples/core/event_handler/api_gateway/users_split_routes.py new file mode 100644 index 00000000000..827e81412c3 --- /dev/null +++ b/docs/examples/core/event_handler/api_gateway/users_split_routes.py @@ -0,0 +1,28 @@ +import itertools +from typing import Dict + +from aws_lambda_powertools import Logger +from aws_lambda_powertools.event_handler.api_gateway import Router + +logger = Logger(child=True) +router = Router() +USERS = {"user1": "details_here", "user2": "details_here", "user3": "details_here"} + + +@router.get("/users") +def get_users() -> Dict: + # /users?limit=1 + pagination_limit = router.current_event.get_query_string_value(name="limit", default_value=10) + + logger.info(f"Fetching the first {pagination_limit} users...") + ret = dict(itertools.islice(USERS.items(), int(pagination_limit))) + return {"items": [ret]} + + +@router.get("/users/<username>") +def get_user(username: str) -> Dict: + logger.info(f"Fetching username {username}") + return {"details": USERS.get(username, {})} + + +# many other related /users routing diff --git 
a/docs/examples/core/event_handler/appsync/app_async_functions.py b/docs/examples/core/event_handler/appsync/app_async_functions.py new file mode 100644 index 00000000000..02b91605811 --- /dev/null +++ b/docs/examples/core/event_handler/appsync/app_async_functions.py @@ -0,0 +1,23 @@ +import asyncio + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import AppSyncResolver +from aws_lambda_powertools.logging import correlation_paths + +tracer = Tracer(service="sample_resolver") +logger = Logger(service="sample_resolver") +app = AppSyncResolver() + + +@app.resolver(type_name="Query", field_name="listTodos") +async def list_todos(): + todos = await some_async_io_call() + return todos + + +@logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER) +@tracer.capture_lambda_handler +def lambda_handler(event, context): + result = app.resolve(event, context) + + return asyncio.run(result) diff --git a/docs/examples/core/event_handler/appsync/app_async_test.py b/docs/examples/core/event_handler/appsync/app_async_test.py new file mode 100644 index 00000000000..24bf8bc0c37 --- /dev/null +++ b/docs/examples/core/event_handler/appsync/app_async_test.py @@ -0,0 +1,11 @@ +import asyncio + +from aws_lambda_powertools.event_handler import AppSyncResolver + +app = AppSyncResolver() + + +@app.resolver(field_name="createSomething") +async def create_something_async(): + await asyncio.sleep(1) # Do async stuff + return "created this value" diff --git a/docs/examples/core/event_handler/appsync/app_custom_model.py b/docs/examples/core/event_handler/appsync/app_custom_model.py new file mode 100644 index 00000000000..26d56975323 --- /dev/null +++ b/docs/examples/core/event_handler/appsync/app_custom_model.py @@ -0,0 +1,28 @@ +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import AppSyncResolver +from aws_lambda_powertools.logging import correlation_paths +from 
aws_lambda_powertools.utilities.data_classes.appsync_resolver_event import AppSyncResolverEvent + +tracer = Tracer(service="sample_resolver") +logger = Logger(service="sample_resolver") +app = AppSyncResolver() + + +class MyCustomModel(AppSyncResolverEvent): + @property + def country_viewer(self) -> str: + return self.request_headers.get("cloudfront-viewer-country") + + +@app.resolver(field_name="listLocations") +@app.resolver(field_name="locations") +def get_locations(name: str, description: str = ""): + if app.current_event.country_viewer == "US": + ... + return name + description + + +@logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER) +@tracer.capture_lambda_handler +def lambda_handler(event, context): + return app.resolve(event, context, data_model=MyCustomModel) diff --git a/docs/examples/core/event_handler/appsync/app_merchant_info.py b/docs/examples/core/event_handler/appsync/app_merchant_info.py new file mode 100644 index 00000000000..e19ca996da8 --- /dev/null +++ b/docs/examples/core/event_handler/appsync/app_merchant_info.py @@ -0,0 +1,25 @@ +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import AppSyncResolver +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.utilities.data_classes.appsync import scalar_types_utils + +tracer = Tracer(service="sample_graphql_transformer_resolver") +logger = Logger(service="sample_graphql_transformer_resolver") +app = AppSyncResolver() + + +@app.resolver(type_name="Query", field_name="listLocations") +def list_locations(page: int = 0, size: int = 10): + return [{"id": 100, "name": "Smooth Grooves"}] + + +@app.resolver(field_name="commonField") +def common_field(): + # Would match all fieldNames matching 'commonField' + return scalar_types_utils.make_id() + + +@tracer.capture_lambda_handler +@logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER) +def lambda_handler(event, 
context): + app.resolve(event, context) diff --git a/docs/examples/core/event_handler/appsync/app_merchant_search.py b/docs/examples/core/event_handler/appsync/app_merchant_search.py new file mode 100644 index 00000000000..9360b7e5148 --- /dev/null +++ b/docs/examples/core/event_handler/appsync/app_merchant_search.py @@ -0,0 +1,16 @@ +from aws_lambda_powertools.event_handler import AppSyncResolver +from aws_lambda_powertools.utilities.data_classes.appsync import scalar_types_utils + +app = AppSyncResolver() + + +@app.resolver(type_name="Query", field_name="findMerchant") +def find_merchant(search: str): + return [ + { + "id": scalar_types_utils.make_id(), + "name": "Brewer Brewing", + "description": "Mike Brewer's IPA brewing place", + }, + {"id": scalar_types_utils.make_id(), "name": "Serverlessa's Bakery", "description": "Lessa's sourdough place"}, + ] diff --git a/docs/examples/core/event_handler/appsync/app_nested_mappings.py b/docs/examples/core/event_handler/appsync/app_nested_mappings.py new file mode 100644 index 00000000000..333b1e44aaa --- /dev/null +++ b/docs/examples/core/event_handler/appsync/app_nested_mappings.py @@ -0,0 +1,19 @@ +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import AppSyncResolver +from aws_lambda_powertools.logging import correlation_paths + +tracer = Tracer(service="sample_resolver") +logger = Logger(service="sample_resolver") +app = AppSyncResolver() + + +@app.resolver(field_name="listLocations") +@app.resolver(field_name="locations") +def get_locations(name: str, description: str = ""): + return name + description + + +@logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER) +@tracer.capture_lambda_handler +def lambda_handler(event, context): + return app.resolve(event, context) diff --git a/docs/examples/core/event_handler/appsync/app_resolver_decorator.py b/docs/examples/core/event_handler/appsync/app_resolver_decorator.py new file mode 100644 index 
00000000000..9ed8d7b6b83 --- /dev/null +++ b/docs/examples/core/event_handler/appsync/app_resolver_decorator.py @@ -0,0 +1,46 @@ +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import AppSyncResolver +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.utilities.data_classes.appsync import scalar_types_utils + +tracer = Tracer(service="sample_resolver") +logger = Logger(service="sample_resolver") +app = AppSyncResolver() + +# Note that `creation_time` isn't available in the schema +# This utility also takes into account what info you make available at API level vs what's stored +TODOS = [ + { + "id": scalar_types_utils.make_id(), # type ID or String + "title": "First task", + "description": "String", + "done": False, + "creation_time": scalar_types_utils.aws_datetime(), # type AWSDateTime + }, + { + "id": scalar_types_utils.make_id(), + "title": "Second task", + "description": "String", + "done": True, + "creation_time": scalar_types_utils.aws_datetime(), + }, +] + + +@app.resolver(type_name="Query", field_name="getTodo") +def get_todo(id: str = ""): + logger.info(f"Fetching Todo {id}") + todo = [todo for todo in TODOS if todo["id"] == id] + + return todo + + +@app.resolver(type_name="Query", field_name="listTodos") +def list_todos(): + return TODOS + + +@logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER) +@tracer.capture_lambda_handler +def lambda_handler(event, context): + return app.resolve(event, context) diff --git a/docs/examples/core/event_handler/appsync/app_router.py b/docs/examples/core/event_handler/appsync/app_router.py new file mode 100644 index 00000000000..50e42f4d55a --- /dev/null +++ b/docs/examples/core/event_handler/appsync/app_router.py @@ -0,0 +1,19 @@ +from typing import Dict + +from resolvers import location + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import AppSyncResolver +from 
aws_lambda_powertools.logging.correlation_paths import APPSYNC_RESOLVER +from aws_lambda_powertools.utilities.typing import LambdaContext + +tracer = Tracer() +logger = Logger() +app = AppSyncResolver() +app.include_router(location.router) + + +@tracer.capture_lambda_handler +@logger.inject_lambda_context(correlation_id_path=APPSYNC_RESOLVER) +def lambda_handler(event: Dict, context: LambdaContext): + return app.resolve(event, context) diff --git a/docs/examples/core/event_handler/appsync/app_test.py b/docs/examples/core/event_handler/appsync/app_test.py new file mode 100644 index 00000000000..0e37a1952f7 --- /dev/null +++ b/docs/examples/core/event_handler/appsync/app_test.py @@ -0,0 +1,8 @@ +from aws_lambda_powertools.event_handler import AppSyncResolver + +app = AppSyncResolver() + + +@app.resolver(field_name="createSomething") +def create_something(): + return "created this value" diff --git a/docs/examples/core/event_handler/appsync/resolvers_location.py b/docs/examples/core/event_handler/appsync/resolvers_location.py new file mode 100644 index 00000000000..67c2c7a4814 --- /dev/null +++ b/docs/examples/core/event_handler/appsync/resolvers_location.py @@ -0,0 +1,18 @@ +from typing import Any, Dict, List + +from aws_lambda_powertools import Logger +from aws_lambda_powertools.event_handler.appsync import Router + +logger = Logger(child=True) +router = Router() + + +@router.resolver(type_name="Query", field_name="listLocations") +def list_locations(merchant_id: str) -> List[Dict[str, Any]]: + return [{"name": "Location name", "merchant_id": merchant_id}] + + +@router.resolver(type_name="Location", field_name="status") +def resolve_status(merchant_id: str) -> str: + logger.debug(f"Resolve status for merchant_id: {merchant_id}") + return "FOO" diff --git a/docs/examples/core/event_handler/appsync/template.yml b/docs/examples/core/event_handler/appsync/template.yml new file mode 100644 index 00000000000..6c14a72b968 --- /dev/null +++ 
b/docs/examples/core/event_handler/appsync/template.yml @@ -0,0 +1,129 @@ +AWSTemplateFormatVersion: '2010-09-09' +Transform: AWS::Serverless-2016-10-31 +Description: Hello world Direct Lambda Resolver + +Globals: + Function: + Timeout: 5 + Runtime: python3.9 + Tracing: Active + Environment: + Variables: + # Powertools env vars: https://awslabs.github.io/aws-lambda-powertools-python/latest/#environment-variables + LOG_LEVEL: INFO + POWERTOOLS_LOGGER_SAMPLE_RATE: 0.1 + POWERTOOLS_LOGGER_LOG_EVENT: true + POWERTOOLS_SERVICE_NAME: sample_resolver + +Resources: + HelloWorldFunction: + Type: AWS::Serverless::Function + Properties: + Handler: app.lambda_handler + CodeUri: hello_world + Description: Sample Lambda Powertools Direct Lambda Resolver + Tags: + SOLUTION: LambdaPowertoolsPython + + # IAM Permissions and Roles + + AppSyncServiceRole: + Type: "AWS::IAM::Role" + Properties: + AssumeRolePolicyDocument: + Version: "2012-10-17" + Statement: + - + Effect: "Allow" + Principal: + Service: + - "appsync.amazonaws.com" + Action: + - "sts:AssumeRole" + + InvokeLambdaResolverPolicy: + Type: "AWS::IAM::Policy" + Properties: + PolicyName: "DirectAppSyncLambda" + PolicyDocument: + Version: "2012-10-17" + Statement: + - + Effect: "Allow" + Action: "lambda:invokeFunction" + Resource: + - !GetAtt HelloWorldFunction.Arn + Roles: + - !Ref AppSyncServiceRole + + # GraphQL API + + HelloWorldApi: + Type: "AWS::AppSync::GraphQLApi" + Properties: + Name: HelloWorldApi + AuthenticationType: "API_KEY" + XrayEnabled: true + + HelloWorldApiKey: + Type: AWS::AppSync::ApiKey + Properties: + ApiId: !GetAtt HelloWorldApi.ApiId + + HelloWorldApiSchema: + Type: "AWS::AppSync::GraphQLSchema" + Properties: + ApiId: !GetAtt HelloWorldApi.ApiId + Definition: | + schema { + query:Query + } + + type Query { + getTodo(id: ID!): Todo + listTodos: [Todo] + } + + type Todo { + id: ID! 
+ title: String + description: String + done: Boolean + } + + # Lambda Direct Data Source and Resolver + + HelloWorldFunctionDataSource: + Type: "AWS::AppSync::DataSource" + Properties: + ApiId: !GetAtt HelloWorldApi.ApiId + Name: "HelloWorldLambdaDirectResolver" + Type: "AWS_LAMBDA" + ServiceRoleArn: !GetAtt AppSyncServiceRole.Arn + LambdaConfig: + LambdaFunctionArn: !GetAtt HelloWorldFunction.Arn + + ListTodosResolver: + Type: "AWS::AppSync::Resolver" + Properties: + ApiId: !GetAtt HelloWorldApi.ApiId + TypeName: "Query" + FieldName: "listTodos" + DataSourceName: !GetAtt HelloWorldFunctionDataSource.Name + + GetTodoResolver: + Type: "AWS::AppSync::Resolver" + Properties: + ApiId: !GetAtt HelloWorldApi.ApiId + TypeName: "Query" + FieldName: "getTodo" + DataSourceName: !GetAtt HelloWorldFunctionDataSource.Name + + +Outputs: + HelloWorldFunction: + Description: "Hello World Lambda Function ARN" + Value: !GetAtt HelloWorldFunction.Arn + + HelloWorldAPI: + Value: !GetAtt HelloWorldApi.Arn diff --git a/docs/examples/core/event_handler/appsync/test_async_resolver.py b/docs/examples/core/event_handler/appsync/test_async_resolver.py new file mode 100644 index 00000000000..8b01db6b059 --- /dev/null +++ b/docs/examples/core/event_handler/appsync/test_async_resolver.py @@ -0,0 +1,18 @@ +import json +from pathlib import Path + +import pytest +from src.index import app # import the instance of AppSyncResolver from your code + + +@pytest.mark.asyncio +async def test_direct_resolver(): + # Load mock event from a file + json_file_path = Path("appSyncDirectResolver.json") + with open(json_file_path) as json_file: + mock_event = json.load(json_file) + + # Call the implicit handler + result = await app(mock_event, {}) + + assert result == "created this value" diff --git a/docs/examples/core/event_handler/appsync/test_resolver.py b/docs/examples/core/event_handler/appsync/test_resolver.py new file mode 100644 index 00000000000..ac2f417b176 --- /dev/null +++ 
b/docs/examples/core/event_handler/appsync/test_resolver.py @@ -0,0 +1,17 @@ +import json +from pathlib import Path + +import pytest +from src.index import app # import the instance of AppSyncResolver from your code + + +def test_direct_resolver(): + # Load mock event from a file + json_file_path = Path("appSyncDirectResolver.json") + with open(json_file_path) as json_file: + mock_event = json.load(json_file) + + # Call the implicit handler + result = app(mock_event, {}) + + assert result == "created this value" diff --git a/docs/examples/core/logger/correct_logger_inheritance.py b/docs/examples/core/logger/correct_logger_inheritance.py new file mode 100644 index 00000000000..dc7acd5d5f3 --- /dev/null +++ b/docs/examples/core/logger/correct_logger_inheritance.py @@ -0,0 +1,11 @@ +import my_module + +from aws_lambda_powertools import Logger + +logger = Logger(service="payment") +... + +# my_module.py +from aws_lambda_powertools import Logger + +logger = Logger(service="payment", child=True) diff --git a/docs/examples/core/logger/fake_lambda_context_for_logger.py b/docs/examples/core/logger/fake_lambda_context_for_logger.py new file mode 100644 index 00000000000..c57caf0e026 --- /dev/null +++ b/docs/examples/core/logger/fake_lambda_context_for_logger.py @@ -0,0 +1,20 @@ +from dataclasses import dataclass + +import pytest + + +@pytest.fixture +def lambda_context(): + @dataclass + class LambdaContext: + function_name: str = "test" + memory_limit_in_mb: int = 128 + invoked_function_arn: str = "arn:aws:lambda:eu-west-1:809313241:function:test" + aws_request_id: str = "52fdfc07-2182-154f-163f-5f0f9a621d72" + + return LambdaContext() + + +def test_lambda_handler(lambda_context): + test_event = {"test": "event"} + your_lambda_handler(test_event, lambda_context) # this will now have a Context object populated diff --git a/docs/examples/core/logger/fake_lambda_context_for_logger_py36.py b/docs/examples/core/logger/fake_lambda_context_for_logger_py36.py new file mode 100644 
index 00000000000..c2f4f787600 --- /dev/null +++ b/docs/examples/core/logger/fake_lambda_context_for_logger_py36.py @@ -0,0 +1,22 @@ +from collections import namedtuple + +import pytest + + +@pytest.fixture +def lambda_context(): + lambda_context = { + "function_name": "test", + "memory_limit_in_mb": 128, + "invoked_function_arn": "arn:aws:lambda:eu-west-1:809313241:function:test", + "aws_request_id": "52fdfc07-2182-154f-163f-5f0f9a621d72", + } + + return namedtuple("LambdaContext", lambda_context.keys())(*lambda_context.values()) + + +def test_lambda_handler(lambda_context): + test_event = {"test": "event"} + + # this will now have a Context object populated + your_lambda_handler(test_event, lambda_context) diff --git a/docs/examples/core/logger/faq_append_keys_vs_extra.py b/docs/examples/core/logger/faq_append_keys_vs_extra.py new file mode 100644 index 00000000000..dcb40809ba8 --- /dev/null +++ b/docs/examples/core/logger/faq_append_keys_vs_extra.py @@ -0,0 +1,15 @@ +from aws_lambda_powertools import Logger + +logger = Logger(service="payment") + + +def handler(event, context): + logger.append_keys(payment_id="123456789") + + try: + booking_id = book_flight() + logger.info("Flight booked successfully", extra={"booking_id": booking_id}) + except BookingReservationError: + ... 
+ + logger.info("goodbye") diff --git a/docs/examples/core/logger/faq_enable_boto3_logger.py b/docs/examples/core/logger/faq_enable_boto3_logger.py new file mode 100644 index 00000000000..cce8dc6f8e7 --- /dev/null +++ b/docs/examples/core/logger/faq_enable_boto3_logger.py @@ -0,0 +1,18 @@ +from typing import Dict, List + +import boto3 + +from aws_lambda_powertools import Logger +from aws_lambda_powertools.utilities.typing import LambdaContext + +boto3.set_stream_logger() +boto3.set_stream_logger("botocore") + +logger = Logger() +client = boto3.client("s3") + + +def handler(event: Dict, context: LambdaContext) -> List: + response = client.list_buckets() + + return response.get("Buckets", []) diff --git a/docs/examples/core/logger/faq_utils_copy_config_to_registered_loggers.py b/docs/examples/core/logger/faq_utils_copy_config_to_registered_loggers.py new file mode 100644 index 00000000000..bce7042bf97 --- /dev/null +++ b/docs/examples/core/logger/faq_utils_copy_config_to_registered_loggers.py @@ -0,0 +1,11 @@ +import logging + +from aws_lambda_powertools import Logger +from aws_lambda_powertools.logging import utils + +logger = Logger() + +external_logger = logging.getLogger(name="example") + +utils.copy_config_to_registered_loggers(source_logger=logger) +external_logger.info("test message") diff --git a/docs/examples/core/logger/getting_started_app.py b/docs/examples/core/logger/getting_started_app.py new file mode 100644 index 00000000000..d3d114f187f --- /dev/null +++ b/docs/examples/core/logger/getting_started_app.py @@ -0,0 +1,4 @@ +from aws_lambda_powertools import Logger + +logger = Logger() # Sets service via env var +# OR logger = Logger(service="example") diff --git a/docs/examples/core/logger/getting_started_template.yml b/docs/examples/core/logger/getting_started_template.yml new file mode 100644 index 00000000000..5f3a7fde387 --- /dev/null +++ b/docs/examples/core/logger/getting_started_template.yml @@ -0,0 +1,13 @@ +AWSTemplateFormatVersion: 
"2010-09-09" +Transform: AWS::Serverless-2016-10-31 +Resources: + HelloWorldFunction: + Type: AWS::Serverless::Function + Properties: + CodeUri: src/ + Handler: app.lambda_handler + Runtime: python3.9 + Environment: + Variables: + LOG_LEVEL: INFO + POWERTOOLS_SERVICE_NAME: example diff --git a/docs/examples/core/logger/incorrect_logger_inheritance.py b/docs/examples/core/logger/incorrect_logger_inheritance.py new file mode 100644 index 00000000000..b519d6bb3e1 --- /dev/null +++ b/docs/examples/core/logger/incorrect_logger_inheritance.py @@ -0,0 +1,11 @@ +import my_module + +from aws_lambda_powertools import Logger + +logger = Logger(service="payment") +... + +# my_module.py +from aws_lambda_powertools import Logger + +logger = Logger(child=True) diff --git a/docs/examples/core/logger/inject_lambda_context.py b/docs/examples/core/logger/inject_lambda_context.py new file mode 100644 index 00000000000..c24face02b9 --- /dev/null +++ b/docs/examples/core/logger/inject_lambda_context.py @@ -0,0 +1,17 @@ +from aws_lambda_powertools import Logger + +logger = Logger(service="payment") + + +@logger.inject_lambda_context +def handler(event, context): + logger.info("Collecting payment") + + # You can log entire objects too + logger.info( + { + "operation": "collect_payment", + "charge_id": event["charge_id"], + } + ) + ... 
diff --git a/docs/examples/core/logger/inject_lambda_context_clear_state.py b/docs/examples/core/logger/inject_lambda_context_clear_state.py new file mode 100644 index 00000000000..bafb2058115 --- /dev/null +++ b/docs/examples/core/logger/inject_lambda_context_clear_state.py @@ -0,0 +1,13 @@ +from aws_lambda_powertools import Logger + +logger = Logger(service="payment") + + +@logger.inject_lambda_context(clear_state=True) +def handler(event, context): + if event.get("special_key"): + # Should only be available in the first request log + # as the second request doesn't contain `special_key` + logger.append_keys(debugging_key="value") + + logger.info("Collecting payment") diff --git a/docs/examples/core/logger/inject_lambda_context_correlation_id_path.py b/docs/examples/core/logger/inject_lambda_context_correlation_id_path.py new file mode 100644 index 00000000000..d7c7e5cb0b5 --- /dev/null +++ b/docs/examples/core/logger/inject_lambda_context_correlation_id_path.py @@ -0,0 +1,9 @@ +from aws_lambda_powertools import Logger + +logger = Logger(service="payment") + + +@logger.inject_lambda_context(correlation_id_path="headers.my_request_id_header") +def handler(event, context): + logger.debug(f"Correlation ID => {logger.get_correlation_id()}") + logger.info("Collecting payment") diff --git a/docs/examples/core/logger/inject_lambda_context_correlation_paths.py b/docs/examples/core/logger/inject_lambda_context_correlation_paths.py new file mode 100644 index 00000000000..c883d8da416 --- /dev/null +++ b/docs/examples/core/logger/inject_lambda_context_correlation_paths.py @@ -0,0 +1,10 @@ +from aws_lambda_powertools import Logger +from aws_lambda_powertools.logging import correlation_paths + +logger = Logger(service="payment") + + +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST) +def handler(event, context): + logger.debug(f"Correlation ID => {logger.get_correlation_id()}") + logger.info("Collecting payment") diff --git 
a/docs/examples/core/logger/inject_lambda_context_log_event.py b/docs/examples/core/logger/inject_lambda_context_log_event.py new file mode 100644 index 00000000000..4b82fa519cc --- /dev/null +++ b/docs/examples/core/logger/inject_lambda_context_log_event.py @@ -0,0 +1,8 @@ +from aws_lambda_powertools import Logger + +logger = Logger(service="payment") + + +@logger.inject_lambda_context(log_event=True) +def handler(event, context): + ... diff --git a/docs/examples/core/logger/logger_append_keys.py b/docs/examples/core/logger/logger_append_keys.py new file mode 100644 index 00000000000..5d62167a0e8 --- /dev/null +++ b/docs/examples/core/logger/logger_append_keys.py @@ -0,0 +1,12 @@ +from aws_lambda_powertools import Logger + +logger = Logger(service="payment") + + +def handler(event, context): + order_id = event.get("order_id") + + # this will ensure order_id key always has the latest value before logging + logger.append_keys(order_id=order_id) + + logger.info("Collecting payment") diff --git a/docs/examples/core/logger/logger_exception.py b/docs/examples/core/logger/logger_exception.py new file mode 100644 index 00000000000..4056582c786 --- /dev/null +++ b/docs/examples/core/logger/logger_exception.py @@ -0,0 +1,8 @@ +from aws_lambda_powertools import Logger + +logger = Logger(service="payment") + +try: + raise ValueError("something went wrong") +except Exception: + logger.exception("Received an exception") diff --git a/docs/examples/core/logger/logger_extra_parameter.py b/docs/examples/core/logger/logger_extra_parameter.py new file mode 100644 index 00000000000..9b83dac2971 --- /dev/null +++ b/docs/examples/core/logger/logger_extra_parameter.py @@ -0,0 +1,6 @@ +from aws_lambda_powertools import Logger + +logger = Logger(service="payment") + +fields = {"request_id": "1123"} +logger.info("Collecting payment", extra=fields) diff --git a/docs/examples/core/logger/logger_json_default.py b/docs/examples/core/logger/logger_json_default.py new file mode 100644 index 
00000000000..a7b41abec19 --- /dev/null +++ b/docs/examples/core/logger/logger_json_default.py @@ -0,0 +1,16 @@ +from aws_lambda_powertools import Logger + + +def custom_json_default(value): + return f"&lt;non-serializable: {type(value).__name__}&gt;" + + +class Unserializable: + pass + + +logger = Logger(service="payment", json_default=custom_json_default) + + +def handler(event, context): + logger.info(Unserializable()) diff --git a/docs/examples/core/logger/logger_json_serializer.py b/docs/examples/core/logger/logger_json_serializer.py new file mode 100644 index 00000000000..6bace8f5426 --- /dev/null +++ b/docs/examples/core/logger/logger_json_serializer.py @@ -0,0 +1,15 @@ +import orjson + +from aws_lambda_powertools import Logger + +custom_serializer = orjson.dumps +custom_deserializer = orjson.loads + +logger = Logger( + service="payment", + json_serializer=custom_serializer, + json_deserializer=custom_deserializer, +) + +# when using parameters, you can pass a partial +# custom_serializer=functools.partial(orjson.dumps, option=orjson.OPT_SERIALIZE_NUMPY) diff --git a/docs/examples/core/logger/logger_log_record_order.py b/docs/examples/core/logger/logger_log_record_order.py new file mode 100644 index 00000000000..ed6f85a5739 --- /dev/null +++ b/docs/examples/core/logger/logger_log_record_order.py @@ -0,0 +1,10 @@ +from aws_lambda_powertools import Logger + +# make message as the first key +logger = Logger(service="payment", log_record_order=["message"]) + +# make request_id that will be added later as the first key +# Logger(service="payment", log_record_order=["request_id"]) + +# Default key sorting order when omit +# Logger(service="payment", log_record_order=["level","location","message","timestamp"]) diff --git a/docs/examples/core/logger/logger_logger_formatter.py b/docs/examples/core/logger/logger_logger_formatter.py new file mode 100644 index 00000000000..d0f1a3f802a --- /dev/null +++ b/docs/examples/core/logger/logger_logger_formatter.py @@ -0,0 +1,15 @@ +from typing import Dict + +from
aws_lambda_powertools import Logger +from aws_lambda_powertools.logging.formatter import LambdaPowertoolsFormatter + + +class CustomFormatter(LambdaPowertoolsFormatter): + def serialize(self, log: Dict) -> str: + """Serialize final structured log dict to JSON str""" + log["event"] = log.pop("message") # rename message key to event + return self.json_serializer(log) # use configured json serializer + + +logger = Logger(service="example", logger_formatter=CustomFormatter()) +logger.info("hello") diff --git a/docs/examples/core/logger/logger_logger_formatter_base_powertools_formatter.py b/docs/examples/core/logger/logger_logger_formatter_base_powertools_formatter.py new file mode 100644 index 00000000000..9dc88f455e0 --- /dev/null +++ b/docs/examples/core/logger/logger_logger_formatter_base_powertools_formatter.py @@ -0,0 +1,43 @@ +import json +import logging +from typing import Iterable, List, Optional + +from aws_lambda_powertools import Logger +from aws_lambda_powertools.logging.formatter import BasePowertoolsFormatter + + +class CustomFormatter(BasePowertoolsFormatter): + def __init__(self, log_record_order: Optional[List[str]] = None, *args, **kwargs): + self.log_record_order = log_record_order or ["level", "location", "message", "timestamp"] + self.log_format = dict.fromkeys(self.log_record_order) + super().__init__(*args, **kwargs) + + def append_keys(self, **additional_keys): + # also used by `inject_lambda_context` decorator + self.log_format.update(additional_keys) + + def remove_keys(self, keys: Iterable[str]): + for key in keys: + self.log_format.pop(key, None) + + def clear_state(self): + self.log_format = dict.fromkeys(self.log_record_order) + + def format(self, record: logging.LogRecord) -> str: # noqa: A003 + """Format logging record as structured JSON str""" + return json.dumps( + { + "event": super().format(record), + "timestamp": self.formatTime(record), + "my_default_key": "test", + **self.log_format, + } + ) + + +logger = Logger(service="payment",
logger_formatter=CustomFormatter()) + + +@logger.inject_lambda_context +def handler(event, context): + logger.info("Collecting payment") diff --git a/docs/examples/core/logger/logger_logger_handler.py b/docs/examples/core/logger/logger_logger_handler.py new file mode 100644 index 00000000000..a35a189725e --- /dev/null +++ b/docs/examples/core/logger/logger_logger_handler.py @@ -0,0 +1,10 @@ +import logging +from pathlib import Path + +from aws_lambda_powertools import Logger + +log_file = Path("/tmp/log.json") +log_file_handler = logging.FileHandler(filename=log_file) +logger = Logger(service="payment", logger_handler=log_file_handler) + +logger.info("Collecting payment") diff --git a/docs/examples/core/logger/logger_remove_keys.py b/docs/examples/core/logger/logger_remove_keys.py new file mode 100644 index 00000000000..555585931cb --- /dev/null +++ b/docs/examples/core/logger/logger_remove_keys.py @@ -0,0 +1,11 @@ +from aws_lambda_powertools import Logger + +logger = Logger(service="payment") + + +def handler(event, context): + logger.append_keys(sample_key="value") + logger.info("Collecting payment") + + logger.remove_keys(["sample_key"]) + logger.info("Collecting payment without sample key") diff --git a/docs/examples/core/logger/logger_sample_rate.py b/docs/examples/core/logger/logger_sample_rate.py new file mode 100644 index 00000000000..fc099c15345 --- /dev/null +++ b/docs/examples/core/logger/logger_sample_rate.py @@ -0,0 +1,9 @@ +from aws_lambda_powertools import Logger + +# Sample 10% of debug logs e.g. 
0.1 +logger = Logger(service="payment", sample_rate=0.1) + + +def handler(event, context): + logger.debug("Verifying whether order_id is present") + logger.info("Collecting payment") diff --git a/docs/examples/core/logger/logger_set_correlation_id.py b/docs/examples/core/logger/logger_set_correlation_id.py new file mode 100644 index 00000000000..266ac721bb6 --- /dev/null +++ b/docs/examples/core/logger/logger_set_correlation_id.py @@ -0,0 +1,8 @@ +from aws_lambda_powertools import Logger + +logger = Logger(service="payment") + + +def handler(event, context): + logger.set_correlation_id(event["requestContext"]["requestId"]) + logger.info("Collecting payment") diff --git a/docs/examples/core/logger/logger_set_correlation_id_data_class.py b/docs/examples/core/logger/logger_set_correlation_id_data_class.py new file mode 100644 index 00000000000..f062ad2fd3a --- /dev/null +++ b/docs/examples/core/logger/logger_set_correlation_id_data_class.py @@ -0,0 +1,10 @@ +from aws_lambda_powertools import Logger +from aws_lambda_powertools.utilities.data_classes import APIGatewayProxyEvent + +logger = Logger(service="payment") + + +def handler(event, context): + event = APIGatewayProxyEvent(event) + logger.set_correlation_id(event.request_context.request_id) + logger.info("Collecting payment") diff --git a/docs/examples/core/logger/logger_utc.py b/docs/examples/core/logger/logger_utc.py new file mode 100644 index 00000000000..7ad275efd3b --- /dev/null +++ b/docs/examples/core/logger/logger_utc.py @@ -0,0 +1,7 @@ +from aws_lambda_powertools import Logger + +logger = Logger(service="payment") +logger.info("Local time") + +logger_in_utc = Logger(service="payment", utc=True) +logger_in_utc.info("GMT time zone") diff --git a/docs/examples/core/logger/logging_formatter.py b/docs/examples/core/logger/logging_formatter.py new file mode 100644 index 00000000000..5e0683c695b --- /dev/null +++ b/docs/examples/core/logger/logging_formatter.py @@ -0,0 +1,8 @@ +from aws_lambda_powertools import 
Logger +from aws_lambda_powertools.logging.formatter import LambdaPowertoolsFormatter + +formatter = LambdaPowertoolsFormatter( + utc=True, + log_record_order=["message"], +) +logger = Logger(service="example", logger_formatter=formatter) diff --git a/docs/examples/core/logger/overriding_log_records.py b/docs/examples/core/logger/overriding_log_records.py new file mode 100644 index 00000000000..062f3a5f761 --- /dev/null +++ b/docs/examples/core/logger/overriding_log_records.py @@ -0,0 +1,16 @@ +from aws_lambda_powertools import Logger + +date_format = "%m/%d/%Y %I:%M:%S %p" +location_format = "[%(funcName)s] %(module)s -pants" + +# override location and timestamp format +logger = Logger( + service="payment", + location=location_format, + datefmt=date_format, +) + +# suppress the location key with a None value +logger_two = Logger(service="payment", location=None) + +logger.info("Collecting payment") diff --git a/docs/examples/core/logger/shared_logger_app.py b/docs/examples/core/logger/shared_logger_app.py new file mode 100644 index 00000000000..ed841ba7608 --- /dev/null +++ b/docs/examples/core/logger/shared_logger_app.py @@ -0,0 +1,10 @@ +import shared # Creates a child logger named "payment.shared" + +from aws_lambda_powertools import Logger + +logger = Logger() # POWERTOOLS_SERVICE_NAME: "payment" + + +def handler(event, context): + shared.inject_payment_id(event) + ... 
diff --git a/docs/examples/core/logger/shared_logger_child.py b/docs/examples/core/logger/shared_logger_child.py new file mode 100644 index 00000000000..13c130d590e --- /dev/null +++ b/docs/examples/core/logger/shared_logger_child.py @@ -0,0 +1,7 @@ +from aws_lambda_powertools import Logger + +logger = Logger(child=True) # POWERTOOLS_SERVICE_NAME: "payment" + + +def inject_payment_id(event): + logger.structure_logs(append=True, payment_id=event.get("payment_id")) diff --git a/docs/examples/core/metrics/add_metadata.py b/docs/examples/core/metrics/add_metadata.py new file mode 100644 index 00000000000..1e0660f2816 --- /dev/null +++ b/docs/examples/core/metrics/add_metadata.py @@ -0,0 +1,10 @@ +from aws_lambda_powertools import Metrics +from aws_lambda_powertools.metrics import MetricUnit + +metrics = Metrics(namespace="ExampleApplication", service="booking") + + +@metrics.log_metrics +def lambda_handler(evt, ctx): + metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1) + metrics.add_metadata(key="booking_id", value="booking_uuid") diff --git a/docs/examples/core/metrics/clear_metrics_between_tests.py b/docs/examples/core/metrics/clear_metrics_between_tests.py new file mode 100644 index 00000000000..cea3879af83 --- /dev/null +++ b/docs/examples/core/metrics/clear_metrics_between_tests.py @@ -0,0 +1,14 @@ +import pytest + +from aws_lambda_powertools import Metrics +from aws_lambda_powertools.metrics import metrics as metrics_global + + +@pytest.fixture(scope="function", autouse=True) +def reset_metric_set(): + # Clear out every metric data prior to every test + metrics = Metrics() + metrics.clear_metrics() + metrics_global.is_cold_start = True # ensure each test has cold start + metrics.clear_default_dimensions() # remove persisted default dimensions, if any + yield diff --git a/docs/examples/core/metrics/example_app.py b/docs/examples/core/metrics/example_app.py new file mode 100644 index 00000000000..b644c96b72e --- /dev/null +++ 
b/docs/examples/core/metrics/example_app.py @@ -0,0 +1,8 @@ +from aws_lambda_powertools import Metrics +from aws_lambda_powertools.metrics import MetricUnit + +metrics = Metrics() # Sets metric namespace and service via env var +# OR +metrics = Metrics( + namespace="ServerlessAirline", service="orders" +) # Sets metric namespace, and service as a metric dimension diff --git a/docs/examples/core/metrics/flush_metrics.py b/docs/examples/core/metrics/flush_metrics.py new file mode 100644 index 00000000000..379c51f864e --- /dev/null +++ b/docs/examples/core/metrics/flush_metrics.py @@ -0,0 +1,9 @@ +from aws_lambda_powertools import Metrics +from aws_lambda_powertools.metrics import MetricUnit + +metrics = Metrics(namespace="ExampleApplication", service="ExampleService") + + +@metrics.log_metrics +def lambda_handler(evt, ctx): + metrics.add_metric(name="BookingConfirmation", unit=MetricUnit.Count, value=1) diff --git a/docs/examples/core/metrics/flush_metrics_manually.py b/docs/examples/core/metrics/flush_metrics_manually.py new file mode 100644 index 00000000000..cf26cde2500 --- /dev/null +++ b/docs/examples/core/metrics/flush_metrics_manually.py @@ -0,0 +1,13 @@ +import json + +from aws_lambda_powertools import Metrics +from aws_lambda_powertools.metrics import MetricUnit + +metrics = Metrics(namespace="ExampleApplication", service="booking") + + +def lambda_handler(evt, ctx): + metrics.add_metric(name="ColdStart", unit=MetricUnit.Count, value=1) + your_metrics_object = metrics.serialize_metric_set() + metrics.clear_metrics() + print(json.dumps(your_metrics_object)) diff --git a/docs/examples/core/metrics/functional_testing.py b/docs/examples/core/metrics/functional_testing.py new file mode 100644 index 00000000000..71f628f2cae --- /dev/null +++ b/docs/examples/core/metrics/functional_testing.py @@ -0,0 +1,25 @@ +import json + +from aws_lambda_powertools import Metrics +from aws_lambda_powertools.metrics import MetricUnit + + +def test_log_metrics(capsys): + # GIVEN 
Metrics is initialized + metrics = Metrics(namespace="ServerlessAirline") + + # WHEN we utilize log_metrics to serialize + # and flush all metrics at the end of a function execution + @metrics.log_metrics + def lambda_handler(evt, ctx): + metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1) + metrics.add_dimension(name="environment", value="prod") + + lambda_handler({}, {}) + log = capsys.readouterr().out.strip() # remove any extra line + metrics_output = json.loads(log) # deserialize JSON str + + # THEN we should have no exceptions + # and a valid EMF object should be flushed correctly + assert "SuccessfulBooking" in log # basic string assertion in JSON str + assert "SuccessfulBooking" in metrics_output["_aws"]["CloudWatchMetrics"][0]["Metrics"][0]["Name"] diff --git a/docs/examples/core/metrics/functional_testing_multiple_blobs.py b/docs/examples/core/metrics/functional_testing_multiple_blobs.py new file mode 100644 index 00000000000..ed1c23078a1 --- /dev/null +++ b/docs/examples/core/metrics/functional_testing_multiple_blobs.py @@ -0,0 +1,33 @@ +import json +from collections import namedtuple + +from aws_lambda_powertools import Metrics +from aws_lambda_powertools.metrics import MetricUnit + + +def capture_metrics_output_multiple_emf_objects(capsys): + return [json.loads(line.strip()) for line in capsys.readouterr().out.split("\n") if line] + + +def test_log_metrics(capsys): + # GIVEN Metrics is initialized + metrics = Metrics(namespace="ServerlessAirline") + + # WHEN log_metrics is used with capture_cold_start_metric + @metrics.log_metrics(capture_cold_start_metric=True) + def lambda_handler(evt, ctx): + metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1) + metrics.add_dimension(name="environment", value="prod") + + # log_metrics uses function_name property from context to add as a dimension for cold start metric + LambdaContext = namedtuple("LambdaContext", "function_name") + lambda_handler({}, 
LambdaContext("example_fn")) + + cold_start_blob, custom_metrics_blob = capture_metrics_output_multiple_emf_objects(capsys) + + # THEN ColdStart metric and function_name dimension should be logged + # in a separate EMF blob than the application metrics + assert cold_start_blob["ColdStart"] == [1.0] + assert cold_start_blob["function_name"] == "example_fn" + + assert "SuccessfulBooking" in custom_metrics_blob # as per previous example diff --git a/docs/examples/core/metrics/log_metrics_capture_cold_start_metric.py b/docs/examples/core/metrics/log_metrics_capture_cold_start_metric.py new file mode 100644 index 00000000000..37b10026afa --- /dev/null +++ b/docs/examples/core/metrics/log_metrics_capture_cold_start_metric.py @@ -0,0 +1,8 @@ +from aws_lambda_powertools import Metrics + +metrics = Metrics(service="ExampleService") + + +@metrics.log_metrics(capture_cold_start_metric=True) +def lambda_handler(evt, ctx): + ... diff --git a/docs/examples/core/metrics/log_metrics_default_dimensions.py b/docs/examples/core/metrics/log_metrics_default_dimensions.py new file mode 100644 index 00000000000..39ea7ce4613 --- /dev/null +++ b/docs/examples/core/metrics/log_metrics_default_dimensions.py @@ -0,0 +1,10 @@ +from aws_lambda_powertools import Metrics +from aws_lambda_powertools.metrics import MetricUnit + +metrics = Metrics(namespace="ExampleApplication", service="booking") +DEFAULT_DIMENSIONS = {"environment": "prod", "another": "one"} + + +@metrics.log_metrics(default_dimensions=DEFAULT_DIMENSIONS) +def lambda_handler(evt, ctx): + metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1) diff --git a/docs/examples/core/metrics/log_metrics_multiple_decorators.py b/docs/examples/core/metrics/log_metrics_multiple_decorators.py new file mode 100644 index 00000000000..7fd6d14d9d0 --- /dev/null +++ b/docs/examples/core/metrics/log_metrics_multiple_decorators.py @@ -0,0 +1,11 @@ +from aws_lambda_powertools import Metrics, Tracer +from 
aws_lambda_powertools.metrics import MetricUnit + +tracer = Tracer(service="booking") +metrics = Metrics(namespace="ExampleApplication", service="booking") + + +@metrics.log_metrics +@tracer.capture_lambda_handler +def lambda_handler(evt, ctx): + metrics.add_metric(name="BookingConfirmation", unit=MetricUnit.Count, value=1) diff --git a/docs/examples/core/metrics/log_metrics_raise_on_empty_metrics.py b/docs/examples/core/metrics/log_metrics_raise_on_empty_metrics.py new file mode 100644 index 00000000000..90b5b475709 --- /dev/null +++ b/docs/examples/core/metrics/log_metrics_raise_on_empty_metrics.py @@ -0,0 +1,8 @@ +from aws_lambda_powertools.metrics import Metrics + +metrics = Metrics() + + +@metrics.log_metrics(raise_on_empty_metrics=True) +def lambda_handler(evt, ctx): + ... diff --git a/docs/examples/core/metrics/metrics_app.py b/docs/examples/core/metrics/metrics_app.py new file mode 100644 index 00000000000..9d1be476025 --- /dev/null +++ b/docs/examples/core/metrics/metrics_app.py @@ -0,0 +1,9 @@ +from aws_lambda_powertools import Metrics +from aws_lambda_powertools.metrics import MetricUnit + +metrics = Metrics(namespace="ExampleApplication", service="booking") + + +@metrics.log_metrics +def lambda_handler(evt, ctx): + metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1) diff --git a/docs/examples/core/metrics/metrics_custom_dimensions_app.py b/docs/examples/core/metrics/metrics_custom_dimensions_app.py new file mode 100644 index 00000000000..ef7f9a85e2d --- /dev/null +++ b/docs/examples/core/metrics/metrics_custom_dimensions_app.py @@ -0,0 +1,10 @@ +from aws_lambda_powertools import Metrics +from aws_lambda_powertools.metrics import MetricUnit + +metrics = Metrics(namespace="ExampleApplication", service="booking") + + +@metrics.log_metrics +def lambda_handler(evt, ctx): + metrics.add_dimension(name="environment", value="prod") + metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1) diff --git 
a/docs/examples/core/metrics/set_default_dimensions.py b/docs/examples/core/metrics/set_default_dimensions.py new file mode 100644 index 00000000000..0006703b287 --- /dev/null +++ b/docs/examples/core/metrics/set_default_dimensions.py @@ -0,0 +1,10 @@ +from aws_lambda_powertools import Metrics +from aws_lambda_powertools.metrics import MetricUnit + +metrics = Metrics(namespace="ExampleApplication", service="booking") +metrics.set_default_dimensions(environment="prod", another="one") + + +@metrics.log_metrics +def lambda_handler(evt, ctx): + metrics.add_metric(name="SuccessfulBooking", unit=MetricUnit.Count, value=1) diff --git a/docs/examples/core/metrics/single_metric.py b/docs/examples/core/metrics/single_metric.py new file mode 100644 index 00000000000..13a1a6c3e0c --- /dev/null +++ b/docs/examples/core/metrics/single_metric.py @@ -0,0 +1,8 @@ +from aws_lambda_powertools import single_metric +from aws_lambda_powertools.metrics import MetricUnit + + +def lambda_handler(evt, ctx): + with single_metric(name="ColdStart", unit=MetricUnit.Count, value=1, namespace="ExampleApplication") as metric: + metric.add_dimension(name="function_context", value="$LATEST") + ... 
diff --git a/docs/examples/core/metrics/template.yml b/docs/examples/core/metrics/template.yml new file mode 100644 index 00000000000..b9bc7ba6e04 --- /dev/null +++ b/docs/examples/core/metrics/template.yml @@ -0,0 +1,13 @@ +AWSTemplateFormatVersion: "2010-09-09" +Transform: AWS::Serverless-2016-10-31 +Resources: + HelloWorldFunction: + Type: AWS::Serverless::Function + Properties: + CodeUri: src/ + Handler: app.lambda_handler + Runtime: python3.9 + Environment: + Variables: + POWERTOOLS_SERVICE_NAME: payment + POWERTOOLS_METRICS_NAMESPACE: ServerlessAirline diff --git a/docs/examples/core/tracer/aiohttp_trace_config.py b/docs/examples/core/tracer/aiohttp_trace_config.py new file mode 100644 index 00000000000..3d45dd72d52 --- /dev/null +++ b/docs/examples/core/tracer/aiohttp_trace_config.py @@ -0,0 +1,15 @@ +import asyncio + +import aiohttp + +from aws_lambda_powertools import Tracer +from aws_lambda_powertools.tracing import aiohttp_trace_config + +tracer = Tracer() + + +async def aiohttp_task(): + async with aiohttp.ClientSession(trace_configs=[aiohttp_trace_config()]) as session: + async with session.get("https://httpbin.org/json") as resp: + resp = await resp.json() + return resp diff --git a/docs/examples/core/tracer/capture_error_disable.py b/docs/examples/core/tracer/capture_error_disable.py new file mode 100644 index 00000000000..669c2e360ac --- /dev/null +++ b/docs/examples/core/tracer/capture_error_disable.py @@ -0,0 +1,8 @@ +from aws_lambda_powertools import Tracer + +tracer = Tracer() + + +@tracer.capture_lambda_handler(capture_error=False) +def handler(event, context): + raise ValueError("some sensitive info in the stack trace...") diff --git a/docs/examples/core/tracer/capture_lambda_handler.py b/docs/examples/core/tracer/capture_lambda_handler.py new file mode 100644 index 00000000000..6ccb49fefe5 --- /dev/null +++ b/docs/examples/core/tracer/capture_lambda_handler.py @@ -0,0 +1,11 @@ +from aws_lambda_powertools import Tracer + +tracer = Tracer() # 
Sets service via env var +# OR tracer = Tracer(service="example") + + +@tracer.capture_lambda_handler +def handler(event, context): + charge_id = event.get("charge_id") + payment = collect_payment(charge_id) + ... diff --git a/docs/examples/core/tracer/capture_method_async.py b/docs/examples/core/tracer/capture_method_async.py new file mode 100644 index 00000000000..5e5c5dfe79a --- /dev/null +++ b/docs/examples/core/tracer/capture_method_async.py @@ -0,0 +1,11 @@ +import asyncio +import contextlib + +from aws_lambda_powertools import Tracer + +tracer = Tracer() + + +@tracer.capture_method +async def collect_payment(): + ... diff --git a/docs/examples/core/tracer/capture_method_context_manager.py b/docs/examples/core/tracer/capture_method_context_manager.py new file mode 100644 index 00000000000..7aa21c46f2b --- /dev/null +++ b/docs/examples/core/tracer/capture_method_context_manager.py @@ -0,0 +1,13 @@ +import asyncio +import contextlib + +from aws_lambda_powertools import Tracer + +tracer = Tracer() + + +@contextlib.contextmanager +@tracer.capture_method +def collect_payment_ctxman(): + yield result + ... diff --git a/docs/examples/core/tracer/capture_method_generators.py b/docs/examples/core/tracer/capture_method_generators.py new file mode 100644 index 00000000000..f5ff3a6fa1c --- /dev/null +++ b/docs/examples/core/tracer/capture_method_generators.py @@ -0,0 +1,12 @@ +import asyncio +import contextlib + +from aws_lambda_powertools import Tracer + +tracer = Tracer() + + +@tracer.capture_method +def collect_payment_gen(): + yield result + ... 
diff --git a/docs/examples/core/tracer/capture_method_sync.py b/docs/examples/core/tracer/capture_method_sync.py new file mode 100644 index 00000000000..79cc9db68a7 --- /dev/null +++ b/docs/examples/core/tracer/capture_method_sync.py @@ -0,0 +1,10 @@ +from aws_lambda_powertools import Tracer + +tracer = Tracer() + + +@tracer.capture_method +def collect_payment(charge_id): + ret = requests.post(PAYMENT_ENDPOINT) # logic + tracer.put_annotation("PAYMENT_STATUS", "SUCCESS") # custom annotation + return ret diff --git a/docs/examples/core/tracer/concurrent_asynchronous_functions.py b/docs/examples/core/tracer/concurrent_asynchronous_functions.py new file mode 100644 index 00000000000..7bc39c66b1c --- /dev/null +++ b/docs/examples/core/tracer/concurrent_asynchronous_functions.py @@ -0,0 +1,22 @@ +import asyncio + +from aws_lambda_powertools import Tracer + +tracer = Tracer() + + +async def another_async_task(): + async with tracer.provider.in_subsegment_async("## another_async_task") as subsegment: + subsegment.put_annotation(key="key", value="value") + subsegment.put_metadata(key="key", value="value", namespace="namespace") + ... + + +async def another_async_task_2(): + ... + + +@tracer.capture_method +async def collect_payment(charge_id): + asyncio.gather(another_async_task(), another_async_task_2()) + ... 
diff --git a/docs/examples/core/tracer/ignore_endpoint.py b/docs/examples/core/tracer/ignore_endpoint.py new file mode 100644 index 00000000000..271cf233d3b --- /dev/null +++ b/docs/examples/core/tracer/ignore_endpoint.py @@ -0,0 +1,17 @@ +from aws_lambda_powertools import Tracer + +tracer = Tracer() +# ignore all calls to `ec2.amazon.com` +tracer.ignore_endpoint(hostname="ec2.amazon.com") +# ignore calls to `*.sensitive.com/password` and `*.sensitive.com/credit-card` +tracer.ignore_endpoint(hostname="*.sensitive.com", urls=["/password", "/credit-card"]) + + +def ec2_api_calls(): + return "suppress_api_responses" + + +@tracer.capture_lambda_handler +def handler(event, context): + for x in long_list: + ec2_api_calls() diff --git a/docs/examples/core/tracer/patch_modules.py b/docs/examples/core/tracer/patch_modules.py new file mode 100644 index 00000000000..a907682211d --- /dev/null +++ b/docs/examples/core/tracer/patch_modules.py @@ -0,0 +1,7 @@ +import boto3 +import requests + +from aws_lambda_powertools import Tracer + +modules_to_be_patched = ["boto3", "requests"] +tracer = Tracer(patch_modules=modules_to_be_patched) diff --git a/docs/examples/core/tracer/put_annotation.py b/docs/examples/core/tracer/put_annotation.py new file mode 100644 index 00000000000..15d3040e31f --- /dev/null +++ b/docs/examples/core/tracer/put_annotation.py @@ -0,0 +1,9 @@ +from aws_lambda_powertools import Tracer + +tracer = Tracer() + + +@tracer.capture_lambda_handler +def handler(event, context): + ... + tracer.put_annotation(key="PaymentStatus", value="SUCCESS") diff --git a/docs/examples/core/tracer/put_metadata.py b/docs/examples/core/tracer/put_metadata.py new file mode 100644 index 00000000000..9f1dd7a58bb --- /dev/null +++ b/docs/examples/core/tracer/put_metadata.py @@ -0,0 +1,10 @@ +from aws_lambda_powertools import Tracer + +tracer = Tracer() + + +@tracer.capture_lambda_handler +def handler(event, context): + ... 
+ ret = some_logic() + tracer.put_metadata(key="payment_response", value=ret) diff --git a/docs/examples/core/tracer/reuse_handler.py b/docs/examples/core/tracer/reuse_handler.py new file mode 100644 index 00000000000..4789aee56f3 --- /dev/null +++ b/docs/examples/core/tracer/reuse_handler.py @@ -0,0 +1,11 @@ +from payment import collect_payment + +from aws_lambda_powertools import Tracer + +tracer = Tracer(service="payment") + + +@tracer.capture_lambda_handler +def handler(event, context): + charge_id = event.get("charge_id") + payment = collect_payment(charge_id) diff --git a/docs/examples/core/tracer/reuse_payment.py b/docs/examples/core/tracer/reuse_payment.py new file mode 100644 index 00000000000..41a81844264 --- /dev/null +++ b/docs/examples/core/tracer/reuse_payment.py @@ -0,0 +1,8 @@ +from aws_lambda_powertools import Tracer + +tracer = Tracer(service="payment") + + +@tracer.capture_method +def collect_payment(charge_id: str): + ... diff --git a/docs/examples/core/tracer/sensitive_data_scenario.py b/docs/examples/core/tracer/sensitive_data_scenario.py new file mode 100644 index 00000000000..3860ddc1163 --- /dev/null +++ b/docs/examples/core/tracer/sensitive_data_scenario.py @@ -0,0 +1,13 @@ +from aws_lambda_powertools import Tracer + +tracer = Tracer() + + +@tracer.capture_method(capture_response=False) +def fetch_sensitive_information(): + return "sensitive_information" + + +@tracer.capture_lambda_handler(capture_response=False) +def handler(event, context): + sensitive_information = fetch_sensitive_information() diff --git a/docs/examples/core/tracer/streaming_object_scenario.py b/docs/examples/core/tracer/streaming_object_scenario.py new file mode 100644 index 00000000000..1c516c80171 --- /dev/null +++ b/docs/examples/core/tracer/streaming_object_scenario.py @@ -0,0 +1,12 @@ +import boto3 + +from aws_lambda_powertools import Tracer + +tracer = Tracer() + + +@tracer.capture_method(capture_response=False) +def get_s3_object(bucket_name, object_key): + s3 
= boto3.client("s3") + s3_object = s3.get_object(Bucket=bucket_name, Key=object_key) + return s3_object diff --git a/docs/examples/core/tracer/template.yml b/docs/examples/core/tracer/template.yml new file mode 100644 index 00000000000..45d296fdfe7 --- /dev/null +++ b/docs/examples/core/tracer/template.yml @@ -0,0 +1,13 @@ +AWSTemplateFormatVersion: "2010-09-09" +Transform: AWS::Serverless-2016-10-31 +Resources: + HelloWorldFunction: + Type: AWS::Serverless::Function + Properties: + CodeUri: src/ + Handler: app.lambda_handler + Runtime: python3.9 + Tracing: Active + Environment: + Variables: + POWERTOOLS_SERVICE_NAME: example diff --git a/docs/examples/core/tracer/tracer_provider_escape_hatches.py b/docs/examples/core/tracer/tracer_provider_escape_hatches.py new file mode 100644 index 00000000000..bc0f30a11c1 --- /dev/null +++ b/docs/examples/core/tracer/tracer_provider_escape_hatches.py @@ -0,0 +1,10 @@ +from aws_lambda_powertools import Tracer + +tracer = Tracer() + + +@tracer.capture_lambda_handler +def handler(event, context): + with tracer.provider.in_subsegment("## custom subsegment") as subsegment: + ret = some_work() + subsegment.put_metadata("response", ret) diff --git a/docs/examples/index/debug_mode.py b/docs/examples/index/debug_mode.py new file mode 100644 index 00000000000..d063da98645 --- /dev/null +++ b/docs/examples/index/debug_mode.py @@ -0,0 +1,3 @@ +from aws_lambda_powertools.logging.logger import set_package_logger + +set_package_logger() # (1) diff --git a/docs/examples/index/lambda_layer_cdk_app.py b/docs/examples/index/lambda_layer_cdk_app.py new file mode 100644 index 00000000000..44e7a98c440 --- /dev/null +++ b/docs/examples/index/lambda_layer_cdk_app.py @@ -0,0 +1,19 @@ +from aws_cdk import aws_lambda, core + + +class SampleApp(core.Construct): + def __init__(self, scope: core.Construct, id_: str, env: core.Environment) -> None: + super().__init__(scope, id_) + + powertools_layer = aws_lambda.LayerVersion.from_layer_version_arn( + self, + 
id="lambda-powertools", + layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPython:17", + ) + aws_lambda.Function( + self, + "sample-app-lambda", + runtime=aws_lambda.Runtime.PYTHON_3_9, + layers=[powertools_layer] + # other props... + ) diff --git a/docs/examples/index/lambda_layer_main.tf b/docs/examples/index/lambda_layer_main.tf new file mode 100644 index 00000000000..0e77c214a9e --- /dev/null +++ b/docs/examples/index/lambda_layer_main.tf @@ -0,0 +1,38 @@ +terraform { + required_version = "~> 1.1.7" + required_providers { + aws = "~> 4.4.0" + } +} + +provider "aws" { + region = "{region}" +} + +resource "aws_iam_role" "iam_for_lambda" { + name = "iam_for_lambda" + + assume_role_policy = jsonencode({ + Version = "2012-10-17", + Statement = [ + { + Action = "sts:AssumeRole", + Principal = { + Service = "lambda.amazonaws.com" + }, + Effect = "Allow" + } + ] + }) +} + +resource "aws_lambda_function" "test_lambda" { + filename = "lambda_function_payload.zip" + function_name = "lambda_function_name" + role = aws_iam_role.iam_for_lambda.arn + handler = "index.test" + runtime = "python3.9" + layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:17"] + + source_code_hash = filebase64sha256("lambda_function_payload.zip") +} diff --git a/docs/examples/index/lambda_layer_template.yml b/docs/examples/index/lambda_layer_template.yml new file mode 100644 index 00000000000..4f5d4840ef2 --- /dev/null +++ b/docs/examples/index/lambda_layer_template.yml @@ -0,0 +1,11 @@ +AWSTemplateFormatVersion: '2010-09-09' +Transform: AWS::Serverless-2016-10-31 +Resources: + MyLambdaFunction: + Type: AWS::Serverless::Function + Properties: + CodeUri: src + Handler: app.lambda_handler + Runtime: python3.9 + Layers: + - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPython:17 diff --git a/docs/examples/index/least_priviledged_template.yml b/docs/examples/index/least_priviledged_template.yml new file mode 
100644 index 00000000000..bb33810fca7 --- /dev/null +++ b/docs/examples/index/least_priviledged_template.yml @@ -0,0 +1,54 @@ +AWSTemplateFormatVersion: "2010-09-09" +Resources: + PowertoolsLayerIamRole: + Type: "AWS::IAM::Role" + Properties: + AssumeRolePolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: "Allow" + Principal: + Service: + - "cloudformation.amazonaws.com" + Action: + - "sts:AssumeRole" + Path: "/" + PowertoolsLayerIamPolicy: + Type: "AWS::IAM::Policy" + Properties: + PolicyName: PowertoolsLambdaLayerPolicy + PolicyDocument: + Version: "2012-10-17" + Statement: + - Sid: CloudFormationTransform + Effect: Allow + Action: cloudformation:CreateChangeSet + Resource: + - arn:aws:cloudformation:us-east-1:aws:transform/Serverless-2016-10-31 + - Sid: GetCfnTemplate + Effect: Allow + Action: + - serverlessrepo:CreateCloudFormationTemplate + - serverlessrepo:GetCloudFormationTemplate + Resource: + # this is arn of the powertools SAR app + - arn:aws:serverlessrepo:eu-west-1:057560766410:applications/aws-lambda-powertools-python-layer + - Sid: S3AccessLayer + Effect: Allow + Action: + - s3:GetObject + Resource: + # AWS publishes to an external S3 bucket locked down to your account ID + # The below example is us publishing lambda powertools + # Bucket: awsserverlessrepo-changesets-plntc6bfnfj + # Key: *****/arn:aws:serverlessrepo:eu-west-1:057560766410:applications-aws-lambda-powertools-python-layer-versions-1.10.2/aeeccf50-****-****-****-********* + - arn:aws:s3:::awsserverlessrepo-changesets-*/* + - Sid: GetLayerVersion + Effect: Allow + Action: + - lambda:PublishLayerVersion + - lambda:GetLayerVersion + Resource: + - !Sub arn:aws:lambda:${AWS::Region}:${AWS::AccountId}:layer:aws-lambda-powertools-python-layer* + Roles: + - Ref: "PowertoolsLayerIamRole" diff --git a/docs/examples/index/sar_cdk_app.py b/docs/examples/index/sar_cdk_app.py new file mode 100644 index 00000000000..5072861e0db --- /dev/null +++ b/docs/examples/index/sar_cdk_app.py @@ -0,0 
+1,35 @@ +from aws_cdk import aws_lambda +from aws_cdk import aws_sam as sam +from aws_cdk import core + +POWERTOOLS_BASE_NAME = "AWSLambdaPowertools" +# Find latest from github.com/awslabs/aws-lambda-powertools-python/releases +POWERTOOLS_VER = "1.25.6" +POWERTOOLS_ARN = "arn:aws:serverlessrepo:eu-west-1:057560766410:applications/aws-lambda-powertools-python-layer" + + +class SampleApp(core.Construct): + def __init__(self, scope: core.Construct, id_: str) -> None: + super().__init__(scope, id_) + + # Launches SAR App as CloudFormation nested stack and return Lambda Layer + powertools_app = sam.CfnApplication( + self, + f"{POWERTOOLS_BASE_NAME}Application", + location={"applicationId": POWERTOOLS_ARN, "semanticVersion": POWERTOOLS_VER}, + ) + + powertools_layer_arn = powertools_app.get_att("Outputs.LayerVersionArn").to_string() + powertools_layer_version = aws_lambda.LayerVersion.from_layer_version_arn( + self, f"{POWERTOOLS_BASE_NAME}", powertools_layer_arn + ) + + aws_lambda.Function( + self, + "sample-app-lambda", + runtime=aws_lambda.Runtime.PYTHON_3_8, + function_name="sample-lambda", + code=aws_lambda.Code.asset("./src"), + handler="app.handler", + layers=[powertools_layer_version], + ) diff --git a/docs/examples/index/sar_main.tf b/docs/examples/index/sar_main.tf new file mode 100644 index 00000000000..26d04f3be4d --- /dev/null +++ b/docs/examples/index/sar_main.tf @@ -0,0 +1,41 @@ +terraform { + required_version = "~> 0.13" + required_providers { + aws = "~> 3.50.0" + } +} + +provider "aws" { + region = "us-east-1" +} + +resource "aws_serverlessapplicationrepository_cloudformation_stack" "deploy_sar_stack" { + name = "aws-lambda-powertools-python-layer" + + application_id = data.aws_serverlessapplicationrepository_application.sar_app.application_id + semantic_version = data.aws_serverlessapplicationrepository_application.sar_app.semantic_version + capabilities = [ + "CAPABILITY_IAM", + "CAPABILITY_NAMED_IAM" + ] +} + +data 
"aws_serverlessapplicationrepository_application" "sar_app" { + application_id = "arn:aws:serverlessrepo:eu-west-1:057560766410:applications/aws-lambda-powertools-python-layer" + semantic_version = var.aws_powertools_version +} + +variable "aws_powertools_version" { + type = string + default = "1.25.6" + description = "The AWS Powertools release version" +} + +output "deployed_powertools_sar_version" { + value = data.aws_serverlessapplicationrepository_application.sar_app.semantic_version +} + +# Fetch Lambda Powertools Layer ARN from deployed SAR App +output "aws_lambda_powertools_layer_arn" { + value = aws_serverlessapplicationrepository_cloudformation_stack.deploy_sar_stack.outputs.LayerVersionArn +} diff --git a/docs/examples/index/sar_template.yml b/docs/examples/index/sar_template.yml new file mode 100644 index 00000000000..5a66c68d99a --- /dev/null +++ b/docs/examples/index/sar_template.yml @@ -0,0 +1,19 @@ +AWSTemplateFormatVersion: '2010-09-09' +Transform: AWS::Serverless-2016-10-31 +Resources: + AwsLambdaPowertoolsPythonLayer: + Type: AWS::Serverless::Application + Properties: + Location: + ApplicationId: arn:aws:serverlessrepo:eu-west-1:057560766410:applications/aws-lambda-powertools-python-layer + SemanticVersion: 1.25.6 # change to latest semantic version available in SAR + + MyLambdaFunction: + Type: AWS::Serverless::Function + Properties: + CodeUri: src + Handler: app.lambda_handler + Runtime: python3.9 + Layers: + # fetch Layer ARN from SAR App stack output + - !GetAtt AwsLambdaPowertoolsPythonLayer.Outputs.LayerVersionArn diff --git a/docs/examples/tutorial/add_route_hello_by_name.py b/docs/examples/tutorial/add_route_hello_by_name.py new file mode 100644 index 00000000000..50d49739f2e --- /dev/null +++ b/docs/examples/tutorial/add_route_hello_by_name.py @@ -0,0 +1,10 @@ +import json + + +def hello_name(name): + return {"statusCode": 200, "body": json.dumps({"message": f"hello {name}!"})} + + +def lambda_handler(event, context): + name = 
event["pathParameters"]["name"] + return hello_name(name) diff --git a/docs/examples/tutorial/add_route_temaplate.yml b/docs/examples/tutorial/add_route_temaplate.yml new file mode 100644 index 00000000000..b8b2ab35303 --- /dev/null +++ b/docs/examples/tutorial/add_route_temaplate.yml @@ -0,0 +1,36 @@ +AWSTemplateFormatVersion: "2010-09-09" +Transform: AWS::Serverless-2016-10-31 +Description: Sample SAM Template for powertools-quickstart +Globals: + Function: + Timeout: 3 +Resources: + HelloWorldFunction: + Type: AWS::Serverless::Function + Properties: + CodeUri: hello_world/ + Handler: app.lambda_handler + Runtime: python3.9 + Events: + HelloWorld: + Type: Api + Properties: + Path: /hello + Method: get + + HelloWorldByNameFunctionName: + Type: AWS::Serverless::Function + Properties: + CodeUri: hello_world/ + Handler: hello_by_name.lambda_handler + Runtime: python3.9 + Events: + HelloWorldName: + Type: Api + Properties: + Path: /hello/{name} + Method: get +Outputs: + HelloWorldApi: + Description: "API Gateway endpoint URL for Prod stage for Hello World function" + Value: !Sub "https://${ServerlessRestApi}.execute-api.${AWS::Region}.amazonaws.com/Prod/hello/" diff --git a/docs/examples/tutorial/code_example_app.py b/docs/examples/tutorial/code_example_app.py new file mode 100644 index 00000000000..08eb6bb2b05 --- /dev/null +++ b/docs/examples/tutorial/code_example_app.py @@ -0,0 +1,9 @@ +import json + + +def hello(): + return {"statusCode": 200, "body": json.dumps({"message": "hello unknown!"})} + + +def lambda_handler(event, context): + return hello() diff --git a/docs/examples/tutorial/code_example_template.yml b/docs/examples/tutorial/code_example_template.yml new file mode 100644 index 00000000000..54d8f58c762 --- /dev/null +++ b/docs/examples/tutorial/code_example_template.yml @@ -0,0 +1,25 @@ +AWSTemplateFormatVersion: '2010-09-09' +Transform: AWS::Serverless-2016-10-31 +Description: Sample SAM Template for powertools-quickstart +Globals: + Function: + 
Timeout: 3 +Resources: + HelloWorldFunction: + Type: AWS::Serverless::Function + Properties: + CodeUri: hello_world/ + Handler: app.lambda_handler + Runtime: python3.9 + Architectures: + - x86_64 + Events: + HelloWorld: + Type: Api + Properties: + Path: /hello + Method: get +Outputs: + HelloWorldApi: + Description: "API Gateway endpoint URL for Prod stage for Hello World function" + Value: !Sub "https://${ServerlessRestApi}.execute-api.${AWS::Region}.amazonaws.com/Prod/hello/" diff --git a/docs/examples/tutorial/create_metrics_app.py b/docs/examples/tutorial/create_metrics_app.py new file mode 100644 index 00000000000..ef2f5f77afa --- /dev/null +++ b/docs/examples/tutorial/create_metrics_app.py @@ -0,0 +1,71 @@ +import os + +import boto3 + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.logging import correlation_paths + +cold_start = True +metric_namespace = "MyApp" + +logger = Logger(service="APP") +tracer = Tracer(service="APP") +metrics = boto3.client("cloudwatch") +app = APIGatewayRestResolver() + + +@tracer.capture_method +def add_greeting_metric(service: str = "APP"): + function_name = os.getenv("AWS_LAMBDA_FUNCTION_NAME", "undefined") + service_dimension = {"Name": "service", "Value": service} + function_dimension = {"Name": "function_name", "Value": function_name} + is_cold_start = True + + global cold_start + if cold_start: + cold_start = False + else: + is_cold_start = False + + return metrics.put_metric_data( + MetricData=[ + { + "MetricName": "SuccessfulGreetings", + "Dimensions": [service_dimension], + "Unit": "Count", + "Value": 1, + }, + { + "MetricName": "ColdStart", + "Dimensions": [service_dimension, function_dimension], + "Unit": "Count", + "Value": int(is_cold_start), + }, + ], + Namespace=metric_namespace, + ) + + +@app.get("/hello/") +@tracer.capture_method +def hello_name(name): + tracer.put_annotation(key="User", value=name) + 
logger.info(f"Request from {name} received") + add_greeting_metric() + return {"message": f"hello {name}!"} + + +@app.get("/hello") +@tracer.capture_method +def hello(): + tracer.put_annotation(key="User", value="unknown") + logger.info("Request from unknown received") + add_greeting_metric() + return {"message": "hello unknown!"} + + +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST, log_event=True) +@tracer.capture_lambda_handler +def lambda_handler(event, context): + return app.resolve(event, context) diff --git a/docs/examples/tutorial/create_metrics_template.yml b/docs/examples/tutorial/create_metrics_template.yml new file mode 100644 index 00000000000..c7e16d00302 --- /dev/null +++ b/docs/examples/tutorial/create_metrics_template.yml @@ -0,0 +1,31 @@ +AWSTemplateFormatVersion: "2010-09-09" +Transform: AWS::Serverless-2016-10-31 +Description: Sample SAM Template for powertools-quickstart +Globals: + Function: + Timeout: 3 +Resources: + HelloWorldFunction: + Type: AWS::Serverless::Function + Properties: + CodeUri: hello_world/ + Handler: app.lambda_handler + Runtime: python3.9 + Tracing: Active + Events: + HelloWorld: + Type: Api + Properties: + Path: /hello + Method: get + HelloWorldName: + Type: Api + Properties: + Path: /hello/{name} + Method: get + Policies: + - CloudWatchPutMetricPolicy: {} +Outputs: + HelloWorldApi: + Description: "API Gateway endpoint URL for Prod stage for Hello World function" + Value: !Sub "https://${ServerlessRestApi}.execute-api.${AWS::Region}.amazonaws.com/Prod/hello/" diff --git a/docs/examples/tutorial/create_own_router_app.py b/docs/examples/tutorial/create_own_router_app.py new file mode 100644 index 00000000000..1e70174c13e --- /dev/null +++ b/docs/examples/tutorial/create_own_router_app.py @@ -0,0 +1,37 @@ +import json + + +def hello_name(event, **kwargs): + username = event["pathParameters"]["name"] + return {"statusCode": 200, "body": json.dumps({"message": f"hello {username}!"})} + + +def 
hello(**kwargs): + return {"statusCode": 200, "body": json.dumps({"message": "hello unknown!"})} + + +class Router: + def __init__(self): + self.routes = {} + + def set(self, path, method, handler): + self.routes[f"{path}-{method}"] = handler + + def get(self, path, method): + try: + route = self.routes[f"{path}-{method}"] + except KeyError: + raise RuntimeError(f"Cannot route request to the correct method. path={path}, method={method}") + return route + + +router = Router() +router.set(path="/hello", method="GET", handler=hello) +router.set(path="/hello/{name}", method="GET", handler=hello_name) + + +def lambda_handler(event, context): + path = event["resource"] + http_method = event["httpMethod"] + method = router.get(path=path, method=http_method) + return method(event=event) diff --git a/docs/examples/tutorial/create_own_router_template.yml b/docs/examples/tutorial/create_own_router_template.yml new file mode 100644 index 00000000000..503eceebd65 --- /dev/null +++ b/docs/examples/tutorial/create_own_router_template.yml @@ -0,0 +1,28 @@ +AWSTemplateFormatVersion: "2010-09-09" +Transform: AWS::Serverless-2016-10-31 +Description: Sample SAM Template for powertools-quickstart +Globals: + Function: + Timeout: 3 +Resources: + HelloWorldFunction: + Type: AWS::Serverless::Function + Properties: + CodeUri: hello_world/ + Handler: app.lambda_handler + Runtime: python3.9 + Events: + HelloWorld: + Type: Api + Properties: + Path: /hello + Method: get + HelloWorldName: + Type: Api + Properties: + Path: /hello/{name} + Method: get +Outputs: + HelloWorldApi: + Description: "API Gateway endpoint URL for Prod stage for Hello World function" + Value: !Sub "https://${ServerlessRestApi}.execute-api.${AWS::Region}.amazonaws.com/Prod/hello/" diff --git a/docs/examples/tutorial/enrich_generate_traces_app.py b/docs/examples/tutorial/enrich_generate_traces_app.py new file mode 100644 index 00000000000..fd1b01a8afa --- /dev/null +++ b/docs/examples/tutorial/enrich_generate_traces_app.py 
@@ -0,0 +1,47 @@ +from aws_xray_sdk.core import patch_all, xray_recorder + +from aws_lambda_powertools import Logger +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.logging import correlation_paths + +logger = Logger(service="APP") + +app = APIGatewayRestResolver() +cold_start = True +patch_all() + + +@app.get("/hello/") +@xray_recorder.capture("hello_name") +def hello_name(name): + subsegment = xray_recorder.current_subsegment() + subsegment.put_annotation(key="User", value=name) + logger.info(f"Request from {name} received") + return {"message": f"hello {name}!"} + + +@app.get("/hello") +@xray_recorder.capture("hello") +def hello(): + subsegment = xray_recorder.current_subsegment() + subsegment.put_annotation(key="User", value="unknown") + logger.info("Request from unknown received") + return {"message": "hello unknown!"} + + +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST, log_event=True) +@xray_recorder.capture("handler") +def lambda_handler(event, context): + global cold_start + + subsegment = xray_recorder.current_subsegment() + if cold_start: + subsegment.put_annotation(key="ColdStart", value=cold_start) + cold_start = False + else: + subsegment.put_annotation(key="ColdStart", value=cold_start) + + result = app.resolve(event, context) + subsegment.put_metadata("response", result) + + return result diff --git a/docs/examples/tutorial/event_handler_app.py b/docs/examples/tutorial/event_handler_app.py new file mode 100644 index 00000000000..393e732f131 --- /dev/null +++ b/docs/examples/tutorial/event_handler_app.py @@ -0,0 +1,17 @@ +from aws_lambda_powertools.event_handler import APIGatewayRestResolver + +app = APIGatewayRestResolver() + + +@app.get("/hello/") +def hello_name(name): + return {"message": f"hello {name}!"} + + +@app.get("/hello") +def hello(): + return {"message": "hello unknown!"} + + +def lambda_handler(event, context): + return app.resolve(event, context) 
diff --git a/docs/examples/tutorial/generate_traces_app.py b/docs/examples/tutorial/generate_traces_app.py new file mode 100644 index 00000000000..d128dfd83ef --- /dev/null +++ b/docs/examples/tutorial/generate_traces_app.py @@ -0,0 +1,29 @@ +from aws_xray_sdk.core import xray_recorder + +from aws_lambda_powertools import Logger +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.logging import correlation_paths + +logger = Logger(service="APP") + +app = APIGatewayRestResolver() + + +@app.get("/hello/") +@xray_recorder.capture("hello_name") +def hello_name(name): + logger.info(f"Request from {name} received") + return {"message": f"hello {name}!"} + + +@app.get("/hello") +@xray_recorder.capture("hello") +def hello(): + logger.info("Request from unknown received") + return {"message": "hello unknown!"} + + +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST, log_event=True) +@xray_recorder.capture("handler") +def lambda_handler(event, context): + return app.resolve(event, context) diff --git a/docs/examples/tutorial/generate_traces_template.yml b/docs/examples/tutorial/generate_traces_template.yml new file mode 100644 index 00000000000..4b3a50e52ab --- /dev/null +++ b/docs/examples/tutorial/generate_traces_template.yml @@ -0,0 +1,31 @@ +AWSTemplateFormatVersion: "2010-09-09" +Transform: AWS::Serverless-2016-10-31 +Description: Sample SAM Template for powertools-quickstart +Globals: + Function: + Timeout: 3 + Api: + TracingEnabled: true +Resources: + HelloWorldFunction: + Type: AWS::Serverless::Function + Properties: + CodeUri: hello_world/ + Handler: app.lambda_handler + Runtime: python3.9 + Tracing: Active + Events: + HelloWorld: + Type: Api + Properties: + Path: /hello + Method: get + HelloWorldName: + Type: Api + Properties: + Path: /hello/{name} + Method: get +Outputs: + HelloWorldApi: + Description: "API Gateway endpoint URL for Prod stage for Hello World function" + Value: !Sub 
"https://${ServerlessRestApi}.execute-api.${AWS::Region}.amazonaws.com/Prod/hello/" diff --git a/docs/examples/tutorial/json_logger_app.py b/docs/examples/tutorial/json_logger_app.py new file mode 100644 index 00000000000..fd9b6243006 --- /dev/null +++ b/docs/examples/tutorial/json_logger_app.py @@ -0,0 +1,32 @@ +import logging +import os + +from pythonjsonlogger import jsonlogger + +from aws_lambda_powertools.event_handler import APIGatewayRestResolver + +logger = logging.getLogger("APP") +logHandler = logging.StreamHandler() +formatter = jsonlogger.JsonFormatter(fmt="%(asctime)s %(levelname)s %(name)s %(message)s") +logHandler.setFormatter(formatter) +logger.addHandler(logHandler) +logger.setLevel(os.getenv("LOG_LEVEL", "INFO")) + +app = APIGatewayRestResolver() + + +@app.get("/hello/") +def hello_name(name): + logger.info(f"Request from {name} received") + return {"message": f"hello {name}!"} + + +@app.get("/hello") +def hello(): + logger.info("Request from unknown received") + return {"message": "hello unknown!"} + + +def lambda_handler(event, context): + logger.debug(event) + return app.resolve(event, context) diff --git a/docs/examples/tutorial/logger_app.py b/docs/examples/tutorial/logger_app.py new file mode 100644 index 00000000000..e49d9db9abd --- /dev/null +++ b/docs/examples/tutorial/logger_app.py @@ -0,0 +1,24 @@ +from aws_lambda_powertools import Logger +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.logging import correlation_paths + +logger = Logger(service="APP") + +app = APIGatewayRestResolver() + + +@app.get("/hello/") +def hello_name(name): + logger.info(f"Request from {name} received") + return {"message": f"hello {name}!"} + + +@app.get("/hello") +def hello(): + logger.info("Request from unknown received") + return {"message": "hello unknown!"} + + +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST, log_event=True) +def lambda_handler(event, context): + 
return app.resolve(event, context) diff --git a/docs/examples/tutorial/metrics_app.py b/docs/examples/tutorial/metrics_app.py new file mode 100644 index 00000000000..82c59972b0e --- /dev/null +++ b/docs/examples/tutorial/metrics_app.py @@ -0,0 +1,38 @@ +from aws_lambda_powertools import Logger, Metrics, Tracer +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.metrics import MetricUnit + +logger = Logger(service="APP") +tracer = Tracer(service="APP") +metrics = Metrics(namespace="MyApp", service="APP") +app = APIGatewayRestResolver() + + +@app.get("/hello/") +@tracer.capture_method +def hello_name(name): + tracer.put_annotation(key="User", value=name) + logger.info(f"Request from {name} received") + metrics.add_metric(name="SuccessfulGreetings", unit=MetricUnit.Count, value=1) + return {"message": f"hello {name}!"} + + +@app.get("/hello") +@tracer.capture_method +def hello(): + tracer.put_annotation(key="User", value="unknown") + logger.info("Request from unknown received") + metrics.add_metric(name="SuccessfulGreetings", unit=MetricUnit.Count, value=1) + return {"message": "hello unknown!"} + + +@tracer.capture_lambda_handler +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST, log_event=True) +@metrics.log_metrics(capture_cold_start_metric=True) +def lambda_handler(event, context): + try: + return app.resolve(event, context) + except Exception as e: + logger.exception(e) + raise diff --git a/docs/examples/tutorial/tracer_app.py b/docs/examples/tutorial/tracer_app.py new file mode 100644 index 00000000000..3e0f2d32943 --- /dev/null +++ b/docs/examples/tutorial/tracer_app.py @@ -0,0 +1,29 @@ +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.logging import correlation_paths + +logger = Logger(service="APP") +tracer = 
Tracer(service="APP") +app = APIGatewayRestResolver() + + +@app.get("/hello/") +@tracer.capture_method +def hello_name(name): + tracer.put_annotation(key="User", value=name) + logger.info(f"Request from {name} received") + return {"message": f"hello {name}!"} + + +@app.get("/hello") +@tracer.capture_method +def hello(): + tracer.put_annotation(key="User", value="unknown") + logger.info("Request from unknown received") + return {"message": "hello unknown!"} + + +@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST, log_event=True) +@tracer.capture_lambda_handler +def lambda_handler(event, context): + return app.resolve(event, context) diff --git a/docs/examples/utilities/batch/caveats_tracer_response_auto_capture.py b/docs/examples/utilities/batch/caveats_tracer_response_auto_capture.py new file mode 100644 index 00000000000..f579580cd4b --- /dev/null +++ b/docs/examples/utilities/batch/caveats_tracer_response_auto_capture.py @@ -0,0 +1,25 @@ +import json + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor +from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord +from aws_lambda_powertools.utilities.typing import LambdaContext + +processor = BatchProcessor(event_type=EventType.SQS) +tracer = Tracer() +logger = Logger() + + +@tracer.capture_method(capture_response=False) +def record_handler(record: SQSRecord): + payload: str = record.body + if payload: + item: dict = json.loads(payload) + ... 
+ + +@logger.inject_lambda_context +@tracer.capture_lambda_handler +@batch_processor(record_handler=record_handler, processor=processor) +def lambda_handler(event, context: LambdaContext): + return processor.response() diff --git a/docs/examples/utilities/batch/custom_batch_processor.py b/docs/examples/utilities/batch/custom_batch_processor.py new file mode 100644 index 00000000000..f3944f3be86 --- /dev/null +++ b/docs/examples/utilities/batch/custom_batch_processor.py @@ -0,0 +1,62 @@ +import os +from random import randint + +import boto3 + +from aws_lambda_powertools.utilities.batch import BasePartialProcessor, batch_processor + +table_name = os.getenv("TABLE_NAME", "table_not_found") + + +class MyPartialProcessor(BasePartialProcessor): + """ + Process a record and stores successful results at a Amazon DynamoDB Table + + Parameters + ---------- + table_name: str + DynamoDB table name to write results to + """ + + def __init__(self, table_name: str): + self.table_name = table_name + + super().__init__() + + def _prepare(self): + # It's called once, *before* processing + # Creates table resource and clean previous results + self.ddb_table = boto3.resource("dynamodb").Table(self.table_name) + self.success_messages.clear() + + def _clean(self): + # It's called once, *after* closing processing all records (closing the context manager) + # Here we're sending, at once, all successful messages to a ddb table + with self.ddb_table.batch_writer() as batch: + for result in self.success_messages: + batch.put_item(Item=result) + + def _process_record(self, record): + # It handles how your record is processed + # Here we're keeping the status of each run + # where self.handler is the record_handler function passed as an argument + try: + result = self.handler(record) # record_handler passed to decorator/context manager + return self.success_handler(record, result) + except Exception as exc: + return self.failure_handler(record, exc) + + def success_handler(self, record, 
result): + entry = ("success", result, record) + message = {"age": result} + self.success_messages.append(message) + return entry + + +def record_handler(record): + return randint(0, 100) + + +@batch_processor(record_handler=record_handler, processor=MyPartialProcessor(table_name)) +def lambda_handler(event, context): + return {"statusCode": 200} diff --git a/docs/examples/utilities/batch/custom_boto3_session_context_manager.py b/docs/examples/utilities/batch/custom_boto3_session_context_manager.py new file mode 100644 index 00000000000..8644a726bcb --- /dev/null +++ b/docs/examples/utilities/batch/custom_boto3_session_context_manager.py @@ -0,0 +1,23 @@ +import boto3 + +from aws_lambda_powertools.utilities.batch import PartialSQSProcessor + +session = boto3.session.Session() + + +def record_handler(record): + # This will be called for each individual message from a batch + # It should raise an exception if the message was not processed successfully + return_value = do_something_with(record["body"]) + return return_value + + +def lambda_handler(event, context): + records = event["Records"] + + processor = PartialSQSProcessor(boto3_session=session) + + with processor(records, record_handler): + result = processor.process() + + return result diff --git a/docs/examples/utilities/batch/custom_boto3_session_decorator.py b/docs/examples/utilities/batch/custom_boto3_session_decorator.py new file mode 100644 index 00000000000..f7c6b41ca3f --- /dev/null +++ b/docs/examples/utilities/batch/custom_boto3_session_decorator.py @@ -0,0 +1,17 @@ +import boto3 + +from aws_lambda_powertools.utilities.batch import sqs_batch_processor + +session = boto3.session.Session() + + +def record_handler(record): + # This will be called for each individual message from a batch + # It should raise an exception if the message was not processed successfully + return_value = do_something_with(record["body"]) + return return_value + + +@sqs_batch_processor(record_handler=record_handler, 
boto3_session=session) +def lambda_handler(event, context): + return {"statusCode": 200} diff --git a/docs/examples/utilities/batch/custom_config_context_manager.py b/docs/examples/utilities/batch/custom_config_context_manager.py new file mode 100644 index 00000000000..8f080d02b5e --- /dev/null +++ b/docs/examples/utilities/batch/custom_config_context_manager.py @@ -0,0 +1,23 @@ +from botocore.config import Config + +from aws_lambda_powertools.utilities.batch import PartialSQSProcessor + +config = Config(region_name="us-east-1") + + +def record_handler(record): + # This will be called for each individual message from a batch + # It should raise an exception if the message was not processed successfully + return_value = do_something_with(record["body"]) + return return_value + + +def lambda_handler(event, context): + records = event["Records"] + + processor = PartialSQSProcessor(config=config) + + with processor(records, record_handler): + result = processor.process() + + return result diff --git a/docs/examples/utilities/batch/custom_config_decorator.py b/docs/examples/utilities/batch/custom_config_decorator.py new file mode 100644 index 00000000000..fb71355c0c1 --- /dev/null +++ b/docs/examples/utilities/batch/custom_config_decorator.py @@ -0,0 +1,17 @@ +from botocore.config import Config + +from aws_lambda_powertools.utilities.batch import sqs_batch_processor + +config = Config(region_name="us-east-1") + + +def record_handler(record): + # This will be called for each individual message from a batch + # It should raise an exception if the message was not processed successfully + return_value = do_something_with(record["body"]) + return return_value + + +@sqs_batch_processor(record_handler=record_handler, config=config) +def lambda_handler(event, context): + return {"statusCode": 200} diff --git a/docs/examples/utilities/batch/dynamodb_streams_context_manager.py b/docs/examples/utilities/batch/dynamodb_streams_context_manager.py new file mode 100644 index 
00000000000..1fae494cfa8 --- /dev/null +++ b/docs/examples/utilities/batch/dynamodb_streams_context_manager.py @@ -0,0 +1,30 @@ +import json + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor +from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import DynamoDBRecord +from aws_lambda_powertools.utilities.typing import LambdaContext + +processor = BatchProcessor(event_type=EventType.DynamoDBStreams) +tracer = Tracer() +logger = Logger() + + +@tracer.capture_method +def record_handler(record: DynamoDBRecord): + logger.info(record.dynamodb.new_image) + payload: dict = json.loads(record.dynamodb.new_image.get("item").s_value) + # alternatively: + # changes: Dict[str, dynamo_db_stream_event.AttributeValue] = record.dynamodb.new_image + # payload = change.get("Message").raw_event -> {"S": ""} + ... + + +@logger.inject_lambda_context +@tracer.capture_lambda_handler +def lambda_handler(event, context: LambdaContext): + batch = event["Records"] + with processor(records=batch, handler=record_handler): + processed_messages = processor.process() # kick off processing, return list[tuple] + + return processor.response() diff --git a/docs/examples/utilities/batch/dynamodb_streams_decorator.py b/docs/examples/utilities/batch/dynamodb_streams_decorator.py new file mode 100644 index 00000000000..ea63d834b88 --- /dev/null +++ b/docs/examples/utilities/batch/dynamodb_streams_decorator.py @@ -0,0 +1,27 @@ +import json + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor +from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import DynamoDBRecord +from aws_lambda_powertools.utilities.typing import LambdaContext + +processor = BatchProcessor(event_type=EventType.DynamoDBStreams) +tracer = Tracer() +logger = Logger() + + +@tracer.capture_method +def 
record_handler(record: DynamoDBRecord): + logger.info(record.dynamodb.new_image) + payload: dict = json.loads(record.dynamodb.new_image.get("Message").get_value) + # alternatively: + # changes: Dict[str, dynamo_db_stream_event.AttributeValue] = record.dynamodb.new_image + # payload = change.get("Message").raw_event -> {"S": ""} + ... + + +@logger.inject_lambda_context +@tracer.capture_lambda_handler +@batch_processor(record_handler=record_handler, processor=processor) +def lambda_handler(event, context: LambdaContext): + return processor.response() diff --git a/docs/examples/utilities/batch/dynamodb_streams_pydantic_inheritance.py b/docs/examples/utilities/batch/dynamodb_streams_pydantic_inheritance.py new file mode 100644 index 00000000000..f518bc65dd1 --- /dev/null +++ b/docs/examples/utilities/batch/dynamodb_streams_pydantic_inheritance.py @@ -0,0 +1,48 @@ +import json +from typing import Dict, Literal, Optional + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor +from aws_lambda_powertools.utilities.parser import BaseModel, validator +from aws_lambda_powertools.utilities.parser.models import DynamoDBStreamChangedRecordModel, DynamoDBStreamRecordModel +from aws_lambda_powertools.utilities.typing import LambdaContext + + +class Order(BaseModel): + item: dict + + +class OrderDynamoDB(BaseModel): + Message: Order + + # auto transform json string + # so Pydantic can auto-initialize nested Order model + @validator("Message", pre=True) + def transform_message_to_dict(cls, value: Dict[Literal["S"], str]): + return json.loads(value["S"]) + + +class OrderDynamoDBChangeRecord(DynamoDBStreamChangedRecordModel): + NewImage: Optional[OrderDynamoDB] + OldImage: Optional[OrderDynamoDB] + + +class OrderDynamoDBRecord(DynamoDBStreamRecordModel): + dynamodb: OrderDynamoDBChangeRecord + + +processor = BatchProcessor(event_type=EventType.DynamoDBStreams, model=OrderDynamoDBRecord) +tracer 
= Tracer() +logger = Logger() + + +@tracer.capture_method +def record_handler(record: OrderDynamoDBRecord): + return record.dynamodb.NewImage.Message.item + + +@logger.inject_lambda_context +@tracer.capture_lambda_handler +@batch_processor(record_handler=record_handler, processor=processor) +def lambda_handler(event, context: LambdaContext): + return processor.response() diff --git a/docs/examples/utilities/batch/dynamodb_streams_template.yml b/docs/examples/utilities/batch/dynamodb_streams_template.yml new file mode 100644 index 00000000000..39449b32628 --- /dev/null +++ b/docs/examples/utilities/batch/dynamodb_streams_template.yml @@ -0,0 +1,66 @@ +AWSTemplateFormatVersion: '2010-09-09' +Transform: AWS::Serverless-2016-10-31 +Description: partial batch response sample + +Globals: + Function: + Timeout: 5 + MemorySize: 256 + Runtime: python3.9 + Tracing: Active + Environment: + Variables: + LOG_LEVEL: INFO + POWERTOOLS_SERVICE_NAME: hello + +Resources: + HelloWorldFunction: + Type: AWS::Serverless::Function + Properties: + Handler: app.lambda_handler + CodeUri: hello_world + Policies: + # Lambda Destinations require additional permissions + # to send failure records from Kinesis/DynamoDB + - Version: "2012-10-17" + Statement: + Effect: "Allow" + Action: + - sqs:GetQueueAttributes + - sqs:GetQueueUrl + - sqs:SendMessage + Resource: !GetAtt SampleDLQ.Arn + Events: + DynamoDBStream: + Type: DynamoDB + Properties: + Stream: !GetAtt SampleTable.StreamArn + StartingPosition: LATEST + MaximumRetryAttempts: 2 + DestinationConfig: + OnFailure: + Destination: !GetAtt SampleDLQ.Arn + FunctionResponseTypes: + - ReportBatchItemFailures + + SampleDLQ: + Type: AWS::SQS::Queue + + SampleTable: + Type: AWS::DynamoDB::Table + Properties: + BillingMode: PAY_PER_REQUEST + AttributeDefinitions: + - AttributeName: pk + AttributeType: S + - AttributeName: sk + AttributeType: S + KeySchema: + - AttributeName: pk + KeyType: HASH + - AttributeName: sk + KeyType: RANGE + SSESpecification: + 
SSEEnabled: yes + StreamSpecification: + StreamViewType: NEW_AND_OLD_IMAGES diff --git a/docs/examples/utilities/batch/kinesis_data_streams_context_manager.py b/docs/examples/utilities/batch/kinesis_data_streams_context_manager.py new file mode 100644 index 00000000000..1e4a580b34b --- /dev/null +++ b/docs/examples/utilities/batch/kinesis_data_streams_context_manager.py @@ -0,0 +1,27 @@ +import json + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor +from aws_lambda_powertools.utilities.data_classes.kinesis_stream_event import KinesisStreamRecord +from aws_lambda_powertools.utilities.typing import LambdaContext + +processor = BatchProcessor(event_type=EventType.KinesisDataStreams) +tracer = Tracer() +logger = Logger() + + +@tracer.capture_method +def record_handler(record: KinesisStreamRecord): + logger.info(record.kinesis.data_as_text) + payload: dict = record.kinesis.data_as_json() + ... + + +@logger.inject_lambda_context +@tracer.capture_lambda_handler +def lambda_handler(event, context: LambdaContext): + batch = event["Records"] + with processor(records=batch, handler=record_handler): + processed_messages = processor.process() # kick off processing, return list[tuple] + + return processor.response() diff --git a/docs/examples/utilities/batch/kinesis_data_streams_decorator.py b/docs/examples/utilities/batch/kinesis_data_streams_decorator.py new file mode 100644 index 00000000000..6996c2a033c --- /dev/null +++ b/docs/examples/utilities/batch/kinesis_data_streams_decorator.py @@ -0,0 +1,24 @@ +import json + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor +from aws_lambda_powertools.utilities.data_classes.kinesis_stream_event import KinesisStreamRecord +from aws_lambda_powertools.utilities.typing import LambdaContext + +processor = 
BatchProcessor(event_type=EventType.KinesisDataStreams) +tracer = Tracer() +logger = Logger() + + +@tracer.capture_method +def record_handler(record: KinesisStreamRecord): + logger.info(record.kinesis.data_as_text) + payload: dict = record.kinesis.data_as_json() + ... + + +@logger.inject_lambda_context +@tracer.capture_lambda_handler +@batch_processor(record_handler=record_handler, processor=processor) +def lambda_handler(event, context: LambdaContext): + return processor.response() diff --git a/docs/examples/utilities/batch/kinesis_data_streams_pydantic_inheritance.py b/docs/examples/utilities/batch/kinesis_data_streams_pydantic_inheritance.py new file mode 100644 index 00000000000..0c80478b918 --- /dev/null +++ b/docs/examples/utilities/batch/kinesis_data_streams_pydantic_inheritance.py @@ -0,0 +1,45 @@ +import json + +from pydantic import BaseModel + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor +from aws_lambda_powertools.utilities.parser import BaseModel, validator +from aws_lambda_powertools.utilities.parser.models import KinesisDataStreamRecord, KinesisDataStreamRecordPayload +from aws_lambda_powertools.utilities.typing import LambdaContext + + +class Order(BaseModel): + item: dict + + +class OrderKinesisPayloadRecord(KinesisDataStreamRecordPayload): + data: Order + + # auto transform json string + # so Pydantic can auto-initialize nested Order model + @validator("data", pre=True) + def transform_message_to_dict(cls, value: str): + # Powertools KinesisDataStreamRecordPayload already decodes b64 to str here + return json.loads(value) + + +class OrderKinesisRecord(KinesisDataStreamRecord): + kinesis: OrderKinesisPayloadRecord + + +processor = BatchProcessor(event_type=EventType.KinesisDataStreams, model=OrderKinesisRecord) +tracer = Tracer() +logger = Logger() + + +@tracer.capture_method +def record_handler(record: OrderKinesisRecord): + return 
record.kinesis.data.item + + +@logger.inject_lambda_context +@tracer.capture_lambda_handler +@batch_processor(record_handler=record_handler, processor=processor) +def lambda_handler(event, context: LambdaContext): + return processor.response() diff --git a/docs/examples/utilities/batch/kinesis_data_streams_template.yml b/docs/examples/utilities/batch/kinesis_data_streams_template.yml new file mode 100644 index 00000000000..6acb7c9ec32 --- /dev/null +++ b/docs/examples/utilities/batch/kinesis_data_streams_template.yml @@ -0,0 +1,53 @@ +AWSTemplateFormatVersion: '2010-09-09' +Transform: AWS::Serverless-2016-10-31 +Description: partial batch response sample + +Globals: + Function: + Timeout: 5 + MemorySize: 256 + Runtime: python3.9 + Tracing: Active + Environment: + Variables: + LOG_LEVEL: INFO + POWERTOOLS_SERVICE_NAME: hello + +Resources: + HelloWorldFunction: + Type: AWS::Serverless::Function + Properties: + Handler: app.lambda_handler + CodeUri: hello_world + Policies: + # Lambda Destinations require additional permissions + # to send failure records to DLQ from Kinesis/DynamoDB + - Version: "2012-10-17" + Statement: + Effect: "Allow" + Action: + - sqs:GetQueueAttributes + - sqs:GetQueueUrl + - sqs:SendMessage + Resource: !GetAtt SampleDLQ.Arn + Events: + KinesisStream: + Type: Kinesis + Properties: + Stream: !GetAtt SampleStream.Arn + BatchSize: 100 + StartingPosition: LATEST + MaximumRetryAttempts: 2 + DestinationConfig: + OnFailure: + Destination: !GetAtt SampleDLQ.Arn + FunctionResponseTypes: + - ReportBatchItemFailures + + SampleDLQ: + Type: AWS::SQS::Queue + + SampleStream: + Type: AWS::Kinesis::Stream + Properties: + ShardCount: 1 diff --git a/docs/examples/utilities/batch/migration_context_manager_after.py b/docs/examples/utilities/batch/migration_context_manager_after.py new file mode 100644 index 00000000000..466554165af --- /dev/null +++ b/docs/examples/utilities/batch/migration_context_manager_after.py @@ -0,0 +1,17 @@ +from 
aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor + + +def record_handler(record): + return_value = do_something_with(record["body"]) + return return_value + + +def lambda_handler(event, context): + records = event["Records"] + + processor = BatchProcessor(event_type=EventType.SQS) + + with processor(records, record_handler): + result = processor.process() + + return processor.response() diff --git a/docs/examples/utilities/batch/migration_context_manager_before.py b/docs/examples/utilities/batch/migration_context_manager_before.py new file mode 100644 index 00000000000..d1420838c06 --- /dev/null +++ b/docs/examples/utilities/batch/migration_context_manager_before.py @@ -0,0 +1,21 @@ +from botocore.config import Config + +from aws_lambda_powertools.utilities.batch import PartialSQSProcessor + +config = Config(region_name="us-east-1") + + +def record_handler(record): + return_value = do_something_with(record["body"]) + return return_value + + +def lambda_handler(event, context): + records = event["Records"] + + processor = PartialSQSProcessor(config=config) + + with processor(records, record_handler): + result = processor.process() + + return result diff --git a/docs/examples/utilities/batch/migration_decorator_after.py b/docs/examples/utilities/batch/migration_decorator_after.py new file mode 100644 index 00000000000..8b833eb1eb0 --- /dev/null +++ b/docs/examples/utilities/batch/migration_decorator_after.py @@ -0,0 +1,14 @@ +import json + +from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor + +processor = BatchProcessor(event_type=EventType.SQS) + + +def record_handler(record): + return do_something_with(record["body"]) + + +@batch_processor(record_handler=record_handler, processor=processor) +def lambda_handler(event, context): + return processor.response() diff --git a/docs/examples/utilities/batch/migration_decorator_before.py 
b/docs/examples/utilities/batch/migration_decorator_before.py new file mode 100644 index 00000000000..8684d0405b7 --- /dev/null +++ b/docs/examples/utilities/batch/migration_decorator_before.py @@ -0,0 +1,10 @@ +from aws_lambda_powertools.utilities.batch import sqs_batch_processor + + +def record_handler(record): + return do_something_with(record["body"]) + + +@sqs_batch_processor(record_handler=record_handler) +def lambda_handler(event, context): + return {"statusCode": 200} diff --git a/docs/examples/utilities/batch/sentry_integration.py b/docs/examples/utilities/batch/sentry_integration.py new file mode 100644 index 00000000000..10adf570e6b --- /dev/null +++ b/docs/examples/utilities/batch/sentry_integration.py @@ -0,0 +1,11 @@ +from typing import Tuple + +from sentry_sdk import capture_exception + +from aws_lambda_powertools.utilities.batch import BatchProcessor, FailureResponse + + +class MyProcessor(BatchProcessor): + def failure_handler(self, record, exception) -> FailureResponse: + capture_exception() # send exception to Sentry + return super().failure_handler(record, exception) diff --git a/docs/examples/utilities/batch/sqs_batch_processor_extension.py b/docs/examples/utilities/batch/sqs_batch_processor_extension.py new file mode 100644 index 00000000000..ac86014cbbe --- /dev/null +++ b/docs/examples/utilities/batch/sqs_batch_processor_extension.py @@ -0,0 +1,39 @@ +import json + +from aws_lambda_powertools import Metrics, Tracer +from aws_lambda_powertools.metrics import MetricUnit +from aws_lambda_powertools.utilities.batch import ( + BatchProcessor, + EventType, + ExceptionInfo, + FailureResponse, + batch_processor, +) +from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord +from aws_lambda_powertools.utilities.typing import LambdaContext + +tracer = Tracer() + + +class MyProcessor(BatchProcessor): + def failure_handler(self, record: SQSRecord, exception: ExceptionInfo) -> FailureResponse: + 
metrics.add_metric(name="BatchRecordFailures", unit=MetricUnit.Count, value=1) + return super().failure_handler(record, exception) + + +processor = MyProcessor(event_type=EventType.SQS) +metrics = Metrics(namespace="test") + + +@tracer.capture_method +def record_handler(record: SQSRecord): + payload: str = record.body + if payload: + item: dict = json.loads(payload) + ... + + +@metrics.log_metrics(capture_cold_start_metric=True) +@batch_processor(record_handler=record_handler, processor=processor) +def lambda_handler(event, context: LambdaContext): + return processor.response() diff --git a/docs/examples/utilities/batch/sqs_context_manager.py b/docs/examples/utilities/batch/sqs_context_manager.py new file mode 100644 index 00000000000..a0193a61cfa --- /dev/null +++ b/docs/examples/utilities/batch/sqs_context_manager.py @@ -0,0 +1,28 @@ +import json + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor +from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord +from aws_lambda_powertools.utilities.typing import LambdaContext + +processor = BatchProcessor(event_type=EventType.SQS) +tracer = Tracer() +logger = Logger() + + +@tracer.capture_method +def record_handler(record: SQSRecord): + payload: str = record.body + if payload: + item: dict = json.loads(payload) + ... 
+ + +@logger.inject_lambda_context +@tracer.capture_lambda_handler +def lambda_handler(event, context: LambdaContext): + batch = event["Records"] + with processor(records=batch, handler=record_handler): + processed_messages = processor.process() # kick off processing, return list[tuple] + + return processor.response() diff --git a/docs/examples/utilities/batch/sqs_decorator.py b/docs/examples/utilities/batch/sqs_decorator.py new file mode 100644 index 00000000000..0977bfd09b1 --- /dev/null +++ b/docs/examples/utilities/batch/sqs_decorator.py @@ -0,0 +1,25 @@ +import json + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor +from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord +from aws_lambda_powertools.utilities.typing import LambdaContext + +processor = BatchProcessor(event_type=EventType.SQS) +tracer = Tracer() +logger = Logger() + + +@tracer.capture_method +def record_handler(record: SQSRecord): + payload: str = record.body + if payload: + item: dict = json.loads(payload) + ... 
+ + +@logger.inject_lambda_context +@tracer.capture_lambda_handler +@batch_processor(record_handler=record_handler, processor=processor) +def lambda_handler(event, context: LambdaContext): + return processor.response() diff --git a/docs/examples/utilities/batch/sqs_processed_messages_context_manager.py b/docs/examples/utilities/batch/sqs_processed_messages_context_manager.py new file mode 100644 index 00000000000..449a14b1389 --- /dev/null +++ b/docs/examples/utilities/batch/sqs_processed_messages_context_manager.py @@ -0,0 +1,34 @@ +import json +from typing import Any, List, Literal, Union + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, FailureResponse, SuccessResponse +from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord +from aws_lambda_powertools.utilities.typing import LambdaContext + +processor = BatchProcessor(event_type=EventType.SQS) +tracer = Tracer() +logger = Logger() + + +@tracer.capture_method +def record_handler(record: SQSRecord): + payload: str = record.body + if payload: + item: dict = json.loads(payload) + ... 
+ + +@logger.inject_lambda_context +@tracer.capture_lambda_handler +def lambda_handler(event, context: LambdaContext): + batch = event["Records"] + with processor(records=batch, handler=record_handler): + processed_messages: List[Union[SuccessResponse, FailureResponse]] = processor.process() + + for message in processed_messages: + status: Union[Literal["success"], Literal["fail"]] = message[0] + result: Any = message[1] + record: SQSRecord = message[2] + + return processor.response() diff --git a/docs/examples/utilities/batch/sqs_pydantic_inheritance.py b/docs/examples/utilities/batch/sqs_pydantic_inheritance.py new file mode 100644 index 00000000000..6e823fab27e --- /dev/null +++ b/docs/examples/utilities/batch/sqs_pydantic_inheritance.py @@ -0,0 +1,38 @@ +import json + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor +from aws_lambda_powertools.utilities.parser import BaseModel, validator +from aws_lambda_powertools.utilities.parser.models import SqsRecordModel +from aws_lambda_powertools.utilities.typing import LambdaContext + + +class Order(BaseModel): + item: dict + + +class OrderSqsRecord(SqsRecordModel): + body: Order + + # auto transform json string + # so Pydantic can auto-initialize nested Order model + @validator("body", pre=True) + def transform_body_to_dict(cls, value: str): + return json.loads(value) + + +processor = BatchProcessor(event_type=EventType.SQS, model=OrderSqsRecord) +tracer = Tracer() +logger = Logger() + + +@tracer.capture_method +def record_handler(record: OrderSqsRecord): + return record.body.item + + +@logger.inject_lambda_context +@tracer.capture_lambda_handler +@batch_processor(record_handler=record_handler, processor=processor) +def lambda_handler(event, context: LambdaContext): + return processor.response() diff --git a/docs/examples/utilities/batch/sqs_template.yml b/docs/examples/utilities/batch/sqs_template.yml new file mode 100644 
index 00000000000..1c1075e3be8 --- /dev/null +++ b/docs/examples/utilities/batch/sqs_template.yml @@ -0,0 +1,42 @@ +AWSTemplateFormatVersion: '2010-09-09' +Transform: AWS::Serverless-2016-10-31 +Description: partial batch response sample + +Globals: + Function: + Timeout: 5 + MemorySize: 256 + Runtime: python3.9 + Tracing: Active + Environment: + Variables: + LOG_LEVEL: INFO + POWERTOOLS_SERVICE_NAME: hello + +Resources: + HelloWorldFunction: + Type: AWS::Serverless::Function + Properties: + Handler: app.lambda_handler + CodeUri: hello_world + Policies: + - SQSPollerPolicy: + QueueName: !GetAtt SampleQueue.QueueName + Events: + Batch: + Type: SQS + Properties: + Queue: !GetAtt SampleQueue.Arn + FunctionResponseTypes: + - ReportBatchItemFailures + + SampleDLQ: + Type: AWS::SQS::Queue + + SampleQueue: + Type: AWS::SQS::Queue + Properties: + VisibilityTimeout: 30 # Fn timeout * 6 + RedrivePolicy: + maxReceiveCount: 2 + deadLetterTargetArn: !GetAtt SampleDLQ.Arn diff --git a/docs/examples/utilities/batch/suppress_exception_decorator_sqs_batch_processor.py b/docs/examples/utilities/batch/suppress_exception_decorator_sqs_batch_processor.py new file mode 100644 index 00000000000..f8735d396cf --- /dev/null +++ b/docs/examples/utilities/batch/suppress_exception_decorator_sqs_batch_processor.py @@ -0,0 +1,8 @@ +from aws_lambda_powertools.utilities.batch import sqs_batch_processor + +... + + +@sqs_batch_processor(record_handler=record_handler, config=config, suppress_exception=True) +def lambda_handler(event, context): + return {"statusCode": 200} diff --git a/docs/examples/utilities/batch/suppress_exception_partial_sqs_processor.py b/docs/examples/utilities/batch/suppress_exception_partial_sqs_processor.py new file mode 100644 index 00000000000..6dbd4e797f3 --- /dev/null +++ b/docs/examples/utilities/batch/suppress_exception_partial_sqs_processor.py @@ -0,0 +1,8 @@ +from aws_lambda_powertools.utilities.batch import PartialSQSProcessor + +... 
+ +processor = PartialSQSProcessor(config=config, suppress_exception=True) + +with processor(records, record_handler): + result = processor.process() diff --git a/docs/examples/utilities/batch/testing_src_app.py b/docs/examples/utilities/batch/testing_src_app.py new file mode 100644 index 00000000000..0977bfd09b1 --- /dev/null +++ b/docs/examples/utilities/batch/testing_src_app.py @@ -0,0 +1,25 @@ +import json + +from aws_lambda_powertools import Logger, Tracer +from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor +from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord +from aws_lambda_powertools.utilities.typing import LambdaContext + +processor = BatchProcessor(event_type=EventType.SQS) +tracer = Tracer() +logger = Logger() + + +@tracer.capture_method +def record_handler(record: SQSRecord): + payload: str = record.body + if payload: + item: dict = json.loads(payload) + ... + + +@logger.inject_lambda_context +@tracer.capture_lambda_handler +@batch_processor(record_handler=record_handler, processor=processor) +def lambda_handler(event, context: LambdaContext): + return processor.response() diff --git a/docs/examples/utilities/batch/testing_test_app.py b/docs/examples/utilities/batch/testing_test_app.py new file mode 100644 index 00000000000..cda6fb58c09 --- /dev/null +++ b/docs/examples/utilities/batch/testing_test_app.py @@ -0,0 +1,51 @@ +import json +from dataclasses import dataclass +from pathlib import Path + +import pytest +from src.app import lambda_handler, processor + + +def load_event(path: Path): + with path.open() as f: + return json.load(f) + + +@pytest.fixture +def lambda_context(): + @dataclass + class LambdaContext: + function_name: str = "test" + memory_limit_in_mb: int = 128 + invoked_function_arn: str = "arn:aws:lambda:eu-west-1:809313241:function:test" + aws_request_id: str = "52fdfc07-2182-154f-163f-5f0f9a621d72" + + return LambdaContext() + + +@pytest.fixture() +def sqs_event(): + 
"""Generates API GW Event""" + return load_event(path=Path("events/sqs_event.json")) + + +def test_app_batch_partial_response(sqs_event, lambda_context): + # GIVEN + processor = app.processor # access processor for additional assertions + successful_record = sqs_event["Records"][0] + failed_record = sqs_event["Records"][1] + expected_response = { + "batchItemFailures": [ + { + "itemIdentifier": failed_record["messageId"], + }, + ], + } + + # WHEN + ret = app.lambda_handler(sqs_event, lambda_context) + + # THEN + assert ret == expected_response + assert len(processor.fail_messages) == 1 + assert processor.success_messages[0] == successful_record diff --git a/docs/examples/utilities/data_classes/app_active_mq.py b/docs/examples/utilities/data_classes/app_active_mq.py new file mode 100644 index 00000000000..fd169637f35 --- /dev/null +++ b/docs/examples/utilities/data_classes/app_active_mq.py @@ -0,0 +1,15 @@ +from typing import Dict + +from aws_lambda_powertools import Logger +from aws_lambda_powertools.utilities.data_classes import event_source +from aws_lambda_powertools.utilities.data_classes.active_mq_event import ActiveMQEvent + +logger = Logger() + + +@event_source(data_class=ActiveMQEvent) +def lambda_handler(event: ActiveMQEvent, context): + for message in event.messages: + logger.debug(f"MessageID: {message.message_id}") + data: Dict = message.json_data + logger.debug("Process json in base64 encoded data str", data) diff --git a/docs/examples/utilities/data_classes/app_alb.py b/docs/examples/utilities/data_classes/app_alb.py new file mode 100644 index 00000000000..3703b02a997 --- /dev/null +++ b/docs/examples/utilities/data_classes/app_alb.py @@ -0,0 +1,7 @@ +from aws_lambda_powertools.utilities.data_classes import ALBEvent, event_source + + +@event_source(data_class=ALBEvent) +def lambda_handler(event: ALBEvent, context): + if "helloworld" in event.path and event.http_method == "POST": + do_something_with(event.json_body, event.query_string_parameters) diff 
--git a/docs/examples/utilities/data_classes/app_appsync_authorizer.py b/docs/examples/utilities/data_classes/app_appsync_authorizer.py new file mode 100644 index 00000000000..012f7beb016 --- /dev/null +++ b/docs/examples/utilities/data_classes/app_appsync_authorizer.py @@ -0,0 +1,33 @@ +from typing import Dict + +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.logging.logger import Logger +from aws_lambda_powertools.utilities.data_classes.appsync_authorizer_event import ( + AppSyncAuthorizerEvent, + AppSyncAuthorizerResponse, +) +from aws_lambda_powertools.utilities.data_classes.event_source import event_source + +logger = Logger() + + +def get_user_by_token(token: str): + """Look a user by token""" + ... + + +@logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_AUTHORIZER) +@event_source(data_class=AppSyncAuthorizerEvent) +def lambda_handler(event: AppSyncAuthorizerEvent, context) -> Dict: + user = get_user_by_token(event.authorization_token) + + if not user: + # No user found, return not authorized + return AppSyncAuthorizerResponse().asdict() + + return AppSyncAuthorizerResponse( + authorize=True, + resolver_context={"id": user.id}, + # Only allow admins to delete events + deny_fields=None if user.is_admin else ["Mutation.deleteEvent"], + ).asdict() diff --git a/docs/examples/utilities/data_classes/app_appsync_resolver.py b/docs/examples/utilities/data_classes/app_appsync_resolver.py new file mode 100644 index 00000000000..25d39e96829 --- /dev/null +++ b/docs/examples/utilities/data_classes/app_appsync_resolver.py @@ -0,0 +1,36 @@ +from aws_lambda_powertools.logging import Logger, correlation_paths +from aws_lambda_powertools.utilities.data_classes.appsync_resolver_event import ( + AppSyncIdentityCognito, + AppSyncResolverEvent, +) + +logger = Logger() + + +def get_locations(name: str = None, size: int = 0, page: int = 0): + """Your resolver logic here""" + + 
+@logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER) +def lambda_handler(event, context): + event: AppSyncResolverEvent = AppSyncResolverEvent(event) + + # Case insensitive look up of request headers + x_forwarded_for = event.get_header_value("x-forwarded-for") + + # Support for AppSyncIdentityCognito or AppSyncIdentityIAM identity types + assert isinstance(event.identity, AppSyncIdentityCognito) + identity: AppSyncIdentityCognito = event.identity + + # Logging with correlation_id + logger.debug( + { + "x-forwarded-for": x_forwarded_for, + "username": identity.username, + } + ) + + if event.type_name == "Merchant" and event.field_name == "locations": + return get_locations(**event.arguments) + + raise ValueError(f"Unsupported field resolver: {event.field_name}") diff --git a/docs/examples/utilities/data_classes/app_cloudwatch_logs.py b/docs/examples/utilities/data_classes/app_cloudwatch_logs.py new file mode 100644 index 00000000000..88008c87b59 --- /dev/null +++ b/docs/examples/utilities/data_classes/app_cloudwatch_logs.py @@ -0,0 +1,10 @@ +from aws_lambda_powertools.utilities.data_classes import CloudWatchLogsEvent, event_source +from aws_lambda_powertools.utilities.data_classes.cloud_watch_logs_event import CloudWatchLogsDecodedData + + +@event_source(data_class=CloudWatchLogsEvent) +def lambda_handler(event: CloudWatchLogsEvent, context): + decompressed_log: CloudWatchLogsDecodedData = event.parse_logs_data + log_events = decompressed_log.log_events + for event in log_events: + do_something_with(event.timestamp, event.message) diff --git a/docs/examples/utilities/data_classes/app_codepipeline_job.py b/docs/examples/utilities/data_classes/app_codepipeline_job.py new file mode 100644 index 00000000000..eb4c2125f56 --- /dev/null +++ b/docs/examples/utilities/data_classes/app_codepipeline_job.py @@ -0,0 +1,43 @@ +from aws_lambda_powertools import Logger +from aws_lambda_powertools.utilities.data_classes import CodePipelineJobEvent, 
event_source + +logger = Logger() + + +@event_source(data_class=CodePipelineJobEvent) +def lambda_handler(event, context): + """The Lambda function handler + + If a continuing job then checks the CloudFormation stack status + and updates the job accordingly. + + If a new job then kick of an update or creation of the target + CloudFormation stack. + """ + + # Extract the Job ID + job_id = event.get_id + + # Extract the params + params: dict = event.decoded_user_parameters + stack = params["stack"] + artifact_name = params["artifact"] + template_file = params["file"] + + try: + if event.data.continuation_token: + # If we're continuing then the create/update has already been triggered + # we just need to check if it has finished. + check_stack_update_status(job_id, stack) + else: + template = event.get_artifact(artifact_name, template_file) + # Kick off a stack update or create + start_update_or_create(job_id, stack, template) + except Exception as e: + # If any other exceptions which we didn't expect are raised + # then fail the job and log the exception message. + logger.exception("Function failed due to exception.") + put_job_failure(job_id, "Function exception: " + str(e)) + + logger.debug("Function complete.") + return "Complete." 
diff --git a/docs/examples/utilities/data_classes/app_cognito_create_auth_challenge.py b/docs/examples/utilities/data_classes/app_cognito_create_auth_challenge.py new file mode 100644 index 00000000000..9f57743f053 --- /dev/null +++ b/docs/examples/utilities/data_classes/app_cognito_create_auth_challenge.py @@ -0,0 +1,11 @@ +from aws_lambda_powertools.utilities.data_classes import event_source +from aws_lambda_powertools.utilities.data_classes.cognito_user_pool_event import CreateAuthChallengeTriggerEvent + + +@event_source(data_class=CreateAuthChallengeTriggerEvent) +def handler(event: CreateAuthChallengeTriggerEvent, context) -> dict: + if event.request.challenge_name == "CUSTOM_CHALLENGE": + event.response.public_challenge_parameters = {"captchaUrl": "url/123.jpg"} + event.response.private_challenge_parameters = {"answer": "5"} + event.response.challenge_metadata = "CAPTCHA_CHALLENGE" + return event.raw_event diff --git a/docs/examples/utilities/data_classes/app_cognito_define_auth_challenge.py b/docs/examples/utilities/data_classes/app_cognito_define_auth_challenge.py new file mode 100644 index 00000000000..e7efad058a9 --- /dev/null +++ b/docs/examples/utilities/data_classes/app_cognito_define_auth_challenge.py @@ -0,0 +1,29 @@ +from aws_lambda_powertools.utilities.data_classes.cognito_user_pool_event import DefineAuthChallengeTriggerEvent + + +def handler(event: dict, context) -> dict: + event: DefineAuthChallengeTriggerEvent = DefineAuthChallengeTriggerEvent(event) + if len(event.request.session) == 1 and event.request.session[0].challenge_name == "SRP_A": + event.response.issue_tokens = False + event.response.fail_authentication = False + event.response.challenge_name = "PASSWORD_VERIFIER" + elif ( + len(event.request.session) == 2 + and event.request.session[1].challenge_name == "PASSWORD_VERIFIER" + and event.request.session[1].challenge_result + ): + event.response.issue_tokens = False + event.response.fail_authentication = False + 
event.response.challenge_name = "CUSTOM_CHALLENGE" + elif ( + len(event.request.session) == 3 + and event.request.session[2].challenge_name == "CUSTOM_CHALLENGE" + and event.request.session[2].challenge_result + ): + event.response.issue_tokens = True + event.response.fail_authentication = False + else: + event.response.issue_tokens = False + event.response.fail_authentication = True + + return event.raw_event diff --git a/docs/examples/utilities/data_classes/app_cognito_post_confirmation.py b/docs/examples/utilities/data_classes/app_cognito_post_confirmation.py new file mode 100644 index 00000000000..bae89629a0c --- /dev/null +++ b/docs/examples/utilities/data_classes/app_cognito_post_confirmation.py @@ -0,0 +1,8 @@ +from aws_lambda_powertools.utilities.data_classes.cognito_user_pool_event import PostConfirmationTriggerEvent + + +def lambda_handler(event, context): + event: PostConfirmationTriggerEvent = PostConfirmationTriggerEvent(event) + + user_attributes = event.request.user_attributes + do_something_with(user_attributes) diff --git a/docs/examples/utilities/data_classes/app_cognito_verify_auth_challenge_response.py b/docs/examples/utilities/data_classes/app_cognito_verify_auth_challenge_response.py new file mode 100644 index 00000000000..95b96c4d2f9 --- /dev/null +++ b/docs/examples/utilities/data_classes/app_cognito_verify_auth_challenge_response.py @@ -0,0 +1,10 @@ +from aws_lambda_powertools.utilities.data_classes import event_source +from aws_lambda_powertools.utilities.data_classes.cognito_user_pool_event import VerifyAuthChallengeResponseTriggerEvent + + +@event_source(data_class=VerifyAuthChallengeResponseTriggerEvent) +def handler(event: VerifyAuthChallengeResponseTriggerEvent, context) -> dict: + event.response.answer_correct = ( + event.request.private_challenge_parameters.get("answer") == event.request.challenge_answer + ) + return event.raw_event diff --git a/docs/examples/utilities/data_classes/app_connect_contact_flow.py 
b/docs/examples/utilities/data_classes/app_connect_contact_flow.py new file mode 100644 index 00000000000..53d120a4c4b --- /dev/null +++ b/docs/examples/utilities/data_classes/app_connect_contact_flow.py @@ -0,0 +1,14 @@ +from aws_lambda_powertools.utilities.data_classes.connect_contact_flow_event import ( + ConnectContactFlowChannel, + ConnectContactFlowEndpointType, + ConnectContactFlowEvent, + ConnectContactFlowInitiationMethod, +) + + +def lambda_handler(event, context): + event: ConnectContactFlowEvent = ConnectContactFlowEvent(event) + assert event.contact_data.attributes == {"Language": "en-US"} + assert event.contact_data.channel == ConnectContactFlowChannel.VOICE + assert event.contact_data.customer_endpoint.endpoint_type == ConnectContactFlowEndpointType.TELEPHONE_NUMBER + assert event.contact_data.initiation_method == ConnectContactFlowInitiationMethod.API diff --git a/docs/examples/utilities/data_classes/app_dynamodb.py b/docs/examples/utilities/data_classes/app_dynamodb.py new file mode 100644 index 00000000000..e9bc49da03e --- /dev/null +++ b/docs/examples/utilities/data_classes/app_dynamodb.py @@ -0,0 +1,14 @@ +from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import ( + DynamoDBRecordEventName, + DynamoDBStreamEvent, +) + + +def lambda_handler(event, context): + event: DynamoDBStreamEvent = DynamoDBStreamEvent(event) + + # Multiple records can be delivered in a single event + for record in event.records: + if record.event_name == DynamoDBRecordEventName.MODIFY: + do_something_with(record.dynamodb.new_image) + do_something_with(record.dynamodb.old_image) diff --git a/docs/examples/utilities/data_classes/app_dynamodb_multiple_records_types.py b/docs/examples/utilities/data_classes/app_dynamodb_multiple_records_types.py new file mode 100644 index 00000000000..53b38918260 --- /dev/null +++ b/docs/examples/utilities/data_classes/app_dynamodb_multiple_records_types.py @@ -0,0 +1,16 @@ +from 
aws_lambda_powertools.utilities.data_classes import DynamoDBStreamEvent, event_source +from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import AttributeValue, AttributeValueType +from aws_lambda_powertools.utilities.typing import LambdaContext + + +@event_source(data_class=DynamoDBStreamEvent) +def lambda_handler(event: DynamoDBStreamEvent, context: LambdaContext): + for record in event.records: + key: AttributeValue = record.dynamodb.keys["id"] + if key == AttributeValueType.Number: + # {"N": "123.45"} => "123.45" + assert key.get_value == key.n_value + print(key.get_value) + elif key == AttributeValueType.Map: + assert key.get_value == key.map_value + print(key.get_value) diff --git a/docs/examples/utilities/data_classes/app_event_bridge.py b/docs/examples/utilities/data_classes/app_event_bridge.py new file mode 100644 index 00000000000..6570add4578 --- /dev/null +++ b/docs/examples/utilities/data_classes/app_event_bridge.py @@ -0,0 +1,6 @@ +from aws_lambda_powertools.utilities.data_classes import EventBridgeEvent, event_source + + +@event_source(data_class=EventBridgeEvent) +def lambda_handler(event: EventBridgeEvent, context): + do_something_with(event.detail) diff --git a/docs/examples/utilities/data_classes/app_http_api.py b/docs/examples/utilities/data_classes/app_http_api.py new file mode 100644 index 00000000000..970c4ce6f73 --- /dev/null +++ b/docs/examples/utilities/data_classes/app_http_api.py @@ -0,0 +1,7 @@ +from aws_lambda_powertools.utilities.data_classes import APIGatewayProxyEventV2, event_source + + +@event_source(data_class=APIGatewayProxyEventV2) +def lambda_handler(event: APIGatewayProxyEventV2, context): + if "helloworld" in event.path and event.http_method == "POST": + do_something_with(event.json_body, event.query_string_parameters) diff --git a/docs/examples/utilities/data_classes/app_http_api_authorizer.py b/docs/examples/utilities/data_classes/app_http_api_authorizer.py new file mode 100644 index 
00000000000..a067e0179a0 --- /dev/null +++ b/docs/examples/utilities/data_classes/app_http_api_authorizer.py @@ -0,0 +1,25 @@ +from secrets import compare_digest + +from aws_lambda_powertools.utilities.data_classes import event_source +from aws_lambda_powertools.utilities.data_classes.api_gateway_authorizer_event import ( + APIGatewayAuthorizerEventV2, + APIGatewayAuthorizerResponseV2, +) + + +def get_user_by_token(token): + if compare_digest(token, "Foo"): + return {"name": "Foo"} + return None + + +@event_source(data_class=APIGatewayAuthorizerEventV2) +def handler(event: APIGatewayAuthorizerEventV2, context): + user = get_user_by_token(event.get_header_value("x-token")) + + if user is None: + # No user was found, so we return not authorized + return APIGatewayAuthorizerResponseV2().asdict() + + # Found the user and setting the details in the context + return APIGatewayAuthorizerResponseV2(authorize=True, context=user).asdict() diff --git a/docs/examples/utilities/data_classes/app_kinesis_data_streams.py b/docs/examples/utilities/data_classes/app_kinesis_data_streams.py new file mode 100644 index 00000000000..77a6fcfeb7a --- /dev/null +++ b/docs/examples/utilities/data_classes/app_kinesis_data_streams.py @@ -0,0 +1,14 @@ +from aws_lambda_powertools.utilities.data_classes import KinesisStreamEvent, event_source + + +@event_source(data_class=KinesisStreamEvent) +def lambda_handler(event: KinesisStreamEvent, context): + kinesis_record = next(event.records).kinesis + + # if data was delivered as text + data = kinesis_record.data_as_text() + + # if data was delivered as json + data = kinesis_record.data_as_json() + + do_something_with(data) diff --git a/docs/examples/utilities/data_classes/app_rabbit_mq.py b/docs/examples/utilities/data_classes/app_rabbit_mq.py new file mode 100644 index 00000000000..e4d241b1bdb --- /dev/null +++ b/docs/examples/utilities/data_classes/app_rabbit_mq.py @@ -0,0 +1,17 @@ +from typing import Dict + +from aws_lambda_powertools import Logger 
+from aws_lambda_powertools.utilities.data_classes import event_source +from aws_lambda_powertools.utilities.data_classes.rabbit_mq_event import RabbitMQEvent + +logger = Logger() + + +@event_source(data_class=RabbitMQEvent) +def lambda_handler(event: RabbitMQEvent, context): + for queue_name, messages in event.rmq_messages_by_queue.items(): + logger.debug(f"Messages for queue: {queue_name}") + for message in messages: + logger.debug(f"MessageID: {message.basic_properties.message_id}") + data: Dict = message.json_data + logger.debug("Process json in base64 encoded data str", data) diff --git a/docs/examples/utilities/data_classes/app_rest_api.py b/docs/examples/utilities/data_classes/app_rest_api.py new file mode 100644 index 00000000000..21921a6c09a --- /dev/null +++ b/docs/examples/utilities/data_classes/app_rest_api.py @@ -0,0 +1,10 @@ +from aws_lambda_powertools.utilities.data_classes import APIGatewayProxyEvent, event_source + + +@event_source(data_class=APIGatewayProxyEvent) +def lambda_handler(event: APIGatewayProxyEvent, context): + if "helloworld" in event.path and event.http_method == "GET": + request_context = event.request_context + identity = request_context.identity + user = identity.user + do_something_with(event.json_body, user) diff --git a/docs/examples/utilities/data_classes/app_rest_api_type_request.py b/docs/examples/utilities/data_classes/app_rest_api_type_request.py new file mode 100644 index 00000000000..d7a96bdd4bf --- /dev/null +++ b/docs/examples/utilities/data_classes/app_rest_api_type_request.py @@ -0,0 +1,52 @@ +from secrets import compare_digest + +from aws_lambda_powertools.utilities.data_classes import event_source +from aws_lambda_powertools.utilities.data_classes.api_gateway_authorizer_event import ( + DENY_ALL_RESPONSE, + APIGatewayAuthorizerRequestEvent, + APIGatewayAuthorizerResponse, + HttpVerb, +) + + +def get_user_by_token(token): + if compare_digest(token, "admin-foo"): + return {"id": 0, "name": "Admin", "isAdmin": True} + 
elif compare_digest(token, "regular-foo"): + return {"id": 1, "name": "Joe"} + else: + return None + + +@event_source(data_class=APIGatewayAuthorizerRequestEvent) +def handler(event: APIGatewayAuthorizerRequestEvent, context): + user = get_user_by_token(event.get_header_value("Authorization")) + + if user is None: + # No user was found + # to return 401 - `{"message":"Unauthorized"}`, but pollutes lambda error count metrics + # raise Exception("Unauthorized") + # to return 403 - `{"message":"Forbidden"}` + return DENY_ALL_RESPONSE + + # parse the `methodArn` as an `APIGatewayRouteArn` + arn = event.parsed_arn + + # Create the response builder from parts of the `methodArn` + # and set the logged in user id and context + policy = APIGatewayAuthorizerResponse( + principal_id=user["id"], + context=user, + region=arn.region, + aws_account_id=arn.aws_account_id, + api_id=arn.api_id, + stage=arn.stage, + ) + + # Conditional IAM Policy + if user.get("isAdmin", False): + policy.allow_all_routes() + else: + policy.allow_route(HttpVerb.GET, "/user-profile") + + return policy.asdict() diff --git a/docs/examples/utilities/data_classes/app_rest_api_type_token.py b/docs/examples/utilities/data_classes/app_rest_api_type_token.py new file mode 100644 index 00000000000..d3af20957e1 --- /dev/null +++ b/docs/examples/utilities/data_classes/app_rest_api_type_token.py @@ -0,0 +1,24 @@ +from aws_lambda_powertools.utilities.data_classes import event_source +from aws_lambda_powertools.utilities.data_classes.api_gateway_authorizer_event import ( + APIGatewayAuthorizerResponse, + APIGatewayAuthorizerTokenEvent, +) + + +@event_source(data_class=APIGatewayAuthorizerTokenEvent) +def handler(event: APIGatewayAuthorizerTokenEvent, context): + arn = event.parsed_arn + + policy = APIGatewayAuthorizerResponse( + principal_id="user", + region=arn.region, + aws_account_id=arn.aws_account_id, + api_id=arn.api_id, + stage=arn.stage, + ) + + if event.authorization_token == "42": + 
policy.allow_all_routes() + else: + policy.deny_all_routes() + return policy.asdict() diff --git a/docs/examples/utilities/data_classes/app_s3.py b/docs/examples/utilities/data_classes/app_s3.py new file mode 100644 index 00000000000..42c7e847ce8 --- /dev/null +++ b/docs/examples/utilities/data_classes/app_s3.py @@ -0,0 +1,14 @@ +from urllib.parse import unquote_plus + +from aws_lambda_powertools.utilities.data_classes import S3Event, event_source + + +@event_source(data_class=S3Event) +def lambda_handler(event: S3Event, context): + bucket_name = event.bucket_name + + # Multiple records can be delivered in a single event + for record in event.records: + object_key = unquote_plus(record.s3.get_object.key) + + do_something_with(f"{bucket_name}/{object_key}") diff --git a/docs/examples/utilities/data_classes/app_s3_object_lambda.py b/docs/examples/utilities/data_classes/app_s3_object_lambda.py new file mode 100644 index 00000000000..7b27e766ad7 --- /dev/null +++ b/docs/examples/utilities/data_classes/app_s3_object_lambda.py @@ -0,0 +1,31 @@ +import boto3 +import requests + +from aws_lambda_powertools import Logger +from aws_lambda_powertools.logging.correlation_paths import S3_OBJECT_LAMBDA +from aws_lambda_powertools.utilities.data_classes.s3_object_event import S3ObjectLambdaEvent + +logger = Logger() +session = boto3.Session() +s3 = session.client("s3") + + +@logger.inject_lambda_context(correlation_id_path=S3_OBJECT_LAMBDA, log_event=True) +def lambda_handler(event, context): + event = S3ObjectLambdaEvent(event) + + # Get object from S3 + response = requests.get(event.input_s3_url) + original_object = response.content.decode("utf-8") + + # Make changes to the object about to be returned + transformed_object = original_object.upper() + + # Write object back to S3 Object Lambda + s3.write_get_object_response( + Body=transformed_object, + RequestRoute=event.request_route, + RequestToken=event.request_token, + ) + + return {"status_code": 200} diff --git 
a/docs/examples/utilities/data_classes/app_ses.py b/docs/examples/utilities/data_classes/app_ses.py new file mode 100644 index 00000000000..94982887857 --- /dev/null +++ b/docs/examples/utilities/data_classes/app_ses.py @@ -0,0 +1,11 @@ +from aws_lambda_powertools.utilities.data_classes import SESEvent, event_source + + +@event_source(data_class=SESEvent) +def lambda_handler(event: SESEvent, context): + # Multiple records can be delivered in a single event + for record in event.records: + mail = record.ses.mail + common_headers = mail.common_headers + + do_something_with(common_headers.to, common_headers.subject) diff --git a/docs/examples/utilities/data_classes/app_sns.py b/docs/examples/utilities/data_classes/app_sns.py new file mode 100644 index 00000000000..08b04c82504 --- /dev/null +++ b/docs/examples/utilities/data_classes/app_sns.py @@ -0,0 +1,11 @@ +from aws_lambda_powertools.utilities.data_classes import SNSEvent, event_source + + +@event_source(data_class=SNSEvent) +def lambda_handler(event: SNSEvent, context): + # Multiple records can be delivered in a single event + for record in event.records: + message = record.sns.message + subject = record.sns.subject + + do_something_with(subject, message) diff --git a/docs/examples/utilities/data_classes/app_sqs.py b/docs/examples/utilities/data_classes/app_sqs.py new file mode 100644 index 00000000000..d5031150558 --- /dev/null +++ b/docs/examples/utilities/data_classes/app_sqs.py @@ -0,0 +1,8 @@ +from aws_lambda_powertools.utilities.data_classes import SQSEvent, event_source + + +@event_source(data_class=SQSEvent) +def lambda_handler(event: SQSEvent, context): + # Multiple records can be delivered in a single event + for record in event.records: + do_something_with(record.body) diff --git a/docs/examples/utilities/data_classes/using_data_classes.py b/docs/examples/utilities/data_classes/using_data_classes.py new file mode 100644 index 00000000000..004379acdae --- /dev/null +++ 
b/docs/examples/utilities/data_classes/using_data_classes.py @@ -0,0 +1,7 @@ +from aws_lambda_powertools.utilities.data_classes import APIGatewayProxyEvent + + +def lambda_handler(event: dict, context): + event = APIGatewayProxyEvent(event) + if "helloworld" in event.path and event.http_method == "GET": + do_something_with(event.body, user) diff --git a/docs/examples/utilities/data_classes/using_data_classes_event_source.py b/docs/examples/utilities/data_classes/using_data_classes_event_source.py new file mode 100644 index 00000000000..d1688e5f898 --- /dev/null +++ b/docs/examples/utilities/data_classes/using_data_classes_event_source.py @@ -0,0 +1,7 @@ +from aws_lambda_powertools.utilities.data_classes import APIGatewayProxyEvent, event_source + + +@event_source(data_class=APIGatewayProxyEvent) +def lambda_handler(event: APIGatewayProxyEvent, context): + if "helloworld" in event.path and event.http_method == "GET": + do_something_with(event.body, user) diff --git a/docs/examples/utilities/feature_flags/app_config.py b/docs/examples/utilities/feature_flags/app_config.py new file mode 100644 index 00000000000..4519a2f985a --- /dev/null +++ b/docs/examples/utilities/feature_flags/app_config.py @@ -0,0 +1,25 @@ +import jmespath +from botocore.config import Config + +from aws_lambda_powertools.utilities.feature_flags import AppConfigStore + +boto_config = Config(read_timeout=10, retries={"total_max_attempts": 2}) + +# Custom JMESPath functions +class CustomFunctions(jmespath.functions.Functions): + @jmespath.functions.signature({"types": ["string"]}) + def _func_special_decoder(self, s): + return my_custom_decoder_logic(s) + + +custom_jmespath_options = {"custom_functions": CustomFunctions()} + +app_config = AppConfigStore( + environment="dev", + application="product-catalogue", + name="configuration", + max_age=120, + envelope="features", + sdk_config=boto_config, + jmespath_options=custom_jmespath_options, +) diff --git 
a/docs/examples/utilities/feature_flags/cache_config.py b/docs/examples/utilities/feature_flags/cache_config.py new file mode 100644 index 00000000000..82b639f9515 --- /dev/null +++ b/docs/examples/utilities/feature_flags/cache_config.py @@ -0,0 +1,8 @@ +from aws_lambda_powertools.utilities.feature_flags import AppConfigStore + +app_config = AppConfigStore( + environment="dev", + application="product-catalogue", + name="features", + max_age=300, +) diff --git a/docs/examples/utilities/feature_flags/cdk_app.py b/docs/examples/utilities/feature_flags/cdk_app.py new file mode 100644 index 00000000000..eedede094ac --- /dev/null +++ b/docs/examples/utilities/feature_flags/cdk_app.py @@ -0,0 +1,58 @@ +import json + +import aws_cdk.aws_appconfig as appconfig +from aws_cdk import core + + +class SampleFeatureFlagStore(core.Construct): + def __init__(self, scope: core.Construct, id_: str) -> None: + super().__init__(scope, id_) + + features_config = { + "premium_features": { + "default": False, + "rules": { + "customer tier equals premium": { + "when_match": True, + "conditions": [{"action": "EQUALS", "key": "tier", "value": "premium"}], + } + }, + }, + "ten_percent_off_campaign": {"default": True}, + } + + self.config_app = appconfig.CfnApplication( + self, + id="app", + name="product-catalogue", + ) + self.config_env = appconfig.CfnEnvironment( + self, + id="env", + application_id=self.config_app.ref, + name="dev-env", + ) + self.config_profile = appconfig.CfnConfigurationProfile( + self, + id="profile", + application_id=self.config_app.ref, + location_uri="hosted", + name="features", + ) + self.hosted_cfg_version = appconfig.CfnHostedConfigurationVersion( + self, + "version", + application_id=self.config_app.ref, + configuration_profile_id=self.config_profile.ref, + content=json.dumps(features_config), + content_type="application/json", + ) + self.app_config_deployment = appconfig.CfnDeployment( + self, + id="deploy", + application_id=self.config_app.ref, + 
configuration_profile_id=self.config_profile.ref, + configuration_version=self.hosted_cfg_version.ref, + deployment_strategy_id="AppConfig.AllAtOnce", + environment_id=self.config_env.ref, + ) diff --git a/docs/examples/utilities/feature_flags/envelope.py b/docs/examples/utilities/feature_flags/envelope.py new file mode 100644 index 00000000000..40aeb73e40a --- /dev/null +++ b/docs/examples/utilities/feature_flags/envelope.py @@ -0,0 +1,8 @@ +from aws_lambda_powertools.utilities.feature_flags import AppConfigStore + +app_config = AppConfigStore( + environment="dev", + application="product-catalogue", + name="configuration", + envelope="feature_flags", +) diff --git a/docs/examples/utilities/feature_flags/get_enabled_features.py b/docs/examples/utilities/feature_flags/get_enabled_features.py new file mode 100644 index 00000000000..f7dcfc77af3 --- /dev/null +++ b/docs/examples/utilities/feature_flags/get_enabled_features.py @@ -0,0 +1,34 @@ +from aws_lambda_powertools.event_handler import APIGatewayRestResolver +from aws_lambda_powertools.utilities.feature_flags import AppConfigStore, FeatureFlags + +app = APIGatewayRestResolver() + +app_config = AppConfigStore( + environment="dev", + application="product-catalogue", + name="features", +) +feature_flags = FeatureFlags(store=app_config) + + +@app.get("/products") +def list_products(): + ctx = { + **app.current_event.headers, + **app.current_event.json_body, + } + + # all_features is evaluated to ["geo_customer_campaign", "ten_percent_off_campaign"] + all_features: list[str] = feature_flags.get_enabled_features(context=ctx) + + if "geo_customer_campaign" in all_features: + # apply discounts based on geo + ... + + if "ten_percent_off_campaign" in all_features: + # apply additional 10% for all customers + ... 
+ + +def lambda_handler(event, context): + return app.resolve(event, context) diff --git a/docs/examples/utilities/feature_flags/get_raw_configuration.py b/docs/examples/utilities/feature_flags/get_raw_configuration.py new file mode 100644 index 00000000000..f3f60e0395a --- /dev/null +++ b/docs/examples/utilities/feature_flags/get_raw_configuration.py @@ -0,0 +1,12 @@ +from aws_lambda_powertools.utilities.feature_flags import AppConfigStore, FeatureFlags + +app_config = AppConfigStore( + environment="dev", + application="product-catalogue", + name="configuration", + envelope="feature_flags", +) + +feature_flags = FeatureFlags(store=app_config) + +config = app_config.get_raw_configuration diff --git a/docs/examples/utilities/feature_flags/non_boolean_flag.py b/docs/examples/utilities/feature_flags/non_boolean_flag.py new file mode 100644 index 00000000000..9b6d8e20bb0 --- /dev/null +++ b/docs/examples/utilities/feature_flags/non_boolean_flag.py @@ -0,0 +1,24 @@ +from aws_lambda_powertools.utilities.feature_flags import AppConfigStore, FeatureFlags + +app_config = AppConfigStore( + environment="dev", + application="product-catalogue", + name="features", +) + +feature_flags = FeatureFlags(store=app_config) + + +def lambda_handler(event, context): + # Get customer's tier from incoming request + ctx = {"tier": event.get("tier", "standard")} + + # Evaluate `premium_features` based on customer's tier + premium_features: list[str] = feature_flags.evaluate( + name="premium_features", + context=ctx, + default=False, + ) + for feature in premium_features: + # enable premium features + ...
diff --git a/docs/examples/utilities/feature_flags/single_feature_flag.py b/docs/examples/utilities/feature_flags/single_feature_flag.py new file mode 100644 index 00000000000..a9496452722 --- /dev/null +++ b/docs/examples/utilities/feature_flags/single_feature_flag.py @@ -0,0 +1,25 @@ +from aws_lambda_powertools.utilities.feature_flags import AppConfigStore, FeatureFlags + +app_config = AppConfigStore( + environment="dev", + application="product-catalogue", + name="features", +) + +feature_flags = FeatureFlags(store=app_config) + + +def lambda_handler(event, context): + # Get customer's tier from incoming request + ctx = {"tier": event.get("tier", "standard")} + + # Evaluate whether customer's tier has access to premium features + # based on `has_premium_features` rules + has_premium_features: bool = feature_flags.evaluate( + name="premium_features", + context=ctx, + default=False, + ) + if has_premium_features: + # enable premium features + ... diff --git a/docs/examples/utilities/feature_flags/static_flag.py b/docs/examples/utilities/feature_flags/static_flag.py new file mode 100644 index 00000000000..4213f674812 --- /dev/null +++ b/docs/examples/utilities/feature_flags/static_flag.py @@ -0,0 +1,20 @@ +from aws_lambda_powertools.utilities.feature_flags import AppConfigStore, FeatureFlags + +app_config = AppConfigStore( + environment="dev", + application="product-catalogue", + name="features", +) + +feature_flags = FeatureFlags(store=app_config) + + +def lambda_handler(event, context): + apply_discount: bool = feature_flags.evaluate( + name="ten_percent_off_campaign", + default=False, + ) + + if apply_discount: + # apply 10% discount to product + ... 
diff --git a/docs/examples/utilities/feature_flags/template.yml b/docs/examples/utilities/feature_flags/template.yml new file mode 100644 index 00000000000..a36f16f4b5f --- /dev/null +++ b/docs/examples/utilities/feature_flags/template.yml @@ -0,0 +1,60 @@ +AWSTemplateFormatVersion: "2010-09-09" +Description: Lambda Powertools Feature flags sample template +Resources: + FeatureStoreApp: + Type: AWS::AppConfig::Application + Properties: + Description: "AppConfig Application for feature toggles" + Name: product-catalogue + + FeatureStoreDevEnv: + Type: AWS::AppConfig::Environment + Properties: + ApplicationId: !Ref FeatureStoreApp + Description: "Development Environment for the App Config Store" + Name: dev + + FeatureStoreConfigProfile: + Type: AWS::AppConfig::ConfigurationProfile + Properties: + ApplicationId: !Ref FeatureStoreApp + Name: features + LocationUri: "hosted" + + HostedConfigVersion: + Type: AWS::AppConfig::HostedConfigurationVersion + Properties: + ApplicationId: !Ref FeatureStoreApp + ConfigurationProfileId: !Ref FeatureStoreConfigProfile + Description: 'A sample hosted configuration version' + Content: | + { + "premium_features": { + "default": false, + "rules": { + "customer tier equals premium": { + "when_match": true, + "conditions": [ + { + "action": "EQUALS", + "key": "tier", + "value": "premium" + } + ] + } + } + }, + "ten_percent_off_campaign": { + "default": false + } + } + ContentType: 'application/json' + + ConfigDeployment: + Type: AWS::AppConfig::Deployment + Properties: + ApplicationId: !Ref FeatureStoreApp + ConfigurationProfileId: !Ref FeatureStoreConfigProfile + ConfigurationVersion: !Ref HostedConfigVersion + DeploymentStrategyId: "AppConfig.AllAtOnce" + EnvironmentId: !Ref FeatureStoreDevEnv diff --git a/docs/examples/utilities/feature_flags/unit_test.py b/docs/examples/utilities/feature_flags/unit_test.py new file mode 100644 index 00000000000..5a80f338f12 --- /dev/null +++ b/docs/examples/utilities/feature_flags/unit_test.py @@ 
-0,0 +1,48 @@ +from aws_lambda_powertools.utilities.feature_flags import AppConfigStore, FeatureFlags, RuleAction + + +def init_feature_flags(mocker, mock_schema, envelope="") -> FeatureFlags: + """Mock AppConfig Store get_configuration method to use mock schema instead""" + + method_to_mock = "aws_lambda_powertools.utilities.feature_flags.AppConfigStore.get_configuration" + mocked_get_conf = mocker.patch(method_to_mock) + mocked_get_conf.return_value = mock_schema + + app_conf_store = AppConfigStore( + environment="test_env", + application="test_app", + name="test_conf_name", + envelope=envelope, + ) + + return FeatureFlags(store=app_conf_store) + + +def test_flags_condition_match(mocker): + # GIVEN + expected_value = True + mocked_app_config_schema = { + "my_feature": { + "default": expected_value, + "rules": { + "tenant id equals 12345": { + "when_match": True, + "conditions": [ + { + "action": RuleAction.EQUALS.value, + "key": "tenant_id", + "value": "12345", + } + ], + } + }, + } + } + + # WHEN + ctx = {"tenant_id": "12345", "username": "a"} + feature_flags = init_feature_flags(mocker=mocker, mock_schema=mocked_app_config_schema) + flag = feature_flags.evaluate(name="my_feature", context=ctx, default=False) + + # THEN + assert flag == expected_value diff --git a/docs/examples/utilities/idempotency/batch_sample.py b/docs/examples/utilities/idempotency/batch_sample.py new file mode 100644 index 00000000000..f12d9d3040b --- /dev/null +++ b/docs/examples/utilities/idempotency/batch_sample.py @@ -0,0 +1,27 @@ +from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor +from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord +from aws_lambda_powertools.utilities.idempotency import DynamoDBPersistenceLayer, IdempotencyConfig, idempotent_function + +processor = BatchProcessor(event_type=EventType.SQS) +dynamodb = DynamoDBPersistenceLayer(table_name="idem") +config = IdempotencyConfig( + 
event_key_jmespath="messageId", # see Choosing a payload subset section + use_local_cache=True, +) + + +@idempotent_function(data_keyword_argument="record", config=config, persistence_store=dynamodb) +def record_handler(record: SQSRecord): + return {"message": record["body"]} + + +@idempotent_function(data_keyword_argument="data", config=config, persistence_store=dynamodb) +def dummy(arg_one, arg_two, data: dict, **kwargs): + return {"data": data} + + +@batch_processor(record_handler=record_handler, processor=processor) +def lambda_handler(event, context): + # `data` parameter must be called as a keyword argument to work + dummy("hello", "universe", data="test") + return processor.response() diff --git a/docs/examples/utilities/idempotency/bring_your_own_persistent_store.py b/docs/examples/utilities/idempotency/bring_your_own_persistent_store.py new file mode 100644 index 00000000000..0aa2c6c1f45 --- /dev/null +++ b/docs/examples/utilities/idempotency/bring_your_own_persistent_store.py @@ -0,0 +1,126 @@ +import datetime +import logging +from typing import Any, Dict, Optional + +import boto3 +from botocore.config import Config + +from aws_lambda_powertools.utilities.idempotency import BasePersistenceLayer +from aws_lambda_powertools.utilities.idempotency.exceptions import ( + IdempotencyItemAlreadyExistsError, + IdempotencyItemNotFoundError, +) +from aws_lambda_powertools.utilities.idempotency.persistence.base import DataRecord + +logger = logging.getLogger(__name__) + + +class DynamoDBPersistenceLayer(BasePersistenceLayer): + def __init__( + self, + table_name: str, + key_attr: str = "id", + expiry_attr: str = "expiration", + status_attr: str = "status", + data_attr: str = "data", + validation_key_attr: str = "validation", + boto_config: Optional[Config] = None, + boto3_session: Optional[boto3.session.Session] = None, + ): + boto_config = boto_config or Config() + session = boto3_session or boto3.session.Session() + self._ddb_resource = session.resource("dynamodb", 
config=boto_config) + self.table_name = table_name + self.table = self._ddb_resource.Table(self.table_name) + self.key_attr = key_attr + self.expiry_attr = expiry_attr + self.status_attr = status_attr + self.data_attr = data_attr + self.validation_key_attr = validation_key_attr + super(DynamoDBPersistenceLayer, self).__init__() + + def _item_to_data_record(self, item: Dict[str, Any]) -> DataRecord: + """ + Translate raw item records from DynamoDB to DataRecord + + Parameters + ---------- + item: Dict[str, Union[str, int]] + Item format from dynamodb response + + Returns + ------- + DataRecord + representation of item + + """ + return DataRecord( + idempotency_key=item[self.key_attr], + status=item[self.status_attr], + expiry_timestamp=item[self.expiry_attr], + response_data=item.get(self.data_attr), + payload_hash=item.get(self.validation_key_attr), + ) + + def _get_record(self, idempotency_key) -> DataRecord: + response = self.table.get_item(Key={self.key_attr: idempotency_key}, ConsistentRead=True) + + try: + item = response["Item"] + except KeyError: + raise IdempotencyItemNotFoundError + return self._item_to_data_record(item) + + def _put_record(self, data_record: DataRecord) -> None: + item = { + self.key_attr: data_record.idempotency_key, + self.expiry_attr: data_record.expiry_timestamp, + self.status_attr: data_record.status, + } + + if self.payload_validation_enabled: + item[self.validation_key_attr] = data_record.payload_hash + + now = datetime.datetime.now() + try: + logger.debug(f"Putting record for idempotency key: {data_record.idempotency_key}") + self.table.put_item( + Item=item, + ConditionExpression=f"attribute_not_exists({self.key_attr}) OR {self.expiry_attr} < :now", + ExpressionAttributeValues={":now": int(now.timestamp())}, + ) + except self._ddb_resource.meta.client.exceptions.ConditionalCheckFailedException: + logger.debug(f"Failed to put record for already existing idempotency key: {data_record.idempotency_key}") + raise 
IdempotencyItemAlreadyExistsError + + def _update_record(self, data_record: DataRecord): + logger.debug(f"Updating record for idempotency key: {data_record.idempotency_key}") + update_expression = "SET #response_data = :response_data, #expiry = :expiry, #status = :status" + expression_attr_values = { + ":expiry": data_record.expiry_timestamp, + ":response_data": data_record.response_data, + ":status": data_record.status, + } + expression_attr_names = { + "#response_data": self.data_attr, + "#expiry": self.expiry_attr, + "#status": self.status_attr, + } + + if self.payload_validation_enabled: + update_expression += ", #validation_key = :validation_key" + expression_attr_values[":validation_key"] = data_record.payload_hash + expression_attr_names["#validation_key"] = self.validation_key_attr + + kwargs = { + "Key": {self.key_attr: data_record.idempotency_key}, + "UpdateExpression": update_expression, + "ExpressionAttributeValues": expression_attr_values, + "ExpressionAttributeNames": expression_attr_names, + } + + self.table.update_item(**kwargs) + + def _delete_record(self, data_record: DataRecord) -> None: + logger.debug(f"Deleting record for idempotency key: {data_record.idempotency_key}") + self.table.delete_item(Key={self.key_attr: data_record.idempotency_key}) diff --git a/docs/examples/utilities/idempotency/dataclass_sample.py b/docs/examples/utilities/idempotency/dataclass_sample.py new file mode 100644 index 00000000000..aa8f7f8dc9f --- /dev/null +++ b/docs/examples/utilities/idempotency/dataclass_sample.py @@ -0,0 +1,33 @@ +from dataclasses import dataclass + +from aws_lambda_powertools.utilities.idempotency import DynamoDBPersistenceLayer, IdempotencyConfig, idempotent_function + +dynamodb = DynamoDBPersistenceLayer(table_name="idem") +config = IdempotencyConfig( + event_key_jmespath="order_id", # see Choosing a payload subset section + use_local_cache=True, +) + + +@dataclass +class OrderItem: + sku: str + description: str + + +@dataclass +class Order: + 
item: OrderItem + order_id: int + + +@idempotent_function(data_keyword_argument="order", config=config, persistence_store=dynamodb) +def process_order(order: Order): + return f"processed order {order.order_id}" + + +order_item = OrderItem(sku="fake", description="sample") +order = Order(item=order_item, order_id="fake-id") + +# `order` parameter must be called as a keyword argument to work +process_order(order=order) diff --git a/docs/examples/utilities/idempotency/dynamodb_persistence_layer_customization.py b/docs/examples/utilities/idempotency/dynamodb_persistence_layer_customization.py new file mode 100644 index 00000000000..d0d97c41eef --- /dev/null +++ b/docs/examples/utilities/idempotency/dynamodb_persistence_layer_customization.py @@ -0,0 +1,10 @@ +from aws_lambda_powertools.utilities.idempotency import DynamoDBPersistenceLayer + +persistence_layer = DynamoDBPersistenceLayer( + table_name="IdempotencyTable", + key_attr="idempotency_key", + expiry_attr="expires_at", + status_attr="current_status", + data_attr="result_data", + validation_key_attr="validation_key", +) diff --git a/docs/examples/utilities/idempotency/idempotency_cache_ttl.py b/docs/examples/utilities/idempotency/idempotency_cache_ttl.py new file mode 100644 index 00000000000..fee48e48d86 --- /dev/null +++ b/docs/examples/utilities/idempotency/idempotency_cache_ttl.py @@ -0,0 +1,12 @@ +from aws_lambda_powertools.utilities.idempotency import DynamoDBPersistenceLayer, IdempotencyConfig, idempotent + +persistence_layer = DynamoDBPersistenceLayer(table_name="IdempotencyTable") +config = IdempotencyConfig( + event_key_jmespath="body", + expires_after_seconds=5 * 60, # 5 minutes +) + + +@idempotent(config=config, persistence_store=persistence_layer) +def handler(event, context): + ... 
diff --git a/docs/examples/utilities/idempotency/idempotency_composite_primary_key.py b/docs/examples/utilities/idempotency/idempotency_composite_primary_key.py new file mode 100644 index 00000000000..9652c1681c3 --- /dev/null +++ b/docs/examples/utilities/idempotency/idempotency_composite_primary_key.py @@ -0,0 +1,11 @@ +from aws_lambda_powertools.utilities.idempotency import DynamoDBPersistenceLayer, idempotent + +persistence_layer = DynamoDBPersistenceLayer( + table_name="IdempotencyTable", + sort_key_attr="sort_key", +) + + +@idempotent(persistence_store=persistence_layer) +def handler(event, context): + return {"message": "success", "id": event["body"]["id"]} diff --git a/docs/examples/utilities/idempotency/idempotency_custom_config.py b/docs/examples/utilities/idempotency/idempotency_custom_config.py new file mode 100644 index 00000000000..2230bc13b29 --- /dev/null +++ b/docs/examples/utilities/idempotency/idempotency_custom_config.py @@ -0,0 +1,15 @@ +from botocore.config import Config + +from aws_lambda_powertools.utilities.idempotency import DynamoDBPersistenceLayer, IdempotencyConfig, idempotent + +config = IdempotencyConfig(event_key_jmespath="body") +boto_config = Config() +persistence_layer = DynamoDBPersistenceLayer( + table_name="IdempotencyTable", + boto_config=boto_config, +) + + +@idempotent(config=config, persistence_store=persistence_layer) +def handler(event, context): + ... 
diff --git a/docs/examples/utilities/idempotency/idempotency_custom_session.py b/docs/examples/utilities/idempotency/idempotency_custom_session.py new file mode 100644 index 00000000000..05e072160c2 --- /dev/null +++ b/docs/examples/utilities/idempotency/idempotency_custom_session.py @@ -0,0 +1,16 @@ +import boto3 + +from aws_lambda_powertools.utilities.idempotency import DynamoDBPersistenceLayer, IdempotencyConfig, idempotent + +boto3_session = boto3.session.Session() +persistence_layer = DynamoDBPersistenceLayer( + table_name="IdempotencyTable", + boto3_session=boto3_session, +) + +config = IdempotencyConfig(event_key_jmespath="body") + + +@idempotent(config=config, persistence_store=persistence_layer) +def handler(event, context): + ... diff --git a/docs/examples/utilities/idempotency/idempotency_exception_sample.py b/docs/examples/utilities/idempotency/idempotency_exception_sample.py new file mode 100644 index 00000000000..7df004a2aef --- /dev/null +++ b/docs/examples/utilities/idempotency/idempotency_exception_sample.py @@ -0,0 +1,24 @@ +import requests + +from aws_lambda_powertools.utilities.idempotency import DynamoDBPersistenceLayer, IdempotencyConfig, idempotent_function + +dynamodb = DynamoDBPersistenceLayer(table_name="idem") +config = IdempotencyConfig(event_key_jmespath="order_id") + + +def lambda_handler(event, context): + # If an exception is raised here, no idempotent record will ever get created as the + # idempotent function does not get called + do_some_stuff() + + result = call_external_service(data={"user": "user1", "id": 5}) + + # This exception will not cause the idempotent record to be deleted, since it + # happens after the decorated function has been successfully called + raise Exception + + +@idempotent_function(data_keyword_argument="data", config=config, persistence_store=dynamodb) +def call_external_service(data: dict, **kwargs): + result = requests.post("http://example.com", json={"user": data["user"], "transaction_id": data["id"]}) + 
return result.json() diff --git a/docs/examples/utilities/idempotency/idempotency_in_memory_cache.py b/docs/examples/utilities/idempotency/idempotency_in_memory_cache.py new file mode 100644 index 00000000000..ad10117b651 --- /dev/null +++ b/docs/examples/utilities/idempotency/idempotency_in_memory_cache.py @@ -0,0 +1,12 @@ +from aws_lambda_powertools.utilities.idempotency import DynamoDBPersistenceLayer, IdempotencyConfig, idempotent + +persistence_layer = DynamoDBPersistenceLayer(table_name="IdempotencyTable") +config = IdempotencyConfig( + event_key_jmespath="body", + use_local_cache=True, +) + + +@idempotent(config=config, persistence_store=persistence_layer) +def handler(event, context): + ... diff --git a/docs/examples/utilities/idempotency/idempotency_key_required.py b/docs/examples/utilities/idempotency/idempotency_key_required.py new file mode 100644 index 00000000000..47212dd48d2 --- /dev/null +++ b/docs/examples/utilities/idempotency/idempotency_key_required.py @@ -0,0 +1,14 @@ +from aws_lambda_powertools.utilities.idempotency import DynamoDBPersistenceLayer, IdempotencyConfig, idempotent + +persistence_layer = DynamoDBPersistenceLayer(table_name="IdempotencyTable") + +# Requires "user"."uid" and "order_id" to be present +config = IdempotencyConfig( + event_key_jmespath="[user.uid, order_id]", + raise_on_no_idempotency_key=True, +) + + +@idempotent(config=config, persistence_store=persistence_layer) +def handler(event, context): + ... 
diff --git a/docs/examples/utilities/idempotency/idempotency_payload_validation.py b/docs/examples/utilities/idempotency/idempotency_payload_validation.py new file mode 100644 index 00000000000..50396c93ddd --- /dev/null +++ b/docs/examples/utilities/idempotency/idempotency_payload_validation.py @@ -0,0 +1,25 @@ +from aws_lambda_powertools.utilities.idempotency import DynamoDBPersistenceLayer, IdempotencyConfig, idempotent + +config = IdempotencyConfig( + event_key_jmespath="[userDetail, productId]", + payload_validation_jmespath="amount", +) +persistence_layer = DynamoDBPersistenceLayer(table_name="IdempotencyTable") + + +@idempotent(config=config, persistence_store=persistence_layer) +def handler(event, context): + # Creating a subscription payment is a side + # effect of calling this function! + payment = create_subscription_payment( + user=event["userDetail"]["username"], + product=event["product_id"], + amount=event["amount"], + ) + ... + return { + "message": "success", + "statusCode": 200, + "payment_id": payment.id, + "amount": payment.amount, + } diff --git a/docs/examples/utilities/idempotency/idempotency_with_validator.py b/docs/examples/utilities/idempotency/idempotency_with_validator.py new file mode 100644 index 00000000000..d98efc18732 --- /dev/null +++ b/docs/examples/utilities/idempotency/idempotency_with_validator.py @@ -0,0 +1,12 @@ +from aws_lambda_powertools.utilities.idempotency import DynamoDBPersistenceLayer, IdempotencyConfig, idempotent +from aws_lambda_powertools.utilities.validation import envelopes, validator + +config = IdempotencyConfig(event_key_jmespath="[message, username]") +persistence_layer = DynamoDBPersistenceLayer(table_name="IdempotencyTable") + + +@validator(envelope=envelopes.API_GATEWAY_HTTP) +@idempotent(config=config, persistence_store=persistence_layer) +def lambda_handler(event, context): + cause_some_side_effects(event["username"]) + return {"message": event["message"], "statusCode": 200} diff --git 
a/docs/examples/utilities/idempotency/idempotent_decorator.py b/docs/examples/utilities/idempotency/idempotent_decorator.py new file mode 100644 index 00000000000..d3e78d2d94c --- /dev/null +++ b/docs/examples/utilities/idempotency/idempotent_decorator.py @@ -0,0 +1,14 @@ +from aws_lambda_powertools.utilities.idempotency import DynamoDBPersistenceLayer, idempotent + +persistence_layer = DynamoDBPersistenceLayer(table_name="IdempotencyTable") + + +@idempotent(persistence_store=persistence_layer) +def handler(event, context): + payment = create_subscription_payment(user=event["user"], product=event["product_id"]) + ... + return { + "payment_id": payment.id, + "message": "success", + "statusCode": 200, + } diff --git a/docs/examples/utilities/idempotency/parser_pydantic_sample.py b/docs/examples/utilities/idempotency/parser_pydantic_sample.py new file mode 100644 index 00000000000..6df8d3dcf35 --- /dev/null +++ b/docs/examples/utilities/idempotency/parser_pydantic_sample.py @@ -0,0 +1,30 @@ +from aws_lambda_powertools.utilities.idempotency import DynamoDBPersistenceLayer, IdempotencyConfig, idempotent_function +from aws_lambda_powertools.utilities.parser import BaseModel + +dynamodb = DynamoDBPersistenceLayer(table_name="idem") +config = IdempotencyConfig( + event_key_jmespath="order_id", # see Choosing a payload subset section + use_local_cache=True, +) + + +class OrderItem(BaseModel): + sku: str + description: str + + +class Order(BaseModel): + item: OrderItem + order_id: int + + +@idempotent_function(data_keyword_argument="order", config=config, persistence_store=dynamodb) +def process_order(order: Order): + return f"processed order {order.order_id}" + + +order_item = OrderItem(sku="fake", description="sample") +order = Order(item=order_item, order_id="fake-id") + +# `order` parameter must be called as a keyword argument to work +process_order(order=order) diff --git a/docs/examples/utilities/idempotency/payment.py b/docs/examples/utilities/idempotency/payment.py 
new file mode 100644 index 00000000000..c2711b5e2f5 --- /dev/null +++ b/docs/examples/utilities/idempotency/payment.py @@ -0,0 +1,20 @@ +import json + +from aws_lambda_powertools.utilities.idempotency import DynamoDBPersistenceLayer, IdempotencyConfig, idempotent + +persistence_layer = DynamoDBPersistenceLayer(table_name="IdempotencyTable") + +# Treat everything under the "body" key +# in the event json object as our payload +config = IdempotencyConfig(event_key_jmespath="powertools_json(body)") + + +@idempotent(config=config, persistence_store=persistence_layer) +def handler(event, context): + body = json.loads(event["body"]) + payment = create_subscription_payment( + user=body["user"], + product=body["product_id"], + ) + ... + return {"payment_id": payment.id, "message": "success", "statusCode": 200} diff --git a/docs/examples/utilities/idempotency/template.yml b/docs/examples/utilities/idempotency/template.yml new file mode 100644 index 00000000000..549f2db9cc6 --- /dev/null +++ b/docs/examples/utilities/idempotency/template.yml @@ -0,0 +1,26 @@ +AWSTemplateFormatVersion: "2010-09-09" +Transform: AWS::Serverless-2016-10-31 +Resources: + IdempotencyTable: + Type: AWS::DynamoDB::Table + Properties: + AttributeDefinitions: + - AttributeName: id + AttributeType: S + KeySchema: + - AttributeName: id + KeyType: HASH + TimeToLiveSpecification: + AttributeName: expiration + Enabled: true + BillingMode: PAY_PER_REQUEST + + HelloWorldFunction: + Type: AWS::Serverless::Function + Properties: + CodeUri: src/ + Handler: app.lambda_handler + Runtime: python3.9 + Policies: + - DynamoDBCrudPolicy: + TableName: !Ref IdempotencyTable diff --git a/docs/examples/utilities/idempotency/testing_idempotency_disabled_app.py b/docs/examples/utilities/idempotency/testing_idempotency_disabled_app.py new file mode 100644 index 00000000000..d6fa87ea124 --- /dev/null +++ b/docs/examples/utilities/idempotency/testing_idempotency_disabled_app.py @@ -0,0 +1,13 @@ +from 
aws_lambda_powertools.utilities.idempotency import DynamoDBPersistenceLayer, idempotent + +persistence_layer = DynamoDBPersistenceLayer(table_name="idempotency") + + +@idempotent(persistence_store=persistence_layer) +def handler(event, context): + print("expensive operation") + return { + "payment_id": 12345, + "message": "success", + "statusCode": 200, + } diff --git a/docs/examples/utilities/idempotency/testing_idempotency_disabled_test.py b/docs/examples/utilities/idempotency/testing_idempotency_disabled_test.py new file mode 100644 index 00000000000..2bbd960f18d --- /dev/null +++ b/docs/examples/utilities/idempotency/testing_idempotency_disabled_test.py @@ -0,0 +1,9 @@ +from app import handler + + +def test_idempotent_lambda_handler(monkeypatch): + # Set POWERTOOLS_IDEMPOTENCY_DISABLED before calling decorated functions + monkeypatch.setenv("POWERTOOLS_IDEMPOTENCY_DISABLED", "1") + + result = handler({}, {}) + ... diff --git a/docs/examples/utilities/idempotency/testing_with_dynamodb_local_app.py b/docs/examples/utilities/idempotency/testing_with_dynamodb_local_app.py new file mode 100644 index 00000000000..d6fa87ea124 --- /dev/null +++ b/docs/examples/utilities/idempotency/testing_with_dynamodb_local_app.py @@ -0,0 +1,13 @@ +from aws_lambda_powertools.utilities.idempotency import DynamoDBPersistenceLayer, idempotent + +persistence_layer = DynamoDBPersistenceLayer(table_name="idempotency") + + +@idempotent(persistence_store=persistence_layer) +def handler(event, context): + print("expensive operation") + return { + "payment_id": 12345, + "message": "success", + "statusCode": 200, + } diff --git a/docs/examples/utilities/idempotency/testing_with_dynamodb_local_test.py b/docs/examples/utilities/idempotency/testing_with_dynamodb_local_test.py new file mode 100644 index 00000000000..b79041cdb63 --- /dev/null +++ b/docs/examples/utilities/idempotency/testing_with_dynamodb_local_test.py @@ -0,0 +1,12 @@ +import app +import boto3 + + +def test_idempotent_lambda(): + # Create
our own Table resource using the endpoint for our DynamoDB Local instance + resource = boto3.resource("dynamodb", endpoint_url="http://localhost:8000") + table = resource.Table(app.persistence_layer.table_name) + app.persistence_layer.table = table + + result = app.handler({"testkey": "testvalue"}, {}) + assert result["payment_id"] == 12345 diff --git a/docs/examples/utilities/idempotency/testing_with_mocked_dynamodb_app.py b/docs/examples/utilities/idempotency/testing_with_mocked_dynamodb_app.py new file mode 100644 index 00000000000..d6fa87ea124 --- /dev/null +++ b/docs/examples/utilities/idempotency/testing_with_mocked_dynamodb_app.py @@ -0,0 +1,13 @@ +from aws_lambda_powertools.utilities.idempotency import DynamoDBPersistenceLayer, idempotent + +persistence_layer = DynamoDBPersistenceLayer(table_name="idempotency") + + +@idempotent(persistence_store=persistence_layer) +def handler(event, context): + print("expensive operation") + return { + "payment_id": 12345, + "message": "success", + "statusCode": 200, + } diff --git a/docs/examples/utilities/idempotency/testing_with_mocked_dynamodb_test.py b/docs/examples/utilities/idempotency/testing_with_mocked_dynamodb_test.py new file mode 100644 index 00000000000..cbb20367bd2 --- /dev/null +++ b/docs/examples/utilities/idempotency/testing_with_mocked_dynamodb_test.py @@ -0,0 +1,11 @@ +from unittest.mock import MagicMock + +import app + + +def test_idempotent_lambda(): + table = MagicMock() + app.persistence_layer.table = table + result = app.handler({"testkey": "testvalue"}, {}) + table.put_item.assert_called() + ... 
diff --git a/docs/examples/utilities/jmespath_functions/custom_jmespath_function.py b/docs/examples/utilities/jmespath_functions/custom_jmespath_function.py new file mode 100644 index 00000000000..b03a095313c --- /dev/null +++ b/docs/examples/utilities/jmespath_functions/custom_jmespath_function.py @@ -0,0 +1,22 @@ +from jmespath.functions import signature + +from aws_lambda_powertools.utilities.jmespath_utils import PowertoolsFunctions, extract_data_from_envelope + + +class CustomFunctions(PowertoolsFunctions): + @signature({"types": ["string"]}) # Only decode if value is a string + def _func_special_decoder(self, s): + return my_custom_decoder_logic(s) + + +custom_jmespath_options = {"custom_functions": CustomFunctions()} + + +def handler(event, context): + # use the custom name after `_func_` + extract_data_from_envelope( + data=event, + envelope="special_decoder(body)", + jmespath_options=custom_jmespath_options, + ) + ... diff --git a/docs/examples/utilities/jmespath_functions/extract_data_built_in_jmespath.py b/docs/examples/utilities/jmespath_functions/extract_data_built_in_jmespath.py new file mode 100644 index 00000000000..d77d08ed49e --- /dev/null +++ b/docs/examples/utilities/jmespath_functions/extract_data_built_in_jmespath.py @@ -0,0 +1,8 @@ +from aws_lambda_powertools.utilities.jmespath_utils import envelopes, extract_data_from_envelope +from aws_lambda_powertools.utilities.typing import LambdaContext + + +def handler(event: dict, context: LambdaContext): + payload = extract_data_from_envelope(data=event, envelope=envelopes.SNS) + customer = payload.get("customerId") # now deserialized + ... 
diff --git a/docs/examples/utilities/jmespath_functions/extract_data_jmespath.py b/docs/examples/utilities/jmespath_functions/extract_data_jmespath.py new file mode 100644 index 00000000000..bfd1e6af88e --- /dev/null +++ b/docs/examples/utilities/jmespath_functions/extract_data_jmespath.py @@ -0,0 +1,8 @@ +from aws_lambda_powertools.utilities.jmespath_utils import extract_data_from_envelope +from aws_lambda_powertools.utilities.typing import LambdaContext + + +def handler(event: dict, context: LambdaContext): + payload = extract_data_from_envelope(data=event, envelope="powertools_json(body)") + customer = payload.get("customerId") # now deserialized + ... diff --git a/docs/examples/utilities/jmespath_functions/powertools_base64_gzip_jmespath_function.py b/docs/examples/utilities/jmespath_functions/powertools_base64_gzip_jmespath_function.py new file mode 100644 index 00000000000..0da16c62e9d --- /dev/null +++ b/docs/examples/utilities/jmespath_functions/powertools_base64_gzip_jmespath_function.py @@ -0,0 +1,13 @@ +import schemas + +from aws_lambda_powertools.utilities.validation import validate + +sample_event = { + "data": "H4sIACZAXl8C/52PzUrEMBhFX2UILpX8tPbHXWHqIOiq3Q1F0ubrWEiakqTWofTdTYYB0YWL2d5zvnuTFellBIOedoiyKH5M0iwnlKH7HZL6dDB6ngLDfLFYctUKjie9gHFaS/sAX1xNEq525QxwFXRGGMEkx4Th491rUZdV3YiIZ6Ljfd+lfSyAtZloacQgAkqSJCGhxM6t7cwwuUGPz4N0YKyvO6I9WDeMPMSo8Z4Ca/kJ6vMEYW5f1MX7W1lVxaG8vqX8hNFdjlc0iCBBSF4ERT/3Pl7RbMGMXF2KZMh/C+gDpNS7RRsp0OaRGzx0/t8e0jgmcczyLCWEePhni/23JWalzjdu0a3ZvgEaNLXeugEAAA==" +} + +validate( + event=sample_event, + schema=schemas.INPUT, + envelope="powertools_base64_gzip(data) | powertools_json(@)", +) diff --git a/docs/examples/utilities/jmespath_functions/powertools_base64_jmespath_function.py b/docs/examples/utilities/jmespath_functions/powertools_base64_jmespath_function.py new file mode 100644 index 00000000000..eac125c238a --- /dev/null +++ b/docs/examples/utilities/jmespath_functions/powertools_base64_jmespath_function.py @@ -0,0 +1,13 @@ 
+import schemas + +from aws_lambda_powertools.utilities.validation import validate + +sample_event = { + "data": "eyJtZXNzYWdlIjogImhlbGxvIGhlbGxvIiwgInVzZXJuYW1lIjogImJsYWggYmxhaCJ9=", +} + +validate( + event=sample_event, + schema=schemas.INPUT, + envelope="powertools_json(powertools_base64(data))", +) diff --git a/docs/examples/utilities/jmespath_functions/powertools_json_jmespath_function.py b/docs/examples/utilities/jmespath_functions/powertools_json_jmespath_function.py new file mode 100644 index 00000000000..991cd930b68 --- /dev/null +++ b/docs/examples/utilities/jmespath_functions/powertools_json_jmespath_function.py @@ -0,0 +1,9 @@ +import schemas + +from aws_lambda_powertools.utilities.validation import validate + +sample_event = { + "data": '{"payload": {"message": "hello hello", "username": "blah blah"}}', +} + +validate(event=sample_event, schema=schemas.INPUT, envelope="powertools_json(data)") diff --git a/docs/examples/utilities/jmespath_functions/powertools_json_jmespath_function_idempotency.py b/docs/examples/utilities/jmespath_functions/powertools_json_jmespath_function_idempotency.py new file mode 100644 index 00000000000..04017241416 --- /dev/null +++ b/docs/examples/utilities/jmespath_functions/powertools_json_jmespath_function_idempotency.py @@ -0,0 +1,21 @@ +import json + +from aws_lambda_powertools.utilities.idempotency import DynamoDBPersistenceLayer, IdempotencyConfig, idempotent + +persistence_layer = DynamoDBPersistenceLayer(table_name="IdempotencyTable") +config = IdempotencyConfig(event_key_jmespath="powertools_json(body)") + + +@idempotent(config=config, persistence_store=persistence_layer) +def handler(event: dict, context): + body = json.loads(event["body"]) + payment = create_subscription_payment( + user=body["user"], + product=body["product_id"], + ) + ... 
+ return { + "payment_id": payment.id, + "message": "success", + "statusCode": 200, + } diff --git a/docs/examples/utilities/middleware_factory/middleware_no_params.py b/docs/examples/utilities/middleware_factory/middleware_no_params.py new file mode 100644 index 00000000000..5bb4b2fcadc --- /dev/null +++ b/docs/examples/utilities/middleware_factory/middleware_no_params.py @@ -0,0 +1,14 @@ +from aws_lambda_powertools.middleware_factory import lambda_handler_decorator + + +@lambda_handler_decorator +def middleware_before_after(handler, event, context): + # logic_before_handler_execution() + response = handler(event, context) + # logic_after_handler_execution() + return response + + +@middleware_before_after +def lambda_handler(event, context): + ... diff --git a/docs/examples/utilities/middleware_factory/middleware_trace_custom.py b/docs/examples/utilities/middleware_factory/middleware_trace_custom.py new file mode 100644 index 00000000000..3e41e040150 --- /dev/null +++ b/docs/examples/utilities/middleware_factory/middleware_trace_custom.py @@ -0,0 +1,10 @@ +from aws_lambda_powertools import Tracer +from aws_lambda_powertools.middleware_factory import lambda_handler_decorator + + +@lambda_handler_decorator(trace_execution=True) +def middleware_name(handler, event, context): + # tracer = Tracer() # Takes a copy of an existing tracer instance + # tracer.add_annotation... + # tracer.add_metadata... 
+ return handler(event, context) diff --git a/docs/examples/utilities/middleware_factory/middleware_trace_execution.py b/docs/examples/utilities/middleware_factory/middleware_trace_execution.py new file mode 100644 index 00000000000..4aa0113b490 --- /dev/null +++ b/docs/examples/utilities/middleware_factory/middleware_trace_execution.py @@ -0,0 +1,11 @@ +from aws_lambda_powertools.middleware_factory import lambda_handler_decorator + + +@lambda_handler_decorator(trace_execution=True) +def my_middleware(handler, event, context): + return handler(event, context) + + +@my_middleware +def lambda_handler(event, context): + ... diff --git a/docs/examples/utilities/middleware_factory/middleware_with_params.py b/docs/examples/utilities/middleware_factory/middleware_with_params.py new file mode 100644 index 00000000000..80a81508483 --- /dev/null +++ b/docs/examples/utilities/middleware_factory/middleware_with_params.py @@ -0,0 +1,19 @@ +from typing import List + +from aws_lambda_powertools.middleware_factory import lambda_handler_decorator + + +@lambda_handler_decorator +def obfuscate_sensitive_data(handler, event, context, fields: List = None): + # Obfuscate email before calling Lambda handler + if fields: + for field in fields: + if field in event: + event[field] = obfuscate(event[field]) + + return handler(event, context) + + +@obfuscate_sensitive_data(fields=["email"]) +def lambda_handler(event, context): + ... 
diff --git a/docs/examples/utilities/parameters/app_config_provider.py b/docs/examples/utilities/parameters/app_config_provider.py new file mode 100644 index 00000000000..24a1eaea6f9 --- /dev/null +++ b/docs/examples/utilities/parameters/app_config_provider.py @@ -0,0 +1,15 @@ +from botocore.config import Config + +from aws_lambda_powertools.utilities import parameters + +config = Config(region_name="us-west-1") +appconf_provider = parameters.AppConfigProvider( + environment="my_env", + application="my_app", + config=config, +) + + +def handler(event, context): + # Retrieve a single secret + value: bytes = appconf_provider.get("my_conf") diff --git a/docs/examples/utilities/parameters/create_your_own_s3_provider.py b/docs/examples/utilities/parameters/create_your_own_s3_provider.py new file mode 100644 index 00000000000..28cc584c9ae --- /dev/null +++ b/docs/examples/utilities/parameters/create_your_own_s3_provider.py @@ -0,0 +1,53 @@ +import copy +from typing import Dict + +import boto3 + +from aws_lambda_powertools.utilities.parameters import BaseProvider + + +class S3Provider(BaseProvider): + bucket_name = None + client = None + + def __init__(self, bucket_name: str): + # Initialize the client to your custom parameter store + # E.g.: + + self.bucket_name = bucket_name + self.client = boto3.client("s3") + + def _get(self, name: str, **sdk_options) -> str: + # Retrieve a single value + # E.g.: + + sdk_options["Bucket"] = self.bucket_name + sdk_options["Key"] = name + + response = self.client.get_object(**sdk_options) + return response["Body"].read().decode() + + def _get_multiple(self, path: str, **sdk_options) -> Dict[str, str]: + # Retrieve multiple values + # E.g.: + + list_sdk_options = copy.deepcopy(sdk_options) + + list_sdk_options["Bucket"] = self.bucket_name + list_sdk_options["Prefix"] = path + + list_response = self.client.list_objects_v2(**list_sdk_options) + + parameters = {} + + for obj in list_response.get("Contents", []): + get_sdk_options = copy.deepcopy(sdk_options) + + 
get_sdk_options["Bucket"] = self.bucket_name + get_sdk_options["Key"] = obj["Key"] + + get_response = self.client.get_object(**get_sdk_options) + + parameters[obj["Key"]] = get_response["Body"].read().decode() + + return parameters diff --git a/docs/examples/utilities/parameters/custom_caching_parameters.py b/docs/examples/utilities/parameters/custom_caching_parameters.py new file mode 100644 index 00000000000..d603484a02d --- /dev/null +++ b/docs/examples/utilities/parameters/custom_caching_parameters.py @@ -0,0 +1,16 @@ +from botocore.config import Config + +from aws_lambda_powertools.utilities import parameters + +config = Config(region_name="us-west-1") +ssm_provider = parameters.SSMProvider(config=config) + + +def handler(event, context): + # Retrieve a single parameter + value = ssm_provider.get("/my/parameter", max_age=60) # 1 minute + + # Retrieve multiple parameters from a path prefix + values = ssm_provider.get_multiple("/my/path/prefix", max_age=60) + for k, v in values.items(): + print(f"{k}: {v}") diff --git a/docs/examples/utilities/parameters/dynamodb_provider.py b/docs/examples/utilities/parameters/dynamodb_provider.py new file mode 100644 index 00000000000..f17d7456008 --- /dev/null +++ b/docs/examples/utilities/parameters/dynamodb_provider.py @@ -0,0 +1,8 @@ +from aws_lambda_powertools.utilities import parameters + +dynamodb_provider = parameters.DynamoDBProvider(table_name="my-table") + + +def handler(event, context): + # Retrieve a value from DynamoDB + value = dynamodb_provider.get("my-parameter") diff --git a/docs/examples/utilities/parameters/dynamodb_provider_customization.py b/docs/examples/utilities/parameters/dynamodb_provider_customization.py new file mode 100644 index 00000000000..9b67658d2cf --- /dev/null +++ b/docs/examples/utilities/parameters/dynamodb_provider_customization.py @@ -0,0 +1,12 @@ +from aws_lambda_powertools.utilities import parameters + +dynamodb_provider = parameters.DynamoDBProvider( + table_name="my-table", + 
key_attr="MyKeyAttr", + sort_attr="MySortAttr", + value_attr="MyvalueAttr", +) + + +def handler(event, context): + value = dynamodb_provider.get("my-parameter") diff --git a/docs/examples/utilities/parameters/dynamodb_provider_get_multiple.py b/docs/examples/utilities/parameters/dynamodb_provider_get_multiple.py new file mode 100644 index 00000000000..31e3d43981b --- /dev/null +++ b/docs/examples/utilities/parameters/dynamodb_provider_get_multiple.py @@ -0,0 +1,13 @@ +from aws_lambda_powertools.utilities import parameters + +dynamodb_provider = parameters.DynamoDBProvider(table_name="my-table") + + +def handler(event, context): + # Retrieve multiple values by performing a Query on the DynamoDB table + # This returns a dict with the sort key attribute as dict key. + parameters = dynamodb_provider.get_multiple("my-hash-key") + for k, v in parameters.items(): + # k: param-a + # v: "my-value-a" + print(f"{k}: {v}") diff --git a/docs/examples/utilities/parameters/dynamodb_provider_local.py b/docs/examples/utilities/parameters/dynamodb_provider_local.py new file mode 100644 index 00000000000..b7e7dca8829 --- /dev/null +++ b/docs/examples/utilities/parameters/dynamodb_provider_local.py @@ -0,0 +1,6 @@ +from aws_lambda_powertools.utilities import parameters + +dynamodb_provider = parameters.DynamoDBProvider( + table_name="my-table", + endpoint_url="http://localhost:8000", +) diff --git a/docs/examples/utilities/parameters/fetching_app_config.py b/docs/examples/utilities/parameters/fetching_app_config.py new file mode 100644 index 00000000000..b6ea5014e10 --- /dev/null +++ b/docs/examples/utilities/parameters/fetching_app_config.py @@ -0,0 +1,10 @@ +from aws_lambda_powertools.utilities import parameters + + +def handler(event, context): + # Retrieve a single configuration, latest version + value: bytes = parameters.get_app_config( + name="my_configuration", + environment="my_env", + application="my_app", + ) diff --git 
a/docs/examples/utilities/parameters/fetching_secrets.py b/docs/examples/utilities/parameters/fetching_secrets.py new file mode 100644 index 00000000000..a62ba094c65 --- /dev/null +++ b/docs/examples/utilities/parameters/fetching_secrets.py @@ -0,0 +1,6 @@ +from aws_lambda_powertools.utilities import parameters + + +def handler(event, context): + # Retrieve a single secret + value = parameters.get_secret("my-secret") diff --git a/docs/examples/utilities/parameters/force_fetch_parameters.py b/docs/examples/utilities/parameters/force_fetch_parameters.py new file mode 100644 index 00000000000..d049c1f0c41 --- /dev/null +++ b/docs/examples/utilities/parameters/force_fetch_parameters.py @@ -0,0 +1,6 @@ +from aws_lambda_powertools.utilities import parameters + + +def handler(event, context): + # Retrieve a single parameter + value = parameters.get_parameter("/my/parameter", force_fetch=True) diff --git a/docs/examples/utilities/parameters/parameters_custom_config.py b/docs/examples/utilities/parameters/parameters_custom_config.py new file mode 100644 index 00000000000..79510421617 --- /dev/null +++ b/docs/examples/utilities/parameters/parameters_custom_config.py @@ -0,0 +1,12 @@ +from botocore.config import Config + +from aws_lambda_powertools.utilities import parameters + +boto_config = Config() +ssm_provider = parameters.SSMProvider(config=boto_config) + + +def handler(event, context): + # Retrieve a single parameter + value = ssm_provider.get("/my/parameter") + ... 
diff --git a/docs/examples/utilities/parameters/parameters_custom_session.py b/docs/examples/utilities/parameters/parameters_custom_session.py new file mode 100644 index 00000000000..d76fffe00ac --- /dev/null +++ b/docs/examples/utilities/parameters/parameters_custom_session.py @@ -0,0 +1,12 @@ +import boto3 + +from aws_lambda_powertools.utilities import parameters + +boto3_session = boto3.session.Session() +ssm_provider = parameters.SSMProvider(boto3_session=boto3_session) + + +def handler(event, context): + # Retrieve a single parameter + value = ssm_provider.get("/my/parameter") + ... diff --git a/docs/examples/utilities/parameters/parameters_sdk_args.py b/docs/examples/utilities/parameters/parameters_sdk_args.py new file mode 100644 index 00000000000..571d070de8a --- /dev/null +++ b/docs/examples/utilities/parameters/parameters_sdk_args.py @@ -0,0 +1,8 @@ +from aws_lambda_powertools.utilities import parameters + +secrets_provider = parameters.SecretsProvider() + + +def handler(event, context): + # The 'VersionId' argument will be passed to the underlying get_secret_value() call. 
+ value = secrets_provider.get("my-secret", VersionId="e62ec170-6b01-48c7-94f3-d7497851a8d2") diff --git a/docs/examples/utilities/parameters/parameters_transform.py b/docs/examples/utilities/parameters/parameters_transform.py new file mode 100644 index 00000000000..eb7c3be0d8a --- /dev/null +++ b/docs/examples/utilities/parameters/parameters_transform.py @@ -0,0 +1,5 @@ +from aws_lambda_powertools.utilities import parameters + + +def handler(event, context): + value_from_json = parameters.get_parameter("/my/json/parameter", transform="json") diff --git a/docs/examples/utilities/parameters/parameters_transform_auto.py b/docs/examples/utilities/parameters/parameters_transform_auto.py new file mode 100644 index 00000000000..f85e6aa1b13 --- /dev/null +++ b/docs/examples/utilities/parameters/parameters_transform_auto.py @@ -0,0 +1,7 @@ +from aws_lambda_powertools.utilities import parameters + +ssm_provider = parameters.SSMProvider() + + +def handler(event, context): + values = ssm_provider.get_multiple("/param", transform="auto") diff --git a/docs/examples/utilities/parameters/parameters_transform_providers.py b/docs/examples/utilities/parameters/parameters_transform_providers.py new file mode 100644 index 00000000000..2451330e9c7 --- /dev/null +++ b/docs/examples/utilities/parameters/parameters_transform_providers.py @@ -0,0 +1,11 @@ +from aws_lambda_powertools.utilities import parameters + +ssm_provider = parameters.SSMProvider() + + +def handler(event, context): + # Transform a JSON string + value_from_json = ssm_provider.get("/my/json/parameter", transform="json") + + # Transform a Base64 encoded string + value_from_binary = ssm_provider.get("/my/binary/parameter", transform="binary") diff --git a/docs/examples/utilities/parameters/parameters_transform_raise_on_transform_error.py b/docs/examples/utilities/parameters/parameters_transform_raise_on_transform_error.py new file mode 100644 index 00000000000..542ad754439 --- /dev/null +++ 
b/docs/examples/utilities/parameters/parameters_transform_raise_on_transform_error.py @@ -0,0 +1,19 @@ +from aws_lambda_powertools.utilities import parameters + +ssm_provider = parameters.SSMProvider() + + +def handler(event, context): + # This will display: + # /param/a: [some value] + # /param/b: [some value] + # /param/c: None + values = ssm_provider.get_multiple("/param", transform="json") + for k, v in values.items(): + print(f"{k}: {v}") + + try: + # This will raise a TransformParameterError exception + values = ssm_provider.get_multiple("/param", transform="json", raise_on_transform_error=True) + except parameters.exceptions.TransformParameterError: + ... diff --git a/docs/examples/utilities/parameters/recursively_parameters.py b/docs/examples/utilities/parameters/recursively_parameters.py new file mode 100644 index 00000000000..5ff2fcb7161 --- /dev/null +++ b/docs/examples/utilities/parameters/recursively_parameters.py @@ -0,0 +1,12 @@ +from aws_lambda_powertools.utilities import parameters + + +def handler(event, context): + # Retrieve a single parameter + value = parameters.get_parameter("/my/parameter") + + # Retrieve multiple parameters from a path prefix recursively + # This returns a dict with the parameter name as key + values = parameters.get_parameters("/my/path/prefix") + for k, v in values.items(): + print(f"{k}: {v}") diff --git a/docs/examples/utilities/parameters/secrets_provider.py b/docs/examples/utilities/parameters/secrets_provider.py new file mode 100644 index 00000000000..f775ff3a7b8 --- /dev/null +++ b/docs/examples/utilities/parameters/secrets_provider.py @@ -0,0 +1,11 @@ +from botocore.config import Config + +from aws_lambda_powertools.utilities import parameters + +config = Config(region_name="us-west-1") +secrets_provider = parameters.SecretsProvider(config=config) + + +def handler(event, context): + # Retrieve a single secret + value = secrets_provider.get("my-secret") diff --git a/docs/examples/utilities/parameters/ssm_provider.py 
b/docs/examples/utilities/parameters/ssm_provider.py new file mode 100644 index 00000000000..b83b2bafa16 --- /dev/null +++ b/docs/examples/utilities/parameters/ssm_provider.py @@ -0,0 +1,16 @@ +from botocore.config import Config + +from aws_lambda_powertools.utilities import parameters + +config = Config(region_name="us-west-1") +ssm_provider = parameters.SSMProvider(config=config) # or boto3_session=boto3.Session() + + +def handler(event, context): + # Retrieve a single parameter + value = ssm_provider.get("/my/parameter") + + # Retrieve multiple parameters from a path prefix + values = ssm_provider.get_multiple("/my/path/prefix") + for k, v in values.items(): + print(f"{k}: {v}") diff --git a/docs/examples/utilities/parameters/ssm_provider_get_options.py b/docs/examples/utilities/parameters/ssm_provider_get_options.py new file mode 100644 index 00000000000..0dbcc743411 --- /dev/null +++ b/docs/examples/utilities/parameters/ssm_provider_get_options.py @@ -0,0 +1,9 @@ +from aws_lambda_powertools.utilities import parameters + +ssm_provider = parameters.SSMProvider() + + +def handler(event, context): + decrypted_value = ssm_provider.get("/my/encrypted/parameter", decrypt=True) + + no_recursive_values = ssm_provider.get_multiple("/my/path/prefix", recursive=False) diff --git a/docs/examples/utilities/parameters/testing_parameters_fixture.py b/docs/examples/utilities/parameters/testing_parameters_fixture.py new file mode 100644 index 00000000000..2a872172252 --- /dev/null +++ b/docs/examples/utilities/parameters/testing_parameters_fixture.py @@ -0,0 +1,16 @@ +import pytest +from src import index + + +@pytest.fixture +def mock_parameter_response(monkeypatch): + def mockreturn(name): + return "mock_value" + + monkeypatch.setattr(index.parameters, "get_parameter", mockreturn) + + +# Pass our fixture as an argument to all tests where we want to mock the get_parameter response +def test_handler(mock_parameter_response): + return_val = index.handler({}, {}) + assert 
return_val.get("message") == "mock_value" diff --git a/docs/examples/utilities/parameters/testing_parameters_index.py b/docs/examples/utilities/parameters/testing_parameters_index.py new file mode 100644 index 00000000000..ef94df54f25 --- /dev/null +++ b/docs/examples/utilities/parameters/testing_parameters_index.py @@ -0,0 +1,7 @@ +from aws_lambda_powertools.utilities import parameters + + +def handler(event, context): + # Retrieve a single parameter + value = parameters.get_parameter("my-parameter-name") + return {"message": value} diff --git a/docs/examples/utilities/parameters/testing_parameters_mock.py b/docs/examples/utilities/parameters/testing_parameters_mock.py new file mode 100644 index 00000000000..8ac789fd5a1 --- /dev/null +++ b/docs/examples/utilities/parameters/testing_parameters_mock.py @@ -0,0 +1,13 @@ +from unittest.mock import patch + +from src import index + + +# Replaces "aws_lambda_powertools.utilities.parameters.get_parameter" with a Mock object +@patch("aws_lambda_powertools.utilities.parameters.get_parameter") +def test_handler(get_parameter_mock): + get_parameter_mock.return_value = "mock_value" + + return_val = index.handler({}, {}) + get_parameter_mock.assert_called_with("my-parameter-name") + assert return_val.get("message") == "mock_value" diff --git a/docs/examples/utilities/parameters/testing_parameters_tests.py b/docs/examples/utilities/parameters/testing_parameters_tests.py new file mode 100644 index 00000000000..0a7679d990c --- /dev/null +++ b/docs/examples/utilities/parameters/testing_parameters_tests.py @@ -0,0 +1,10 @@ +from src import index + + +def test_handler(monkeypatch): + def mockreturn(name): + return "mock_value" + + monkeypatch.setattr(index.parameters, "get_parameter", mockreturn) + return_val = index.handler({}, {}) + assert return_val.get("message") == "mock_value" diff --git a/docs/examples/utilities/parser/parser_envelope.py b/docs/examples/utilities/parser/parser_envelope.py new file mode 100644 index 
00000000000..529dc630523 --- /dev/null +++ b/docs/examples/utilities/parser/parser_envelope.py @@ -0,0 +1,35 @@ +from aws_lambda_powertools.utilities.parser import BaseModel, envelopes, event_parser, parse +from aws_lambda_powertools.utilities.typing import LambdaContext + + +class UserModel(BaseModel): + username: str + password1: str + password2: str + + +payload = { + "version": "0", + "id": "6a7e8feb-b491-4cf7-a9f1-bf3703467718", + "detail-type": "CustomerSignedUp", + "source": "CustomerService", + "account": "111122223333", + "time": "2020-10-22T18:43:48Z", + "region": "us-west-1", + "resources": ["some_additional_"], + "detail": { + "username": "universe", + "password1": "myp@ssword", + "password2": "repeat password", + }, +} + +ret = parse(model=UserModel, envelope=envelopes.EventBridgeEnvelope, event=payload) + +# Parsed model only contains our actual model, not the entire EventBridge + Payload parsed +assert ret.password1 == ret.password2 + +# Same behaviour but using our decorator +@event_parser(model=UserModel, envelope=envelopes.EventBridgeEnvelope) +def handler(event: UserModel, context: LambdaContext): + assert event.password1 == event.password2 diff --git a/docs/examples/utilities/parser/parser_event_bridge_envelope.py b/docs/examples/utilities/parser/parser_event_bridge_envelope.py new file mode 100644 index 00000000000..83a7067c2f5 --- /dev/null +++ b/docs/examples/utilities/parser/parser_event_bridge_envelope.py @@ -0,0 +1,26 @@ +from typing import Any, Dict, Optional, TypeVar, Union + +from aws_lambda_powertools.utilities.parser import BaseEnvelope, BaseModel +from aws_lambda_powertools.utilities.parser.models import EventBridgeModel + +Model = TypeVar("Model", bound=BaseModel) + + +class EventBridgeEnvelope(BaseEnvelope): + def parse(self, data: Optional[Union[Dict[str, Any], Any]], model: Model) -> Optional[Model]: + """Parses data found with model provided + + Parameters + ---------- + data : Dict + Lambda event to be parsed + model : Model + 
Data model provided to parse after extracting data using envelope + + Returns + ------- + Any + Parsed detail payload with model provided + """ + parsed_envelope = EventBridgeModel.parse_obj(data) + return self._parse(data=parsed_envelope.detail, model=model) diff --git a/docs/examples/utilities/parser/parser_event_bridge_model.py b/docs/examples/utilities/parser/parser_event_bridge_model.py new file mode 100644 index 00000000000..286d9f2dc57 --- /dev/null +++ b/docs/examples/utilities/parser/parser_event_bridge_model.py @@ -0,0 +1,16 @@ +from datetime import datetime +from typing import Any, Dict, List + +from aws_lambda_powertools.utilities.parser import BaseModel, Field + + +class EventBridgeModel(BaseModel): + version: str + id: str # noqa: A003,VNE003 + source: str + account: str + time: datetime + region: str + resources: List[str] + detail_type: str = Field(None, alias="detail-type") + detail: Dict[str, Any] diff --git a/docs/examples/utilities/parser/parser_event_parser_decorator.py b/docs/examples/utilities/parser/parser_event_parser_decorator.py new file mode 100644 index 00000000000..d0aa9340eb6 --- /dev/null +++ b/docs/examples/utilities/parser/parser_event_parser_decorator.py @@ -0,0 +1,44 @@ +import json +from typing import List, Optional + +from aws_lambda_powertools.utilities.parser import BaseModel, event_parser +from aws_lambda_powertools.utilities.typing import LambdaContext + + +class OrderItem(BaseModel): + id: int + quantity: int + description: str + + +class Order(BaseModel): + id: int + description: str + items: List[OrderItem] # nesting models are supported + optional_field: Optional[str] # this field may or may not be available when parsing + + +@event_parser(model=Order) +def handler(event: Order, context: LambdaContext): + print(event.id) + print(event.description) + print(event.items) + + order_items = [item for item in event.items] + ... 
+ + +payload = { + "id": 10876546789, + "description": "My order", + "items": [ + { + "id": 1015938732, + "quantity": 1, + "description": "item xpto", + }, + ], +} + +handler(event=payload, context=LambdaContext()) +handler(event=json.dumps(payload), context=LambdaContext()) # also works if event is a JSON string diff --git a/docs/examples/utilities/parser/parser_extending_builtin_models.py b/docs/examples/utilities/parser/parser_extending_builtin_models.py new file mode 100644 index 00000000000..b02e3176983 --- /dev/null +++ b/docs/examples/utilities/parser/parser_extending_builtin_models.py @@ -0,0 +1,52 @@ +from typing import List, Optional + +from aws_lambda_powertools.utilities.parser import BaseModel, parse +from aws_lambda_powertools.utilities.parser.models import EventBridgeModel + + +class OrderItem(BaseModel): + id: int + quantity: int + description: str + + +class Order(BaseModel): + id: int + description: str + items: List[OrderItem] + + +class OrderEventModel(EventBridgeModel): + detail: Order + + +payload = { + "version": "0", + "id": "6a7e8feb-b491-4cf7-a9f1-bf3703467718", + "detail-type": "OrderPurchased", + "source": "OrderService", + "account": "111122223333", + "time": "2020-10-22T18:43:48Z", + "region": "us-west-1", + "resources": ["some_additional"], + "detail": { + "id": 10876546789, + "description": "My order", + "items": [ + { + "id": 1015938732, + "quantity": 1, + "description": "item xpto", + }, + ], + }, +} + +ret = parse(model=OrderEventModel, event=payload) + +assert ret.source == "OrderService" +assert ret.detail.description == "My order" +assert ret.detail_type == "OrderPurchased" # we rename it to snake_case since detail-type is an invalid name + +for order_item in ret.detail.items: + ... 
diff --git a/docs/examples/utilities/parser/parser_model_export.py b/docs/examples/utilities/parser/parser_model_export.py new file mode 100644 index 00000000000..e03277820f6 --- /dev/null +++ b/docs/examples/utilities/parser/parser_model_export.py @@ -0,0 +1,32 @@ +from aws_lambda_powertools import Logger +from aws_lambda_powertools.utilities.parser import BaseModel, ValidationError, parse + +logger = Logger(service="user") + + +class UserModel(BaseModel): + username: str + password1: str + password2: str + + +payload = { + "username": "universe", + "password1": "myp@ssword", + "password2": "repeat password", +} + + +def my_function(): + try: + return parse(model=UserModel, event=payload) + except ValidationError as e: + logger.exception(e.json()) + return {"status_code": 400, "message": "Invalid username"} + + +User: UserModel = my_function() +user_dict = User.dict() +user_json = User.json() +user_json_schema_as_dict = User.schema() +user_json_schema_as_json = User.schema_json(indent=2) diff --git a/docs/examples/utilities/parser/parser_models.py b/docs/examples/utilities/parser/parser_models.py new file mode 100644 index 00000000000..030e27a5135 --- /dev/null +++ b/docs/examples/utilities/parser/parser_models.py @@ -0,0 +1,16 @@ +from typing import List, Optional + +from aws_lambda_powertools.utilities.parser import BaseModel + + +class OrderItem(BaseModel): + id: int + quantity: int + description: str + + +class Order(BaseModel): + id: int + description: str + items: List[OrderItem] # nesting models are supported + optional_field: Optional[str] # this field may or may not be available when parsing diff --git a/docs/examples/utilities/parser/parser_parse_function.py b/docs/examples/utilities/parser/parser_parse_function.py new file mode 100644 index 00000000000..834fe8e1197 --- /dev/null +++ b/docs/examples/utilities/parser/parser_parse_function.py @@ -0,0 +1,39 @@ +from typing import List, Optional + +from
aws_lambda_powertools.utilities.parser import BaseModel, ValidationError, parse + + +class OrderItem(BaseModel): + id: int + quantity: int + description: str + + +class Order(BaseModel): + id: int + description: str + items: List[OrderItem] # nesting models are supported + optional_field: Optional[str] # this field may or may not be available when parsing + + +payload = { + "id": 10876546789, + "description": "My order", + "items": [ + { + # this will cause a validation error + "id": [1015938732], + "quantity": 1, + "description": "item xpto", + } + ], +} + + +def my_function(): + try: + parsed_payload: Order = parse(event=payload, model=Order) + # payload dict is now parsed into our model + return parsed_payload.items + except ValidationError: + return {"status_code": 400, "message": "Invalid order"} diff --git a/docs/examples/utilities/parser/parser_validator.py b/docs/examples/utilities/parser/parser_validator.py new file mode 100644 index 00000000000..aa6f42fb9b0 --- /dev/null +++ b/docs/examples/utilities/parser/parser_validator.py @@ -0,0 +1,14 @@ +from aws_lambda_powertools.utilities.parser import BaseModel, parse, validator + + +class HelloWorldModel(BaseModel): + message: str + + @validator("message") + def is_hello_world(cls, v): + if v != "hello world": + raise ValueError("Message must be hello world!") + return v + + +parse(model=HelloWorldModel, event={"message": "hello universe"}) diff --git a/docs/examples/utilities/parser/parser_validator_all.py b/docs/examples/utilities/parser/parser_validator_all.py new file mode 100644 index 00000000000..d2905e812bb --- /dev/null +++ b/docs/examples/utilities/parser/parser_validator_all.py @@ -0,0 +1,16 @@ +from aws_lambda_powertools.utilities.parser import BaseModel, parse, validator + + +class HelloWorldModel(BaseModel): + message: str + sender: str + + @validator("*") + def has_whitespace(cls, v): + if " " not in v: + raise ValueError("Must have whitespace...") + + return v + + +parse(model=HelloWorldModel, 
event={"message": "hello universe", "sender": "universe"}) diff --git a/docs/examples/utilities/parser/parser_validator_root.py b/docs/examples/utilities/parser/parser_validator_root.py new file mode 100644 index 00000000000..934866d2a1a --- /dev/null +++ b/docs/examples/utilities/parser/parser_validator_root.py @@ -0,0 +1,23 @@ +from aws_lambda_powertools.utilities.parser import BaseModel, parse, root_validator + + +class UserModel(BaseModel): + username: str + password1: str + password2: str + + @root_validator + def check_passwords_match(cls, values): + pw1, pw2 = values.get("password1"), values.get("password2") + if pw1 is not None and pw2 is not None and pw1 != pw2: + raise ValueError("passwords do not match") + return values + + +payload = { + "username": "universe", + "password1": "myp@ssword", + "password2": "repeat password", +} + +parse(model=UserModel, event=payload) diff --git a/docs/examples/utilities/typing/lambda_context.py b/docs/examples/utilities/typing/lambda_context.py new file mode 100644 index 00000000000..0ad59a19d79 --- /dev/null +++ b/docs/examples/utilities/typing/lambda_context.py @@ -0,0 +1,8 @@ +from typing import Any, Dict + +from aws_lambda_powertools.utilities.typing import LambdaContext + + +def handler(event: Dict[str, Any], context: LambdaContext) -> Dict[str, Any]: + # Insert business logic + return event diff --git a/docs/examples/utilities/validation/unwrapping_events.py b/docs/examples/utilities/validation/unwrapping_events.py new file mode 100644 index 00000000000..bcdb27b6fe5 --- /dev/null +++ b/docs/examples/utilities/validation/unwrapping_events.py @@ -0,0 +1,8 @@ +import schemas + +from aws_lambda_powertools.utilities.validation import validator + + +@validator(inbound_schema=schemas.INPUT, envelope="detail") +def handler(event, context): + return event diff --git a/docs/examples/utilities/validation/unwrapping_popular_event_sources.py b/docs/examples/utilities/validation/unwrapping_popular_event_sources.py new file mode 
100644 index 00000000000..a11de9a3478 --- /dev/null +++ b/docs/examples/utilities/validation/unwrapping_popular_event_sources.py @@ -0,0 +1,8 @@ +import schemas + +from aws_lambda_powertools.utilities.validation import envelopes, validator + + +@validator(inbound_schema=schemas.INPUT, envelope=envelopes.EVENTBRIDGE) +def handler(event, context): + return event diff --git a/docs/examples/utilities/validation/validate_custom_format.py b/docs/examples/utilities/validation/validate_custom_format.py new file mode 100644 index 00000000000..00ec025eaa3 --- /dev/null +++ b/docs/examples/utilities/validation/validate_custom_format.py @@ -0,0 +1,10 @@ +import schemas + +from aws_lambda_powertools.utilities.validation import validate + +custom_format = { + "int64": True, # simply ignore it, + "positive": lambda x: False if x < 0 else True, +} + +validate(event=event, schema=schemas.INPUT, formats=custom_format) diff --git a/docs/examples/utilities/validation/validate_jsonschema.py b/docs/examples/utilities/validation/validate_jsonschema.py new file mode 100644 index 00000000000..8df90378bd4 --- /dev/null +++ b/docs/examples/utilities/validation/validate_jsonschema.py @@ -0,0 +1,131 @@ +INPUT = { + "$schema": "http://json-schema.org/draft-04/schema#", + "definitions": { + "AWSAPICallViaCloudTrail": { + "properties": { + "additionalEventData": {"$ref": "#/definitions/AdditionalEventData"}, + "awsRegion": {"type": "string"}, + "errorCode": {"type": "string"}, + "errorMessage": {"type": "string"}, + "eventID": {"type": "string"}, + "eventName": {"type": "string"}, + "eventSource": {"type": "string"}, + "eventTime": {"format": "date-time", "type": "string"}, + "eventType": {"type": "string"}, + "eventVersion": {"type": "string"}, + "recipientAccountId": {"type": "string"}, + "requestID": {"type": "string"}, + "requestParameters": {"$ref": "#/definitions/RequestParameters"}, + "resources": {"items": {"type": "object"}, "type": "array"}, + "responseElements": {"type": ["object",
"null"]}, + "sourceIPAddress": {"type": "string"}, + "userAgent": {"type": "string"}, + "userIdentity": {"$ref": "#/definitions/UserIdentity"}, + "vpcEndpointId": {"type": "string"}, + "x-amazon-open-api-schema-readOnly": {"type": "boolean"}, + }, + "required": [ + "eventID", + "awsRegion", + "eventVersion", + "responseElements", + "sourceIPAddress", + "eventSource", + "requestParameters", + "resources", + "userAgent", + "readOnly", + "userIdentity", + "eventType", + "additionalEventData", + "vpcEndpointId", + "requestID", + "eventTime", + "eventName", + "recipientAccountId", + ], + "type": "object", + }, + "AdditionalEventData": { + "properties": { + "objectRetentionInfo": {"$ref": "#/definitions/ObjectRetentionInfo"}, + "x-amz-id-2": {"type": "string"}, + }, + "required": ["x-amz-id-2"], + "type": "object", + }, + "Attributes": { + "properties": { + "creationDate": {"format": "date-time", "type": "string"}, + "mfaAuthenticated": {"type": "string"}, + }, + "required": ["mfaAuthenticated", "creationDate"], + "type": "object", + }, + "LegalHoldInfo": { + "properties": { + "isUnderLegalHold": {"type": "boolean"}, + "lastModifiedTime": {"format": "int64", "type": "integer"}, + }, + "type": "object", + }, + "ObjectRetentionInfo": { + "properties": { + "legalHoldInfo": {"$ref": "#/definitions/LegalHoldInfo"}, + "retentionInfo": {"$ref": "#/definitions/RetentionInfo"}, + }, + "type": "object", + }, + "RequestParameters": { + "properties": { + "bucketName": {"type": "string"}, + "key": {"type": "string"}, + "legal-hold": {"type": "string"}, + "retention": {"type": "string"}, + }, + "required": ["bucketName", "key"], + "type": "object", + }, + "RetentionInfo": { + "properties": { + "lastModifiedTime": {"format": "int64", "type": "integer"}, + "retainUntilMode": {"type": "string"}, + "retainUntilTime": {"format": "int64", "type": "integer"}, + }, + "type": "object", + }, + "SessionContext": { + "properties": {"attributes": {"$ref": "#/definitions/Attributes"}}, + 
"required": ["attributes"], + "type": "object", + }, + "UserIdentity": { + "properties": { + "accessKeyId": {"type": "string"}, + "accountId": {"type": "string"}, + "arn": {"type": "string"}, + "principalId": {"type": "string"}, + "sessionContext": {"$ref": "#/definitions/SessionContext"}, + "type": {"type": "string"}, + }, + "required": ["accessKeyId", "sessionContext", "accountId", "principalId", "type", "arn"], + "type": "object", + }, + }, + "properties": { + "account": {"type": "string"}, + "detail": {"$ref": "#/definitions/AWSAPICallViaCloudTrail"}, + "detail-type": {"type": "string"}, + "id": {"type": "string"}, + "region": {"type": "string"}, + "resources": {"items": {"type": "string"}, "type": "array"}, + "source": {"type": "string"}, + "time": {"format": "date-time", "type": "string"}, + "version": {"type": "string"}, + }, + "required": ["detail-type", "resources", "id", "source", "time", "detail", "region", "version", "account"], + "title": "AWSAPICallViaCloudTrail", + "type": "object", + "x-amazon-events-detail-type": "AWS API Call via CloudTrail", + "x-amazon-events-source": "aws.s3", +} diff --git a/docs/examples/utilities/validation/validator_decorator.py b/docs/examples/utilities/validation/validator_decorator.py new file mode 100644 index 00000000000..938db5adbe4 --- /dev/null +++ b/docs/examples/utilities/validation/validator_decorator.py @@ -0,0 +1,8 @@ +import schemas + +from aws_lambda_powertools.utilities.validation import validator + + +@validator(inbound_schema=schemas.INPUT, outbound_schema=schemas.OUTPUT) +def handler(event, context): + return event diff --git a/docs/examples/utilities/validation/validator_function.py b/docs/examples/utilities/validation/validator_function.py new file mode 100644 index 00000000000..8a7c4738c6a --- /dev/null +++ b/docs/examples/utilities/validation/validator_function.py @@ -0,0 +1,14 @@ +import schemas + +from aws_lambda_powertools.utilities.validation import validate +from 
aws_lambda_powertools.utilities.validation.exceptions import SchemaValidationError + + +def handler(event, context): + try: + validate(event=event, schema=schemas.INPUT) + except SchemaValidationError as e: + # do something before re-raising + raise + + return event diff --git a/docs/index.md b/docs/index.md index 9a1a876fa0d..aa9dfa4750b 100644 --- a/docs/index.md +++ b/docs/index.md @@ -9,7 +9,6 @@ A suite of utilities for AWS Lambda functions to ease adopting best practices su Check out [this detailed blog post](https://aws.amazon.com/blogs/opensource/simplifying-serverless-best-practices-with-lambda-powertools/) with a practical example. - ## Install Powertools is available in the following formats: @@ -58,12 +57,8 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: === "SAM" - ```yaml hl_lines="5" - MyLambdaFunction: - Type: AWS::Serverless::Function - Properties: - Layers: - - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPython:17 + ```yaml hl_lines="11" + --8<-- "docs/examples/index/lambda_layer_template.yml" ``` === "Serverless framework" @@ -79,71 +74,13 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: === "CDK" ```python hl_lines="11 16" - from aws_cdk import core, aws_lambda - - class SampleApp(core.Construct): - - def __init__(self, scope: core.Construct, id_: str, env: core.Environment) -> None: - super().__init__(scope, id_) - - powertools_layer = aws_lambda.LayerVersion.from_layer_version_arn( - self, - id="lambda-powertools", - layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPython:17" - ) - aws_lambda.Function(self, - 'sample-app-lambda', - runtime=aws_lambda.Runtime.PYTHON_3_9, - layers=[powertools_layer] - # other props... 
- ) + --8<-- "docs/examples/index/lambda_layer_cdk_app.py" ``` === "Terraform" - ```terraform hl_lines="9 38" - terraform { - required_version = "~> 1.0.5" - required_providers { - aws = "~> 3.50.0" - } - } - - provider "aws" { - region = "{region}" - } - - resource "aws_iam_role" "iam_for_lambda" { - name = "iam_for_lambda" - - assume_role_policy = < None: - super().__init__(scope, id_) - - # Launches SAR App as CloudFormation nested stack and return Lambda Layer - powertools_app = sam.CfnApplication(self, - f'{POWERTOOLS_BASE_NAME}Application', - location={ - 'applicationId': POWERTOOLS_ARN, - 'semanticVersion': POWERTOOLS_VER - }, - ) - - powertools_layer_arn = powertools_app.get_att("Outputs.LayerVersionArn").to_string() - powertools_layer_version = aws_lambda.LayerVersion.from_layer_version_arn(self, f'{POWERTOOLS_BASE_NAME}', powertools_layer_arn) - - aws_lambda.Function(self, - 'sample-app-lambda', - runtime=aws_lambda.Runtime.PYTHON_3_8, - function_name='sample-lambda', - code=aws_lambda.Code.asset('./src'), - handler='app.handler', - layers: [powertools_layer_version] - ) + ```python hl_lines="19 22-25 34" + --8<-- "docs/examples/index/sar_cdk_app.py" ``` === "Terraform" @@ -289,47 +181,7 @@ If using SAM, you can include this SAR App as part of your shared Layers stack, > Credits to [Dani Comnea](https://github.com/DanyC97) for providing the Terraform equivalent. 
```terraform hl_lines="12-13 15-20 23-25 40" - terraform { - required_version = "~> 0.13" - required_providers { - aws = "~> 3.50.0" - } - } - - provider "aws" { - region = "us-east-1" - } - - resource "aws_serverlessapplicationrepository_cloudformation_stack" "deploy_sar_stack" { - name = "aws-lambda-powertools-python-layer" - - application_id = data.aws_serverlessapplicationrepository_application.sar_app.application_id - semantic_version = data.aws_serverlessapplicationrepository_application.sar_app.semantic_version - capabilities = [ - "CAPABILITY_IAM", - "CAPABILITY_NAMED_IAM" - ] - } - - data "aws_serverlessapplicationrepository_application" "sar_app" { - application_id = "arn:aws:serverlessrepo:eu-west-1:057560766410:applications/aws-lambda-powertools-python-layer" - semantic_version = var.aws_powertools_version - } - - variable "aws_powertools_version" { - type = string - default = "1.20.2" - description = "The AWS Powertools release version" - } - - output "deployed_powertools_sar_version" { - value = data.aws_serverlessapplicationrepository_application.sar_app.semantic_version - } - - # Fetch Lambda Powertools Layer ARN from deployed SAR App - output "aws_lambda_powertools_layer_arn" { - value = aws_serverlessapplicationrepository_cloudformation_stack.deploy_sar_stack.outputs.LayerVersionArn - } + --8<-- "docs/examples/index/sar_main.tf" ``` ??? 
example "Example: Least-privileged IAM permissions to deploy Layer" @@ -341,60 +193,7 @@ If using SAM, you can include this SAR App as part of your shared Layers stack, === "template.yml" ```yaml hl_lines="21-52" - AWSTemplateFormatVersion: "2010-09-09" - Resources: - PowertoolsLayerIamRole: - Type: "AWS::IAM::Role" - Properties: - AssumeRolePolicyDocument: - Version: "2012-10-17" - Statement: - - Effect: "Allow" - Principal: - Service: - - "cloudformation.amazonaws.com" - Action: - - "sts:AssumeRole" - Path: "/" - PowertoolsLayerIamPolicy: - Type: "AWS::IAM::Policy" - Properties: - PolicyName: PowertoolsLambdaLayerPolicy - PolicyDocument: - Version: "2012-10-17" - Statement: - - Sid: CloudFormationTransform - Effect: Allow - Action: cloudformation:CreateChangeSet - Resource: - - arn:aws:cloudformation:us-east-1:aws:transform/Serverless-2016-10-31 - - Sid: GetCfnTemplate - Effect: Allow - Action: - - serverlessrepo:CreateCloudFormationTemplate - - serverlessrepo:GetCloudFormationTemplate - Resource: - # this is arn of the powertools SAR app - - arn:aws:serverlessrepo:eu-west-1:057560766410:applications/aws-lambda-powertools-python-layer - - Sid: S3AccessLayer - Effect: Allow - Action: - - s3:GetObject - Resource: - # AWS publishes to an external S3 bucket locked down to your account ID - # The below example is us publishing lambda powertools - # Bucket: awsserverlessrepo-changesets-plntc6bfnfj - # Key: *****/arn:aws:serverlessrepo:eu-west-1:057560766410:applications-aws-lambda-powertools-python-layer-versions-1.10.2/aeeccf50-****-****-****-********* - - arn:aws:s3:::awsserverlessrepo-changesets-*/* - - Sid: GetLayerVersion - Effect: Allow - Action: - - lambda:PublishLayerVersion - - lambda:GetLayerVersion - Resource: - - !Sub arn:aws:lambda:${AWS::Region}:${AWS::AccountId}:layer:aws-lambda-powertools-python-layer* - Roles: - - Ref: "PowertoolsLayerIamRole" + --8<-- "docs/examples/index/least_priviledged_template.yml" ``` You can fetch available versions via SAR 
ListApplicationVersions API: @@ -455,9 +254,7 @@ Core utilities such as Tracing, Logging, Metrics, and Event Handler will be avai As a best practice, AWS Lambda Powertools module logging statements are suppressed. If necessary, you can enable debugging using `set_package_logger` for additional information on every internal operation: ```python title="Powertools debug mode example" -from aws_lambda_powertools.logging.logger import set_package_logger - -set_package_logger() # (1) +--8<-- "docs/examples/index/debug_mode.py" ``` 1. :information_source: this will configure our `aws_lambda_powertools` logger with debug. diff --git a/docs/shared/validation_basic_jsonschema.py b/docs/shared/validation_basic_jsonschema.py index afb8a723d18..e9e3ae8ea4d 100644 --- a/docs/shared/validation_basic_jsonschema.py +++ b/docs/shared/validation_basic_jsonschema.py @@ -33,7 +33,15 @@ "examples": [{"statusCode": 200, "body": "response"}], "required": ["statusCode", "body"], "properties": { - "statusCode": {"$id": "#/properties/statusCode", "type": "integer", "title": "The statusCode"}, - "body": {"$id": "#/properties/body", "type": "string", "title": "The response"}, + "statusCode": { + "$id": "#/properties/statusCode", + "type": "integer", + "title": "The statusCode", + }, + "body": { + "$id": "#/properties/body", + "type": "string", + "title": "The response", + }, }, } diff --git a/docs/tutorial/index.md b/docs/tutorial/index.md index 5ea8ec7f2fa..d8033b75fd4 100644 --- a/docs/tutorial/index.md +++ b/docs/tutorial/index.md @@ -36,52 +36,22 @@ Let's configure our base application to look like the following code snippet. 
=== "app.py" ```python - import json - - - def hello(): - return {"statusCode": 200, "body": json.dumps({"message": "hello unknown!"})} - - - def lambda_handler(event, context): - return hello() + --8<-- "docs/examples/tutorial/code_example_app.py" ``` === "template.yaml" ```yaml - AWSTemplateFormatVersion: '2010-09-09' - Transform: AWS::Serverless-2016-10-31 - Description: Sample SAM Template for powertools-quickstart - Globals: - Function: - Timeout: 3 - Resources: - HelloWorldFunction: - Type: AWS::Serverless::Function - Properties: - CodeUri: hello_world/ - Handler: app.lambda_handler - Runtime: python3.9 - Architectures: - - x86_64 - Events: - HelloWorld: - Type: Api - Properties: - Path: /hello - Method: get - Outputs: - HelloWorldApi: - Description: "API Gateway endpoint URL for Prod stage for Hello World function" - Value: !Sub "https://${ServerlessRestApi}.execute-api.${AWS::Region}.amazonaws.com/Prod/hello/" + --8<-- "docs/examples/tutorial/code_example_template.yml" ``` + Our Lambda code consists of an entry point function named `lambda_handler`, and a `hello` function. When API Gateway receives a HTTP GET request on `/hello` route, Lambda will call our `lambda_handler` function, subsequently calling the `hello` function. API Gateway will use this response to return the correct HTTP Status Code and payload back to the caller. ???+ warning For simplicity, we do not set up authentication and authorization! You can find more information on how to implement it on [AWS SAM documentation](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-controlling-access-to-apis.html){target="_blank"}. + ### Run your code At each point, you have two ways to run your code: locally and within your AWS account. 
@@ -106,7 +76,6 @@ As a result, a local API endpoint will be exposed and you can invoke it using yo ???+ info To learn more about local testing, please visit the [AWS SAM CLI local testing](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/sam-cli-command-reference-sam-local-start-api.html) documentation. - #### Live test First, you need to deploy your application into your AWS Account by issuing `sam build && sam deploy --guided` command. This command builds a ZIP package of your source code, and deploy it to your AWS Account. @@ -154,57 +123,13 @@ For this to work, we could create a new Lambda function to handle incoming reque === "hello_by_name.py" ```python - import json - - - def hello_name(name): - return {"statusCode": 200, "body": json.dumps({"message": f"hello {name}!"})} - - - def lambda_handler(event, context): - name = event["pathParameters"]["name"] - return hello_name(name) + --8<-- "docs/examples/tutorial/add_route_hello_by_name.py" ``` === "template.yaml" ```yaml hl_lines="21-32" - AWSTemplateFormatVersion: "2010-09-09" - Transform: AWS::Serverless-2016-10-31 - Description: Sample SAM Template for powertools-quickstart - Globals: - Function: - Timeout: 3 - Resources: - HelloWorldFunction: - Type: AWS::Serverless::Function - Properties: - CodeUri: hello_world/ - Handler: app.lambda_handler - Runtime: python3.9 - Events: - HelloWorld: - Type: Api - Properties: - Path: /hello - Method: get - - HelloWorldByNameFunctionName: - Type: AWS::Serverless::Function - Properties: - CodeUri: hello_world/ - Handler: hello_by_name.lambda_handler - Runtime: python3.9 - Events: - HelloWorldName: - Type: Api - Properties: - Path: /hello/{name} - Method: get - Outputs: - HelloWorldApi: - Description: "API Gateway endpoint URL for Prod stage for Hello World function" - Value: !Sub "https://${ServerlessRestApi}.execute-api.${AWS::Region}.amazonaws.com/Prod/hello/" + --8<-- "docs/examples/tutorial/add_route_temaplate.yml" ``` ???+ question @@ 
-226,76 +151,14 @@ A first attempt at the routing logic might look similar to the following code sn === "app.py" - ```python hl_lines="4 9 13 27-29 35-36" - import json - - - def hello_name(event, **kargs): - username = event["pathParameters"]["name"] - return {"statusCode": 200, "body": json.dumps({"message": f"hello {username}!"})} - - - def hello(**kargs): - return {"statusCode": 200, "body": json.dumps({"message": "hello unknown!"})} - - - class Router: - def __init__(self): - self.routes = {} - - def set(self, path, method, handler): - self.routes[f"{path}-{method}"] = handler - - def get(self, path, method): - try: - route = self.routes[f"{path}-{method}"] - except KeyError: - raise RuntimeError(f"Cannot route request to the correct method. path={path}, method={method}") - return route - - router = Router() - router.set(path="/hello", method="GET", handler=hello) - router.set(path="/hello/{name}", method="GET", handler=hello_name) - - - def lambda_handler(event, context): - path = event["resource"] - http_method = event["httpMethod"] - method = router.get(path=path, method=http_method) - return method(event=event) + ```python hl_lines="4 9 13 28-30 36-37" + --8<-- "docs/examples/tutorial/create_own_router_app.py" ``` === "template.yaml" ```yaml hl_lines="15-24" - AWSTemplateFormatVersion: "2010-09-09" - Transform: AWS::Serverless-2016-10-31 - Description: Sample SAM Template for powertools-quickstart - Globals: - Function: - Timeout: 3 - Resources: - HelloWorldFunction: - Type: AWS::Serverless::Function - Properties: - CodeUri: hello_world/ - Handler: app.lambda_handler - Runtime: python3.9 - Events: - HelloWorld: - Type: Api - Properties: - Path: /hello - Method: get - HelloWorldName: - Type: Api - Properties: - Path: /hello/{name} - Method: get - Outputs: - HelloWorldApi: - Description: "API Gateway endpoint URL for Prod stage for Hello World function" - Value: !Sub "https://${ServerlessRestApi}.execute-api.${AWS::Region}.amazonaws.com/Prod/hello/" + --8<-- 
"docs/examples/tutorial/create_own_router_template.yml" ``` Let's break this down: @@ -322,23 +185,7 @@ Let's include Lambda Powertools as a dependency in `requirement.txt`, and use Ev === "app.py" ```python hl_lines="1 3 6 11 17" - from aws_lambda_powertools.event_handler import APIGatewayRestResolver - - app = APIGatewayRestResolver() - - - @app.get("/hello/") - def hello_name(name): - return {"message": f"hello {name}!"} - - - @app.get("/hello") - def hello(): - return {"message": "hello unknown!"} - - - def lambda_handler(event, context): - return app.resolve(event, context) + --8<-- "docs/examples/tutorial/event_handler_app.py" ``` === "requirements.txt" @@ -374,39 +221,10 @@ The first option could be to use the standard Python Logger, and use a specializ === "app.py" - ```python hl_lines="4 5 7-12 19 25 30" - import logging - import os - - from pythonjsonlogger import jsonlogger - from aws_lambda_powertools.event_handler import APIGatewayRestResolver - - logger = logging.getLogger("APP") - logHandler = logging.StreamHandler() - formatter = jsonlogger.JsonFormatter(fmt="%(asctime)s %(levelname)s %(name)s %(message)s") - logHandler.setFormatter(formatter) - logger.addHandler(logHandler) - logger.setLevel(os.getenv("LOG_LEVEL", "INFO")) - - app = APIGatewayRestResolver() - - - @app.get("/hello/") - def hello_name(name): - logger.info(f"Request from {name} received") - return {"message": f"hello {name}!"} - - - @app.get("/hello") - def hello(): - logger.info("Request from unknown received") - return {"message": "hello unknown!"} - - - def lambda_handler(event, context): - logger.debug(event) - return app.resolve(event, context) + ```python hl_lines="4 6 8-13 19 25 30" + --8<-- "docs/examples/tutorial/json_logger_app.py" ``` + === "requirements.txt" ```bash @@ -416,9 +234,9 @@ The first option could be to use the standard Python Logger, and use a specializ With just a few lines our logs will now output to `JSON` format. 
We've taken the following steps to make that work: -* **L7**: Creates an application logger named `APP`. -* **L8-11**: Configures handler and formatter. -* **L12**: Sets the logging level set in the `LOG_LEVEL` environment variable, or `INFO` as a sentinel value. +* **L8**: Creates an application logger named `APP`. +* **L9-12**: Configures handler and formatter. +* **L13**: Sets the logging level set in the `LOG_LEVEL` environment variable, or `INFO` as a sentinel value. After that, we use this logger in our application code to record the required information. We see logs structured as follows: @@ -443,7 +261,6 @@ So far, so good! We can take a step further now by adding additional context to We could start by creating a dictionary with Lambda context information or something from the incoming event, which should always be logged. Additional attributes could be added on every `logger.info` using `extra` keyword like in any standard Python logger. - ### Simplifying with Logger ???+ question "Surely this could be easier, right?" @@ -452,30 +269,7 @@ We could start by creating a dictionary with Lambda context information or somet As we already have Lambda Powertools as a dependency, we can simply import [Logger](../core/logger.md){target="_blank"}. 
```python title="Refactoring with Lambda Powertools Logger" hl_lines="1 3 5 12 18 22" -from aws_lambda_powertools import Logger -from aws_lambda_powertools.event_handler import APIGatewayRestResolver -from aws_lambda_powertools.logging import correlation_paths - -logger = Logger(service="APP") - -app = APIGatewayRestResolver() - - -@app.get("/hello/") -def hello_name(name): - logger.info(f"Request from {name} received") - return {"message": f"hello {name}!"} - - -@app.get("/hello") -def hello(): - logger.info("Request from unknown received") - return {"message": "hello unknown!"} - - -@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST, log_event=True) -def lambda_handler(event, context): - return app.resolve(event, context) +--8<-- "docs/examples/tutorial/logger_app.py" ``` Let's break this down: @@ -485,7 +279,6 @@ Let's break this down: * **L22**: We also instruct Logger to use the incoming API Gateway Request ID as a [correlation id](../core/logger.md##set_correlation_id-method) automatically. * **L22**: Since we're in dev, we also use `log_event=True` to automatically log each incoming request for debugging. This can be also set via [environment variables](./index.md#environment-variables){target="_blank"}. 
- This is how the logs would look like now: ```json title="Our logs are now structured consistently" @@ -536,71 +329,13 @@ Let's explore how we can instrument our code with [AWS X-Ray SDK](https://docs.a === "app.py" ```python hl_lines="1 13 20 27" - from aws_xray_sdk.core import xray_recorder - - from aws_lambda_powertools import Logger - from aws_lambda_powertools.event_handler import APIGatewayRestResolver - from aws_lambda_powertools.logging import correlation_paths - - logger = Logger(service="APP") - - app = APIGatewayRestResolver() - - - @app.get("/hello/") - @xray_recorder.capture('hello_name') - def hello_name(name): - logger.info(f"Request from {name} received") - return {"message": f"hello {name}!"} - - - @app.get("/hello") - @xray_recorder.capture('hello') - def hello(): - logger.info("Request from unknown received") - return {"message": "hello unknown!"} - - - @logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST, log_event=True) - @xray_recorder.capture('handler') - def lambda_handler(event, context): - return app.resolve(event, context) + --8<-- "docs/examples/tutorial/generate_traces_app.py" ``` === "template.yaml" ```yaml hl_lines="7-8 16" - AWSTemplateFormatVersion: "2010-09-09" - Transform: AWS::Serverless-2016-10-31 - Description: Sample SAM Template for powertools-quickstart - Globals: - Function: - Timeout: 3 - Api: - TracingEnabled: true - Resources: - HelloWorldFunction: - Type: AWS::Serverless::Function - Properties: - CodeUri: hello_world/ - Handler: app.lambda_handler - Runtime: python3.9 - Tracing: Active - Events: - HelloWorld: - Type: Api - Properties: - Path: /hello - Method: get - HelloWorldName: - Type: Api - Properties: - Path: /hello/{name} - Method: get - Outputs: - HelloWorldApi: - Description: "API Gateway endpoint URL for Prod stage for Hello World function" - Value: !Sub "https://${ServerlessRestApi}.execute-api.${AWS::Region}.amazonaws.com/Prod/hello/" + --8<-- 
"docs/examples/tutorial/generate_traces_template.yml" ``` === "requirements.txt" @@ -646,53 +381,7 @@ Within AWS X-Ray, we can answer these questions by using two features: tracing * Let's put them into action. ```python title="Enriching traces with annotations and metadata" hl_lines="10 17-18 26-27 35 37-42 45" -from aws_xray_sdk.core import patch_all, xray_recorder - -from aws_lambda_powertools import Logger -from aws_lambda_powertools.event_handler import APIGatewayRestResolver -from aws_lambda_powertools.logging import correlation_paths - -logger = Logger(service="APP") - -app = APIGatewayRestResolver() -cold_start = True -patch_all() - - -@app.get("/hello/") -@xray_recorder.capture('hello_name') -def hello_name(name): - subsegment = xray_recorder.current_subsegment() - subsegment.put_annotation(key="User", value=name) - logger.info(f"Request from {name} received") - return {"message": f"hello {name}!"} - - -@app.get("/hello") -@xray_recorder.capture('hello') -def hello(): - subsegment = xray_recorder.current_subsegment() - subsegment.put_annotation(key="User", value="unknown") - logger.info("Request from unknown received") - return {"message": "hello unknown!"} - - -@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST, log_event=True) -@xray_recorder.capture('handler') -def lambda_handler(event, context): - global cold_start - - subsegment = xray_recorder.current_subsegment() - if cold_start: - subsegment.put_annotation(key="ColdStart", value=cold_start) - cold_start = False - else: - subsegment.put_annotation(key="ColdStart", value=cold_start) - - result = app.resolve(event, context) - subsegment.put_metadata("response", result) - - return result +--8<-- "docs/examples/tutorial/enrich_generate_traces_app.py" ``` Let's break it down: @@ -725,35 +414,7 @@ We can simplify our previous patterns by using [Lambda Powertools Tracer](../cor You can now safely remove `aws-xray-sdk` from `requirements.txt`; keep `aws-lambda-powertools` 
only. ```python title="Refactoring with Lambda Powertools Tracer" hl_lines="1 6 11 13 19 21 27" -from aws_lambda_powertools import Logger, Tracer -from aws_lambda_powertools.event_handler import APIGatewayRestResolver -from aws_lambda_powertools.logging import correlation_paths - -logger = Logger(service="APP") -tracer = Tracer(service="APP") -app = APIGatewayRestResolver() - - -@app.get("/hello/") -@tracer.capture_method -def hello_name(name): - tracer.put_annotation(key="User", value=name) - logger.info(f"Request from {name} received") - return {"message": f"hello {name}!"} - - -@app.get("/hello") -@tracer.capture_method -def hello(): - tracer.put_annotation(key="User", value="unknown") - logger.info("Request from unknown received") - return {"message": "hello unknown!"} - - -@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST, log_event=True) -@tracer.capture_lambda_handler -def lambda_handler(event, context): - return app.resolve(event, context) +--8<-- "docs/examples/tutorial/tracer_app.py" ``` Decorators, annotations and metadata are largely the same, except we now have a much cleaner code as the boilerplate is gone. 
Here's what's changed compared to AWS X-Ray SDK approach: @@ -801,114 +462,13 @@ Let's expand our application with custom metrics using AWS SDK to see how it wor === "app.py" ```python hl_lines="3 10 14 19-47 55 64" - import os - - import boto3 - - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.event_handler import APIGatewayRestResolver - from aws_lambda_powertools.logging import correlation_paths - - cold_start = True - metric_namespace = "MyApp" - - logger = Logger(service="APP") - tracer = Tracer(service="APP") - metrics = boto3.client("cloudwatch") - app = APIGatewayRestResolver() - - - @tracer.capture_method - def add_greeting_metric(service: str = "APP"): - function_name = os.getenv("AWS_LAMBDA_FUNCTION_NAME", "undefined") - service_dimension = {"Name": "service", "Value": service} - function_dimension = {"Name": "function_name", "Value": function_name} - is_cold_start = True - - global cold_start - if cold_start: - cold_start = False - else: - is_cold_start = False - - return metrics.put_metric_data( - MetricData=[ - { - "MetricName": "SuccessfulGreetings", - "Dimensions": [service_dimension], - "Unit": "Count", - "Value": 1, - }, - { - "MetricName": "ColdStart", - "Dimensions": [service_dimension, function_dimension], - "Unit": "Count", - "Value": int(is_cold_start) - } - ], - Namespace=metric_namespace, - ) - - - @app.get("/hello/") - @tracer.capture_method - def hello_name(name): - tracer.put_annotation(key="User", value=name) - logger.info(f"Request from {name} received") - add_greeting_metric() - return {"message": f"hello {name}!"} - - - @app.get("/hello") - @tracer.capture_method - def hello(): - tracer.put_annotation(key="User", value="unknown") - logger.info("Request from unknown received") - add_greeting_metric() - return {"message": "hello unknown!"} - - - @logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST, log_event=True) - @tracer.capture_lambda_handler - def lambda_handler(event, 
context): - return app.resolve(event, context) + --8<-- "docs/examples/tutorial/create_metrics_app.py" ``` === "template.yaml" - ```yaml hl_lines="27 28" - AWSTemplateFormatVersion: "2010-09-09" - Transform: AWS::Serverless-2016-10-31 - Description: Sample SAM Template for powertools-quickstart - Globals: - Function: - Timeout: 3 - Resources: - HelloWorldFunction: - Type: AWS::Serverless::Function - Properties: - CodeUri: hello_world/ - Handler: app.lambda_handler - Runtime: python3.9 - Tracing: Active - Events: - HelloWorld: - Type: Api - Properties: - Path: /hello - Method: get - HelloWorldName: - Type: Api - Properties: - Path: /hello/{name} - Method: get - Policies: - - CloudWatchPutMetricPolicy: {} - Outputs: - HelloWorldApi: - Description: "API Gateway endpoint URL for Prod stage for Hello World function" - Value: !Sub "https://${ServerlessRestApi}.execute-api.${AWS::Region}.amazonaws.com/Prod/hello/" - + ```yaml hl_lines="26 27" + --8<-- "docs/examples/tutorial/create_metrics_template.yml" ``` There's a lot going on, let's break this down: @@ -939,54 +499,16 @@ In general terms, EMF is a specification that expects metrics in a JSON payload Let's implement that using [Metrics](../core/metrics.md){target="_blank}: -```python title="Refactoring with Lambda Powertools Metrics" hl_lines="1 4 9 18 27 33" -from aws_lambda_powertools import Logger, Tracer, Metrics -from aws_lambda_powertools.event_handler import APIGatewayRestResolver -from aws_lambda_powertools.logging import correlation_paths -from aws_lambda_powertools.metrics import MetricUnit - - -logger = Logger(service="APP") -tracer = Tracer(service="APP") -metrics = Metrics(namespace="MyApp", service="APP") -app = APIGatewayRestResolver() - - -@app.get("/hello/") -@tracer.capture_method -def hello_name(name): - tracer.put_annotation(key="User", value=name) - logger.info(f"Request from {name} received") - metrics.add_metric(name="SuccessfulGreetings", unit=MetricUnit.Count, value=1) - return {"message": 
f"hello {name}!"} - - -@app.get("/hello") -@tracer.capture_method -def hello(): - tracer.put_annotation(key="User", value="unknown") - logger.info("Request from unknown received") - metrics.add_metric(name="SuccessfulGreetings", unit=MetricUnit.Count, value=1) - return {"message": "hello unknown!"} - - -@tracer.capture_lambda_handler -@logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST, log_event=True) -@metrics.log_metrics(capture_cold_start_metric=True) -def lambda_handler(event, context): - try: - return app.resolve(event, context) - except Exception as e: - logger.exception(e) - raise +```python title="Refactoring with Lambda Powertools Metrics" hl_lines="1 4 8 17 26 32" +--8<-- "docs/examples/tutorial/metrics_app.py" ``` That's a lot less boilerplate code! Let's break this down: -* **L9**: We initialize `Metrics` with our service name (`APP`) and metrics namespace (`MyApp`), reducing the need to add the `service` dimension for every metric and setting the namespace later -* **L18, 27**: We use `add_metric` similarly to our custom function, except we now have an enum `MetricCount` to help us understand which Metric Units we have at our disposal -* **L33**: We use `@metrics.log_metrics` decorator to ensure that our metrics are aligned with the EMF output and validated before-hand, like in case we forget to set namespace, or accidentally use a metric unit as a string that doesn't exist in CloudWatch. -* **L33**: We also use `capture_cold_start_metric=True` so we don't have to handle that logic either. Note that [Metrics](../core/metrics.md){target="_blank"} does not publish a warm invocation metric (ColdStart=0) for cost reasons. As such, treat the absence (sparse metric) as a non-cold start invocation. 
+* **L8**: We initialize `Metrics` with our service name (`APP`) and metrics namespace (`MyApp`), reducing the need to add the `service` dimension for every metric and setting the namespace later
+* **L17, 26**: We use `add_metric` similarly to our custom function, except we now have an enum `MetricUnit` to help us understand which Metric Units we have at our disposal
+* **L32**: We use `@metrics.log_metrics` decorator to ensure that our metrics are aligned with the EMF output and validated before-hand, like in case we forget to set namespace, or accidentally use a metric unit as a string that doesn't exist in CloudWatch.
+* **L32**: We also use `capture_cold_start_metric=True` so we don't have to handle that logic either. Note that [Metrics](../core/metrics.md){target="_blank"} does not publish a warm invocation metric (ColdStart=0) for cost reasons. As such, treat the absence (sparse metric) as a non-cold start invocation.

Repeat the process of building, deploying, and invoking your application via the API endpoint a few times to generate metrics - [Artillery](https://www.artillery.io/){target="_blank"} and [K6.io](https://k6.io/open-source){target="_blank"} are quick ways to generate some load. Within [CloudWatch Metrics view](https://console.aws.amazon.com/cloudwatch/home#metricsV2:graph=~()){target="_blank}, you should see `MyApp` custom namespace with your custom metrics there and `SuccessfulGreetings` available to graph.

diff --git a/docs/utilities/batch.md b/docs/utilities/batch.md
index 14dc80bdb11..69ce45bb2a2 100644
--- a/docs/utilities/batch.md
+++ b/docs/utilities/batch.md
@@ -20,8 +20,8 @@ If your function fails to process any message from the batch, the entire batch r

With this utility, batch records are processed individually – only messages that failed to be processed return to the queue or stream for a further retry. This works when two mechanisms are in place:

-1. 
`ReportBatchItemFailures` is set in your SQS, Kinesis, or DynamoDB event source properties -2. [A specific response](https://docs.aws.amazon.com/lambda/latest/dg/with-sqs.html#sqs-batchfailurereporting-syntax){target="_blank"} is returned so Lambda knows which records should not be deleted during partial responses +1. `ReportBatchItemFailures` is set in your SQS, Kinesis, or DynamoDB event source properties +2. [A specific response](https://docs.aws.amazon.com/lambda/latest/dg/with-sqs.html#sqs-batchfailurereporting-syntax){target="_blank"} is returned so Lambda knows which records should not be deleted during partial responses ???+ warning "Warning: This utility lowers the chance of processing records more than once; it does not guarantee it" We recommend implementing processing logic in an [idempotent manner](idempotency.md){target="_blank"} wherever possible. @@ -38,257 +38,46 @@ You do not need any additional IAM permissions to use this utility, except for w The remaining sections of the documentation will rely on these samples. For completeness, this demonstrates IAM permissions and Dead Letter Queue where batch records will be sent after 2 retries were attempted. 
- === "SQS" - ```yaml title="template.yaml" hl_lines="31-32" - AWSTemplateFormatVersion: '2010-09-09' - Transform: AWS::Serverless-2016-10-31 - Description: partial batch response sample - - Globals: - Function: - Timeout: 5 - MemorySize: 256 - Runtime: python3.9 - Tracing: Active - Environment: - Variables: - LOG_LEVEL: INFO - POWERTOOLS_SERVICE_NAME: hello - - Resources: - HelloWorldFunction: - Type: AWS::Serverless::Function - Properties: - Handler: app.lambda_handler - CodeUri: hello_world - Policies: - - SQSPollerPolicy: - QueueName: !GetAtt SampleQueue.QueueName - Events: - Batch: - Type: SQS - Properties: - Queue: !GetAtt SampleQueue.Arn - FunctionResponseTypes: - - ReportBatchItemFailures - - SampleDLQ: - Type: AWS::SQS::Queue - - SampleQueue: - Type: AWS::SQS::Queue - Properties: - VisibilityTimeout: 30 # Fn timeout * 6 - RedrivePolicy: - maxReceiveCount: 2 - deadLetterTargetArn: !GetAtt SampleDLQ.Arn + ```yaml title="template.yaml" hl_lines="30-31" + --8<-- "docs/examples/utilities/batch/sqs_template.yml" ``` === "Kinesis Data Streams" ```yaml title="template.yaml" hl_lines="44-45" - AWSTemplateFormatVersion: '2010-09-09' - Transform: AWS::Serverless-2016-10-31 - Description: partial batch response sample - - Globals: - Function: - Timeout: 5 - MemorySize: 256 - Runtime: python3.9 - Tracing: Active - Environment: - Variables: - LOG_LEVEL: INFO - POWERTOOLS_SERVICE_NAME: hello - - Resources: - HelloWorldFunction: - Type: AWS::Serverless::Function - Properties: - Handler: app.lambda_handler - CodeUri: hello_world - Policies: - # Lambda Destinations require additional permissions - # to send failure records to DLQ from Kinesis/DynamoDB - - Version: "2012-10-17" - Statement: - Effect: "Allow" - Action: - - sqs:GetQueueAttributes - - sqs:GetQueueUrl - - sqs:SendMessage - Resource: !GetAtt SampleDLQ.Arn - Events: - KinesisStream: - Type: Kinesis - Properties: - Stream: !GetAtt SampleStream.Arn - BatchSize: 100 - StartingPosition: LATEST - MaximumRetryAttempts: 
2 - DestinationConfig: - OnFailure: - Destination: !GetAtt SampleDLQ.Arn - FunctionResponseTypes: - - ReportBatchItemFailures - - SampleDLQ: - Type: AWS::SQS::Queue - - SampleStream: - Type: AWS::Kinesis::Stream - Properties: - ShardCount: 1 + --8<-- "docs/examples/utilities/batch/kinesis_data_streams_template.yml" ``` === "DynamoDB Streams" ```yaml title="template.yaml" hl_lines="43-44" - AWSTemplateFormatVersion: '2010-09-09' - Transform: AWS::Serverless-2016-10-31 - Description: partial batch response sample - - Globals: - Function: - Timeout: 5 - MemorySize: 256 - Runtime: python3.9 - Tracing: Active - Environment: - Variables: - LOG_LEVEL: INFO - POWERTOOLS_SERVICE_NAME: hello - - Resources: - HelloWorldFunction: - Type: AWS::Serverless::Function - Properties: - Handler: app.lambda_handler - CodeUri: hello_world - Policies: - # Lambda Destinations require additional permissions - # to send failure records from Kinesis/DynamoDB - - Version: "2012-10-17" - Statement: - Effect: "Allow" - Action: - - sqs:GetQueueAttributes - - sqs:GetQueueUrl - - sqs:SendMessage - Resource: !GetAtt SampleDLQ.Arn - Events: - DynamoDBStream: - Type: DynamoDB - Properties: - Stream: !GetAtt SampleTable.StreamArn - StartingPosition: LATEST - MaximumRetryAttempts: 2 - DestinationConfig: - OnFailure: - Destination: !GetAtt SampleDLQ.Arn - FunctionResponseTypes: - - ReportBatchItemFailures - - SampleDLQ: - Type: AWS::SQS::Queue - - SampleTable: - Type: AWS::DynamoDB::Table - Properties: - BillingMode: PAY_PER_REQUEST - AttributeDefinitions: - - AttributeName: pk - AttributeType: S - - AttributeName: sk - AttributeType: S - KeySchema: - - AttributeName: pk - KeyType: HASH - - AttributeName: sk - KeyType: RANGE - SSESpecification: - SSEEnabled: yes - StreamSpecification: - StreamViewType: NEW_AND_OLD_IMAGES - + --8<-- "docs/examples/utilities/batch/dynamodb_streams_template.yml" ``` ### Processing messages from SQS Processing batches from SQS works in four stages: -1. 
Instantiate **`BatchProcessor`** and choose **`EventType.SQS`** for the event type -2. Define your function to handle each batch record, and use [`SQSRecord`](data_classes.md#sqs){target="_blank"} type annotation for autocompletion -3. Use either **`batch_processor`** decorator or your instantiated processor as a context manager to kick off processing -4. Return the appropriate response contract to Lambda via **`.response()`** processor method +1. Instantiate **`BatchProcessor`** and choose **`EventType.SQS`** for the event type +2. Define your function to handle each batch record, and use [`SQSRecord`](data_classes.md#sqs){target="_blank"} type annotation for autocompletion +3. Use either **`batch_processor`** decorator or your instantiated processor as a context manager to kick off processing +4. Return the appropriate response contract to Lambda via **`.response()`** processor method ???+ info This code example optionally uses Tracer and Logger for completion. === "As a decorator" - ```python hl_lines="4-5 9 15 23 25" - import json - - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor - from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord - from aws_lambda_powertools.utilities.typing import LambdaContext - - - processor = BatchProcessor(event_type=EventType.SQS) - tracer = Tracer() - logger = Logger() - - - @tracer.capture_method - def record_handler(record: SQSRecord): - payload: str = record.body - if payload: - item: dict = json.loads(payload) - ... 
- - @logger.inject_lambda_context - @tracer.capture_lambda_handler - @batch_processor(record_handler=record_handler, processor=processor) - def lambda_handler(event, context: LambdaContext): - return processor.response() + ```python hl_lines="4-5 8 14 23 25" + --8<-- "docs/examples/utilities/batch/sqs_decorator.py" ``` === "As a context manager" - ```python hl_lines="4-5 9 15 24-26 28" - import json - - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor - from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord - from aws_lambda_powertools.utilities.typing import LambdaContext - - - processor = BatchProcessor(event_type=EventType.SQS) - tracer = Tracer() - logger = Logger() - - - @tracer.capture_method - def record_handler(record: SQSRecord): - payload: str = record.body - if payload: - item: dict = json.loads(payload) - ... - - @logger.inject_lambda_context - @tracer.capture_lambda_handler - def lambda_handler(event, context: LambdaContext): - batch = event["Records"] - with processor(records=batch, handler=record_handler): - processed_messages = processor.process() # kick off processing, return list[tuple] - - return processor.response() + ```python hl_lines="4-5 8 14 24-26 28" + --8<-- "docs/examples/utilities/batch/sqs_context_manager.py" ``` === "Sample response" @@ -350,73 +139,24 @@ Processing batches from SQS works in four stages: Processing batches from Kinesis works in four stages: -1. Instantiate **`BatchProcessor`** and choose **`EventType.KinesisDataStreams`** for the event type -2. Define your function to handle each batch record, and use [`KinesisStreamRecord`](data_classes.md#kinesis-streams){target="_blank"} type annotation for autocompletion -3. Use either **`batch_processor`** decorator or your instantiated processor as a context manager to kick off processing -4. 
Return the appropriate response contract to Lambda via **`.response()`** processor method +1. Instantiate **`BatchProcessor`** and choose **`EventType.KinesisDataStreams`** for the event type +2. Define your function to handle each batch record, and use [`KinesisStreamRecord`](data_classes.md#kinesis-streams){target="_blank"} type annotation for autocompletion +3. Use either **`batch_processor`** decorator or your instantiated processor as a context manager to kick off processing +4. Return the appropriate response contract to Lambda via **`.response()`** processor method ???+ info This code example optionally uses Tracer and Logger for completion. === "As a decorator" - ```python hl_lines="4-5 9 15 22 24" - import json - - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor - from aws_lambda_powertools.utilities.data_classes.kinesis_stream_event import KinesisStreamRecord - from aws_lambda_powertools.utilities.typing import LambdaContext - - - processor = BatchProcessor(event_type=EventType.KinesisDataStreams) - tracer = Tracer() - logger = Logger() - - - @tracer.capture_method - def record_handler(record: KinesisStreamRecord): - logger.info(record.kinesis.data_as_text) - payload: dict = record.kinesis.data_as_json() - ... 
- - @logger.inject_lambda_context - @tracer.capture_lambda_handler - @batch_processor(record_handler=record_handler, processor=processor) - def lambda_handler(event, context: LambdaContext): - return processor.response() + ```python hl_lines="4-5 8 14 22 24" + --8<-- "docs/examples/utilities/batch/kinesis_data_streams_decorator.py" ``` === "As a context manager" - ```python hl_lines="4-5 9 15 23-25 27" - import json - - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor - from aws_lambda_powertools.utilities.data_classes.kinesis_stream_event import KinesisStreamRecord - from aws_lambda_powertools.utilities.typing import LambdaContext - - - processor = BatchProcessor(event_type=EventType.KinesisDataStreams) - tracer = Tracer() - logger = Logger() - - - @tracer.capture_method - def record_handler(record: KinesisStreamRecord): - logger.info(record.kinesis.data_as_text) - payload: dict = record.kinesis.data_as_json() - ... - - @logger.inject_lambda_context - @tracer.capture_lambda_handler - def lambda_handler(event, context: LambdaContext): - batch = event["Records"] - with processor(records=batch, handler=record_handler): - processed_messages = processor.process() # kick off processing, return list[tuple] - - return processor.response() + ```python hl_lines="4-5 8 14 23-25 27" + --8<-- "docs/examples/utilities/batch/kinesis_data_streams_context_manager.py" ``` === "Sample response" @@ -433,7 +173,6 @@ Processing batches from Kinesis works in four stages: } ``` - === "Sample event" ```json @@ -475,84 +214,28 @@ Processing batches from Kinesis works in four stages: } ``` - ### Processing messages from DynamoDB Processing batches from Kinesis works in four stages: -1. Instantiate **`BatchProcessor`** and choose **`EventType.DynamoDBStreams`** for the event type -2. 
Define your function to handle each batch record, and use [`DynamoDBRecord`](data_classes.md#dynamodb-streams){target="_blank"} type annotation for autocompletion -3. Use either **`batch_processor`** decorator or your instantiated processor as a context manager to kick off processing -4. Return the appropriate response contract to Lambda via **`.response()`** processor method +1. Instantiate **`BatchProcessor`** and choose **`EventType.DynamoDBStreams`** for the event type +2. Define your function to handle each batch record, and use [`DynamoDBRecord`](data_classes.md#dynamodb-streams){target="_blank"} type annotation for autocompletion +3. Use either **`batch_processor`** decorator or your instantiated processor as a context manager to kick off processing +4. Return the appropriate response contract to Lambda via **`.response()`** processor method ???+ info This code example optionally uses Tracer and Logger for completion. === "As a decorator" - ```python hl_lines="4-5 9 15 25 27" - import json - - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor - from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import DynamoDBRecord - from aws_lambda_powertools.utilities.typing import LambdaContext - - - processor = BatchProcessor(event_type=EventType.DynamoDBStreams) - tracer = Tracer() - logger = Logger() - - - @tracer.capture_method - def record_handler(record: DynamoDBRecord): - logger.info(record.dynamodb.new_image) - payload: dict = json.loads(record.dynamodb.new_image.get("Message").get_value) - # alternatively: - # changes: Dict[str, dynamo_db_stream_event.AttributeValue] = record.dynamodb.new_image - # payload = change.get("Message").raw_event -> {"S": ""} - ... 
- - @logger.inject_lambda_context - @tracer.capture_lambda_handler - @batch_processor(record_handler=record_handler, processor=processor) - def lambda_handler(event, context: LambdaContext): - return processor.response() + ```python hl_lines="4-5 8 14 25 27" + --8<-- "docs/examples/utilities/batch/dynamodb_streams_decorator.py" ``` === "As a context manager" - ```python hl_lines="4-5 9 15 26-28 30" - import json - - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor - from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import DynamoDBRecord - from aws_lambda_powertools.utilities.typing import LambdaContext - - - processor = BatchProcessor(event_type=EventType.DynamoDBStreams) - tracer = Tracer() - logger = Logger() - - - @tracer.capture_method - def record_handler(record: DynamoDBRecord): - logger.info(record.dynamodb.new_image) - payload: dict = json.loads(record.dynamodb.new_image.get("item").s_value) - # alternatively: - # changes: Dict[str, dynamo_db_stream_event.AttributeValue] = record.dynamodb.new_image - # payload = change.get("Message").raw_event -> {"S": ""} - ... - - @logger.inject_lambda_context - @tracer.capture_lambda_handler - def lambda_handler(event, context: LambdaContext): - batch = event["Records"] - with processor(records=batch, handler=record_handler): - processed_messages = processor.process() # kick off processing, return list[tuple] - - return processor.response() + ```python hl_lines="4-5 8 14 26-28 30" + --8<-- "docs/examples/utilities/batch/dynamodb_streams_context_manager.py" ``` === "Sample response" @@ -569,7 +252,6 @@ Processing batches from Kinesis works in four stages: } ``` - === "Sample event" ```json @@ -638,7 +320,6 @@ All records in the batch will be passed to this handler for processing, even if All processing logic will and should be performed by the `record_handler` function. 
- ## Advanced ### Pydantic integration @@ -647,139 +328,22 @@ You can bring your own Pydantic models via **`model`** parameter when inheriting Inheritance is importance because we need to access message IDs and sequence numbers from these records in the event of failure. Mypy is fully integrated with this utility, so it should identify whether you're passing the incorrect Model. - === "SQS" - ```python hl_lines="5 9-10 12-19 21 27" - import json - - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor - from aws_lambda_powertools.utilities.parser.models import SqsRecordModel - from aws_lambda_powertools.utilities.typing import LambdaContext - - - class Order(BaseModel): - item: dict - - class OrderSqsRecord(SqsRecordModel): - body: Order - - # auto transform json string - # so Pydantic can auto-initialize nested Order model - @validator("body", pre=True) - def transform_body_to_dict(cls, value: str): - return json.loads(value) - - processor = BatchProcessor(event_type=EventType.SQS, model=OrderSqsRecord) - tracer = Tracer() - logger = Logger() - - - @tracer.capture_method - def record_handler(record: OrderSqsRecord): - return record.body.item - - @logger.inject_lambda_context - @tracer.capture_lambda_handler - @batch_processor(record_handler=record_handler, processor=processor) - def lambda_handler(event, context: LambdaContext): - return processor.response() + ```python hl_lines="6 10-11 14-21 24 30" + --8<-- "docs/examples/utilities/batch/sqs_pydantic_inheritance.py" ``` === "Kinesis Data Streams" - ```python hl_lines="5 9-10 12-20 22-23 26 32" - import json - - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor - from aws_lambda_powertools.utilities.parser.models import KinesisDataStreamRecord - from aws_lambda_powertools.utilities.typing import LambdaContext - - - class 
Order(BaseModel): - item: dict - - class OrderKinesisPayloadRecord(KinesisDataStreamRecordPayload): - data: Order - - # auto transform json string - # so Pydantic can auto-initialize nested Order model - @validator("data", pre=True) - def transform_message_to_dict(cls, value: str): - # Powertools KinesisDataStreamRecordModel already decodes b64 to str here - return json.loads(value) - - class OrderKinesisRecord(KinesisDataStreamRecordModel): - kinesis: OrderKinesisPayloadRecord - - - processor = BatchProcessor(event_type=EventType.KinesisDataStreams, model=OrderKinesisRecord) - tracer = Tracer() - logger = Logger() - - - @tracer.capture_method - def record_handler(record: OrderKinesisRecord): - return record.kinesis.data.item - - - @logger.inject_lambda_context - @tracer.capture_lambda_handler - @batch_processor(record_handler=record_handler, processor=processor) - def lambda_handler(event, context: LambdaContext): - return processor.response() + ```python hl_lines="8 12-13 16-24 27-28 31 37" + --8<-- "docs/examples/utilities/batch/kinesis_data_streams_pydantic_inheritance.py" ``` === "DynamoDB Streams" - ```python hl_lines="7 11-12 14-21 23-25 27-28 31 37" - import json - - from typing import Dict, Literal - - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor - from aws_lambda_powertools.utilities.parser.models import DynamoDBStreamRecordModel - from aws_lambda_powertools.utilities.typing import LambdaContext - - - class Order(BaseModel): - item: dict - - class OrderDynamoDB(BaseModel): - Message: Order - - # auto transform json string - # so Pydantic can auto-initialize nested Order model - @validator("Message", pre=True) - def transform_message_to_dict(cls, value: Dict[Literal["S"], str]): - return json.loads(value["S"]) - - class OrderDynamoDBChangeRecord(DynamoDBStreamChangedRecordModel): - NewImage: Optional[OrderDynamoDB] - OldImage: Optional[OrderDynamoDB] - - 
class OrderDynamoDBRecord(DynamoDBStreamRecordModel): - dynamodb: OrderDynamoDBChangeRecord - - - processor = BatchProcessor(event_type=EventType.DynamoDBStreams, model=OrderKinesisRecord) - tracer = Tracer() - logger = Logger() - - - @tracer.capture_method - def record_handler(record: OrderDynamoDBRecord): - return record.dynamodb.NewImage.Message.item - - - @logger.inject_lambda_context - @tracer.capture_lambda_handler - @batch_processor(record_handler=record_handler, processor=processor) - def lambda_handler(event, context: LambdaContext): - return processor.response() + ```python hl_lines="7 11-12 15-22 25-27 30-31 34 40" + --8<-- "docs/examples/utilities/batch/dynamodb_streams_pydantic_inheritance.py" ``` ### Accessing processed messages @@ -789,51 +353,10 @@ Use the context manager to access a list of all returned values from your `recor * **When successful**. We will include a tuple with `success`, the result of `record_handler`, and the batch record * **When failed**. We will include a tuple with `fail`, exception as a string, and the batch record - -```python hl_lines="31-38" title="Accessing processed messages via context manager" -import json - -from typing import Any, List, Literal, Union - -from aws_lambda_powertools import Logger, Tracer -from aws_lambda_powertools.utilities.batch import (BatchProcessor, - EventType, - FailureResponse, - SuccessResponse, - batch_processor) -from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord -from aws_lambda_powertools.utilities.typing import LambdaContext - - -processor = BatchProcessor(event_type=EventType.SQS) -tracer = Tracer() -logger = Logger() - - -@tracer.capture_method -def record_handler(record: SQSRecord): - payload: str = record.body - if payload: - item: dict = json.loads(payload) - ... 
- -@logger.inject_lambda_context -@tracer.capture_lambda_handler -def lambda_handler(event, context: LambdaContext): - batch = event["Records"] - with processor(records=batch, handler=record_handler): - processed_messages: List[Union[SuccessResponse, FailureResponse]] = processor.process() - - for message in processed_messages: - status: Union[Literal["success"], Literal["fail"]] = message[0] - result: Any = message[1] - record: SQSRecord = message[2] - - - return processor.response() +```python hl_lines="5 26-27 29-32" title="Accessing processed messages via context manager" +--8<-- "docs/examples/utilities/batch/sqs_processed_messages_context_manager.py" ``` - ### Extending BatchProcessor You might want to bring custom logic to the existing `BatchProcessor` to slightly override how we handle successes and failures. @@ -847,35 +370,7 @@ For these scenarios, you can subclass `BatchProcessor` and quickly override `suc Let's suppose you'd like to add a metric named `BatchRecordFailures` for each batch record that failed processing ```python title="Extending failure handling mechanism in BatchProcessor" - -from typing import Tuple - -from aws_lambda_powertools import Metrics -from aws_lambda_powertools.metrics import MetricUnit -from aws_lambda_powertools.utilities.batch import batch_processor, BatchProcessor, ExceptionInfo, EventType, FailureResponse -from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord - - -class MyProcessor(BatchProcessor): - def failure_handler(self, record: SQSRecord, exception: ExceptionInfo) -> FailureResponse: - metrics.add_metric(name="BatchRecordFailures", unit=MetricUnit.Count, value=1) - return super().failure_handler(record, exception) - -processor = MyProcessor(event_type=EventType.SQS) -metrics = Metrics(namespace="test") - - -@tracer.capture_method -def record_handler(record: SQSRecord): - payload: str = record.body - if payload: - item: dict = json.loads(payload) - ... 
- -@metrics.log_metrics(capture_cold_start_metric=True) -@batch_processor(record_handler=record_handler, processor=processor) -def lambda_handler(event, context: LambdaContext): - return processor.response() +--8<-- "docs/examples/utilities/batch/sqs_batch_processor_extension.py" ``` ### Create your own partial processor @@ -888,66 +383,8 @@ You can create your own partial batch processor from scratch by inheriting the ` You can then use this class as a context manager, or pass it to `batch_processor` to use as a decorator on your Lambda handler function. -```python hl_lines="3 9 24 30 37 57" title="Creating a custom batch processor" -from random import randint - -from aws_lambda_powertools.utilities.batch import BasePartialProcessor, batch_processor -import boto3 -import os - -table_name = os.getenv("TABLE_NAME", "table_not_found") - -class MyPartialProcessor(BasePartialProcessor): - """ - Process a record and stores successful results at a Amazon DynamoDB Table - - Parameters - ---------- - table_name: str - DynamoDB table name to write results to - """ - - def __init__(self, table_name: str): - self.table_name = table_name - - super().__init__() - - def _prepare(self): - # It's called once, *before* processing - # Creates table resource and clean previous results - self.ddb_table = boto3.resource("dynamodb").Table(self.table_name) - self.success_messages.clear() - - def _clean(self): - # It's called once, *after* closing processing all records (closing the context manager) - # Here we're sending, at once, all successful messages to a ddb table - with self.ddb_table.batch_writer() as batch: - for result in self.success_messages: - batch.put_item(Item=result) - - def _process_record(self, record): - # It handles how your record is processed - # Here we're keeping the status of each run - # where self.handler is the record_handler function passed as an argument - try: - result = self.handler(record) # record_handler passed to decorator/context manager - return 
self.success_handler(record, result) - except Exception as exc: - return self.failure_handler(record, exc) - - def success_handler(self, record): - entry = ("success", result, record) - message = {"age": result} - self.success_messages.append(message) - return entry - - -def record_handler(record): - return randint(0, 100) - -@batch_processor(record_handler=record_handler, processor=MyPartialProcessor(table_name)) -def lambda_handler(event, context): - return {"statusCode": 200} +```python hl_lines="6 11 26 32 39 60" title="Creating a custom batch processor" +--8<-- "docs/examples/utilities/batch/custom_batch_processor.py" ``` ### Caveats @@ -958,34 +395,8 @@ When using Tracer to capture responses for each batch record processing, you mig If that's the case, you can configure [Tracer to disable response auto-capturing](../core/tracer.md#disabling-response-auto-capture){target="_blank"}. - -```python hl_lines="14" title="Disabling Tracer response auto-capturing" -import json - -from aws_lambda_powertools import Logger, Tracer -from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor -from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord -from aws_lambda_powertools.utilities.typing import LambdaContext - - -processor = BatchProcessor(event_type=EventType.SQS) -tracer = Tracer() -logger = Logger() - - -@tracer.capture_method(capture_response=False) -def record_handler(record: SQSRecord): - payload: str = record.body - if payload: - item: dict = json.loads(payload) - ... 
- -@logger.inject_lambda_context -@tracer.capture_lambda_handler -@batch_processor(record_handler=record_handler, processor=processor) -def lambda_handler(event, context: LambdaContext): - return processor.response() - +```python hl_lines="13" title="Disabling Tracer response auto-capturing" +--8<-- "docs/examples/utilities/batch/caveats_tracer_response_auto_capture.py" ``` ## Testing your code @@ -999,87 +410,13 @@ Given a SQS batch where the first batch record succeeds and the second fails pro === "test_app.py" ```python - import json - - from pathlib import Path - from dataclasses import dataclass - - import pytest - from src.app import lambda_handler, processor - - - def load_event(path: Path): - with path.open() as f: - return json.load(f) - - - @pytest.fixture - def lambda_context(): - @dataclass - class LambdaContext: - function_name: str = "test" - memory_limit_in_mb: int = 128 - invoked_function_arn: str = "arn:aws:lambda:eu-west-1:809313241:function:test" - aws_request_id: str = "52fdfc07-2182-154f-163f-5f0f9a621d72" - - return LambdaContext() - - @pytest.fixture() - def sqs_event(): - """Generates API GW Event""" - return load_event(path=Path("events/sqs_event.json")) - - - def test_app_batch_partial_response(sqs_event, lambda_context): - # GIVEN - processor = app.processor # access processor for additional assertions - successful_record = sqs_event["Records"][0] - failed_record = sqs_event["Records"][1] - expected_response = { - "batchItemFailures: [ - { - "itemIdentifier": failed_record["messageId"] - } - ] - } - - # WHEN - ret = app.lambda_handler(sqs_event, lambda_context) - - # THEN - assert ret == expected_response - assert len(processor.fail_messages) == 1 - assert processor.success_messages[0] == successful_record + --8<-- "docs/examples/utilities/batch/testing_test_app.py" ``` === "src/app.py" ```python - import json - - from aws_lambda_powertools import Logger, Tracer - from aws_lambda_powertools.utilities.batch import BatchProcessor, 
EventType, batch_processor - from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord - from aws_lambda_powertools.utilities.typing import LambdaContext - - - processor = BatchProcessor(event_type=EventType.SQS) - tracer = Tracer() - logger = Logger() - - - @tracer.capture_method - def record_handler(record: SQSRecord): - payload: str = record.body - if payload: - item: dict = json.loads(payload) - ... - - @logger.inject_lambda_context - @tracer.capture_lambda_handler - @batch_processor(record_handler=record_handler, processor=processor) - def lambda_handler(event, context: LambdaContext): - return processor.response() + --8<-- "docs/examples/utilities/batch/testing_src_app.py" ``` === "Sample SQS event" @@ -1123,8 +460,6 @@ Given a SQS batch where the first batch record succeeds and the second fails pro } ``` - - ## FAQ ### Choosing between decorator and context manager @@ -1137,26 +472,15 @@ When using Sentry.io for error monitoring, you can override `failure_handler` to > Credits to [Charles-Axel Dein](https://github.com/awslabs/aws-lambda-powertools-python/issues/293#issuecomment-781961732) -```python hl_lines="4 7-8" title="Integrating error tracking with Sentry.io" -from typing import Tuple - -from aws_lambda_powertools.utilities.batch import BatchProcessor, FailureResponse -from sentry_sdk import capture_exception - - -class MyProcessor(BatchProcessor): - def failure_handler(self, record, exception) -> FailureResponse: - capture_exception() # send exception to Sentry - return super().failure_handler(record, exception) +```python hl_lines="3 8-9" title="Integrating error tracking with Sentry.io" +--8<-- "docs/examples/utilities/batch/sentry_integration.py" ``` - ## Legacy ???+ tip This is kept for historical purposes. Use the new [BatchProcessor](#processing-messages-from-sqs) instead. - ### Migration guide ???+ info @@ -1175,82 +499,28 @@ You can migrate in three steps: 2. 
If you were using **`PartialSQSProcessor`** you can now use **`BatchProcessor`** 3. Change your Lambda Handler to return the new response format - === "Decorator: Before" - ```python hl_lines="1 6" - from aws_lambda_powertools.utilities.batch import sqs_batch_processor - - def record_handler(record): - return do_something_with(record["body"]) - - @sqs_batch_processor(record_handler=record_handler) - def lambda_handler(event, context): - return {"statusCode": 200} + ```python hl_lines="1 8" + --8<-- "docs/examples/utilities/batch/migration_decorator_before.py" ``` === "Decorator: After" - ```python hl_lines="3 5 11" - import json - - from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor - - processor = BatchProcessor(event_type=EventType.SQS) - - - def record_handler(record): - return do_something_with(record["body"]) - - @batch_processor(record_handler=record_handler, processor=processor) - def lambda_handler(event, context): - return processor.response() + ```python hl_lines="3 5 12" + --8<-- "docs/examples/utilities/batch/migration_decorator_after.py" ``` - === "Context manager: Before" - ```python hl_lines="1-2 4 14 19" - from aws_lambda_powertools.utilities.batch import PartialSQSProcessor - from botocore.config import Config - - config = Config(region_name="us-east-1") - - def record_handler(record): - return_value = do_something_with(record["body"]) - return return_value - - - def lambda_handler(event, context): - records = event["Records"] - - processor = PartialSQSProcessor(config=config) - - with processor(records, record_handler): - result = processor.process() - - return result + ```python hl_lines="1 3 5 16 21" + --8<-- "docs/examples/utilities/batch/migration_context_manager_before.py" ``` === "Context manager: After" - ```python hl_lines="1 11" - from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor - - - def record_handler(record): - return_value = 
do_something_with(record["body"]) - return return_value - - def lambda_handler(event, context): - records = event["Records"] - - processor = BatchProcessor(event_type=EventType.SQS) - - with processor(records, record_handler): - result = processor.process() - - return processor.response() + ```python hl_lines="1 12" + --8<-- "docs/examples/utilities/batch/migration_context_manager_after.py" ``` ### Customizing boto configuration @@ -1263,94 +533,28 @@ decorator or `PartialSQSProcessor` class. === "Decorator" - ```python hl_lines="4 12" - from aws_lambda_powertools.utilities.batch import sqs_batch_processor - from botocore.config import Config - - config = Config(region_name="us-east-1") - - def record_handler(record): - # This will be called for each individual message from a batch - # It should raise an exception if the message was not processed successfully - return_value = do_something_with(record["body"]) - return return_value - - @sqs_batch_processor(record_handler=record_handler, config=config) - def lambda_handler(event, context): - return {"statusCode": 200} + ```python hl_lines="5 15" + --8<-- "docs/examples/utilities/batch/custom_config_decorator.py" ``` === "Context manager" - ```python hl_lines="4 16" - from aws_lambda_powertools.utilities.batch import PartialSQSProcessor - from botocore.config import Config - - config = Config(region_name="us-east-1") - - def record_handler(record): - # This will be called for each individual message from a batch - # It should raise an exception if the message was not processed successfully - return_value = do_something_with(record["body"]) - return return_value - - - def lambda_handler(event, context): - records = event["Records"] - - processor = PartialSQSProcessor(config=config) - - with processor(records, record_handler): - result = processor.process() - - return result + ```python hl_lines="5 18" + --8<-- "docs/examples/utilities/batch/custom_config_context_manager.py" ``` > Custom boto3 session example === 
"Decorator" - ```python hl_lines="4 12" - from aws_lambda_powertools.utilities.batch import sqs_batch_processor - from botocore.config import Config - - session = boto3.session.Session() - - def record_handler(record): - # This will be called for each individual message from a batch - # It should raise an exception if the message was not processed successfully - return_value = do_something_with(record["body"]) - return return_value - - @sqs_batch_processor(record_handler=record_handler, boto3_session=session) - def lambda_handler(event, context): - return {"statusCode": 200} + ```python hl_lines="5 15" + --8<-- "docs/examples/utilities/batch/custom_boto3_session_decorator.py" ``` === "Context manager" - ```python hl_lines="4 16" - from aws_lambda_powertools.utilities.batch import PartialSQSProcessor - import boto3 - - session = boto3.session.Session() - - def record_handler(record): - # This will be called for each individual message from a batch - # It should raise an exception if the message was not processed successfully - return_value = do_something_with(record["body"]) - return return_value - - - def lambda_handler(event, context): - records = event["Records"] - - processor = PartialSQSProcessor(boto3_session=session) - - with processor(records, record_handler): - result = processor.process() - - return result + ```python hl_lines="5 18" + --8<-- "docs/examples/utilities/batch/custom_boto3_session_context_manager.py" ``` ### Suppressing exceptions @@ -1359,21 +563,12 @@ If you want to disable the default behavior where `SQSBatchProcessingError` is r === "Decorator" - ```python hl_lines="3" - from aws_lambda_powertools.utilities.batch import sqs_batch_processor - - @sqs_batch_processor(record_handler=record_handler, config=config, suppress_exception=True) - def lambda_handler(event, context): - return {"statusCode": 200} + ```python hl_lines="6" + --8<-- "docs/examples/utilities/batch/suppress_exception_decorator_sqs_batch_processor.py" ``` === "Context 
manager" - ```python hl_lines="3" - from aws_lambda_powertools.utilities.batch import PartialSQSProcessor - - processor = PartialSQSProcessor(config=config, suppress_exception=True) - - with processor(records, record_handler): - result = processor.process() + ```python hl_lines="5" + --8<-- "docs/examples/utilities/batch/suppress_exception_partial_sqs_processor.py" ``` diff --git a/docs/utilities/data_classes.md b/docs/utilities/data_classes.md index 8353d904bb1..c317c149342 100644 --- a/docs/utilities/data_classes.md +++ b/docs/utilities/data_classes.md @@ -29,27 +29,18 @@ For example, if your Lambda function is being triggered by an API Gateway proxy === "app.py" - ```python hl_lines="1 4" - from aws_lambda_powertools.utilities.data_classes import APIGatewayProxyEvent - - def lambda_handler(event: dict, context): - event = APIGatewayProxyEvent(event) - if 'helloworld' in event.path and event.http_method == 'GET': - do_something_with(event.body, user) + ```python hl_lines="1 5" + --8<-- "docs/examples/utilities/data_classes/using_data_classes.py" ``` Same example as above, but using the `event_source` decorator === "app.py" - ```python hl_lines="1 3" - from aws_lambda_powertools.utilities.data_classes import event_source, APIGatewayProxyEvent - - @event_source(data_class=APIGatewayProxyEvent) - def lambda_handler(event: APIGatewayProxyEvent, context): - if 'helloworld' in event.path and event.http_method == 'GET': - do_something_with(event.body, user) + ```python hl_lines="1 4" + --8<-- "docs/examples/utilities/data_classes/using_data_classes_event_source.py" ``` + **Autocomplete with self-documented properties and methods** ![Utilities Data Classes](../media/utilities_data_classes.png) @@ -92,21 +83,8 @@ for more details. 
=== "app.py" - ```python hl_lines="4-5 9-10" - from typing import Dict - - from aws_lambda_powertools import Logger - from aws_lambda_powertools.utilities.data_classes import event_source - from aws_lambda_powertools.utilities.data_classes.active_mq_event import ActiveMQEvent - - logger = Logger() - - @event_source(data_class=ActiveMQEvent) - def lambda_handler(event: ActiveMQEvent, context): - for message in event.messages: - logger.debug(f"MessageID: {message.message_id}") - data: Dict = message.json_data - logger.debug("Process json in base64 encoded data str", data) + ```python hl_lines="4-5 10-11" + --8<-- "docs/examples/utilities/data_classes/app_active_mq.py" ``` ### API Gateway Authorizer @@ -123,86 +101,13 @@ Use **`APIGatewayAuthorizerRequestEvent`** for type `REQUEST` and **`APIGatewayA When the user is found, it includes the user details in the request context that will be available to the back-end, and returns a full access policy for admin users. - ```python hl_lines="2-6 29 36-42 47 49" - from aws_lambda_powertools.utilities.data_classes import event_source - from aws_lambda_powertools.utilities.data_classes.api_gateway_authorizer_event import ( - DENY_ALL_RESPONSE, - APIGatewayAuthorizerRequestEvent, - APIGatewayAuthorizerResponse, - HttpVerb, - ) - from secrets import compare_digest - - - def get_user_by_token(token): - if compare_digest(token, "admin-foo"): - return {"id": 0, "name": "Admin", "isAdmin": True} - elif compare_digest(token, "regular-foo"): - return {"id": 1, "name": "Joe"} - else: - return None - - - @event_source(data_class=APIGatewayAuthorizerRequestEvent) - def handler(event: APIGatewayAuthorizerRequestEvent, context): - user = get_user_by_token(event.get_header_value("Authorization")) - - if user is None: - # No user was found - # to return 401 - `{"message":"Unauthorized"}`, but pollutes lambda error count metrics - # raise Exception("Unauthorized") - # to return 403 - `{"message":"Forbidden"}` - return DENY_ALL_RESPONSE - - # 
parse the `methodArn` as an `APIGatewayRouteArn` - arn = event.parsed_arn - - # Create the response builder from parts of the `methodArn` - # and set the logged in user id and context - policy = APIGatewayAuthorizerResponse( - principal_id=user["id"], - context=user, - region=arn.region, - aws_account_id=arn.aws_account_id, - api_id=arn.api_id, - stage=arn.stage, - ) - - # Conditional IAM Policy - if user.get("isAdmin", False): - policy.allow_all_routes() - else: - policy.allow_route(HttpVerb.GET, "/user-profile") - - return policy.asdict() + ```python hl_lines="4-9 30 37-44 48 50 52" + --8<-- "docs/examples/utilities/data_classes/app_rest_api_type_request.py" ``` === "app_type_token.py" ```python hl_lines="2-5 12-18 21 23-24" - from aws_lambda_powertools.utilities.data_classes import event_source - from aws_lambda_powertools.utilities.data_classes.api_gateway_authorizer_event import ( - APIGatewayAuthorizerTokenEvent, - APIGatewayAuthorizerResponse, - ) - - - @event_source(data_class=APIGatewayAuthorizerTokenEvent) - def handler(event: APIGatewayAuthorizerTokenEvent, context): - arn = event.parsed_arn - - policy = APIGatewayAuthorizerResponse( - principal_id="user", - region=arn.region, - aws_account_id=arn.aws_account_id, - api_id=arn.api_id, - stage=arn.stage - ) - - if event.authorization_token == "42": - policy.allow_all_routes() - else: - policy.deny_all_routes() - return policy.asdict() + --8<-- "docs/examples/utilities/data_classes/app_rest_api_type_token.py" ``` ### API Gateway Authorizer V2 @@ -216,31 +121,8 @@ See also [this blog post](https://aws.amazon.com/blogs/compute/introducing-iam-a This example looks up user details via `x-token` header. It uses `APIGatewayAuthorizerResponseV2` to return a deny policy when user is not found or authorized. 
- ```python hl_lines="2-5 21 24" - from aws_lambda_powertools.utilities.data_classes import event_source - from aws_lambda_powertools.utilities.data_classes.api_gateway_authorizer_event import ( - APIGatewayAuthorizerEventV2, - APIGatewayAuthorizerResponseV2, - ) - from secrets import compare_digest - - - def get_user_by_token(token): - if compare_digest(token, "Foo"): - return {"name": "Foo"} - return None - - - @event_source(data_class=APIGatewayAuthorizerEventV2) - def handler(event: APIGatewayAuthorizerEventV2, context): - user = get_user_by_token(event.get_header_value("x-token")) - - if user is None: - # No user was found, so we return not authorized - return APIGatewayAuthorizerResponseV2().asdict() - - # Found the user and setting the details in the context - return APIGatewayAuthorizerResponseV2(authorize=True, context=user).asdict() + ```python hl_lines="4-7 21 24" + --8<-- "docs/examples/utilities/data_classes/app_http_api_authorizer.py" ``` ### API Gateway Proxy @@ -250,15 +132,7 @@ It is used for either API Gateway REST API or HTTP API using v1 proxy event. === "app.py" ```python - from aws_lambda_powertools.utilities.data_classes import event_source, APIGatewayProxyEvent - - @event_source(data_class=APIGatewayProxyEvent) - def lambda_handler(event: APIGatewayProxyEvent, context): - if "helloworld" in event.path and event.http_method == "GET": - request_context = event.request_context - identity = request_context.identity - user = identity.user - do_something_with(event.json_body, user) + --8<-- "docs/examples/utilities/data_classes/app_rest_api.py" ``` ### API Gateway Proxy V2 @@ -268,12 +142,7 @@ It is used for HTTP API using v2 proxy event. 
=== "app.py" ```python - from aws_lambda_powertools.utilities.data_classes import event_source, APIGatewayProxyEventV2 - - @event_source(data_class=APIGatewayProxyEventV2) - def lambda_handler(event: APIGatewayProxyEventV2, context): - if "helloworld" in event.path and event.http_method == "POST": - do_something_with(event.json_body, event.query_string_parameters) + --8<-- "docs/examples/utilities/data_classes/app_http_api.py" ``` ### Application Load Balancer @@ -283,12 +152,7 @@ Is it used for Application load balancer event. === "app.py" ```python - from aws_lambda_powertools.utilities.data_classes import event_source, ALBEvent - - @event_source(data_class=ALBEvent) - def lambda_handler(event: ALBEvent, context): - if "helloworld" in event.path and event.http_method == "POST": - do_something_with(event.json_body, event.query_string_parameters) + --8<-- "docs/examples/utilities/data_classes/app_alb.py" ``` ### AppSync Authorizer @@ -304,39 +168,7 @@ In this example extract the `requestId` as the `correlation_id` for logging, use === "app.py" ```python - from typing import Dict - - from aws_lambda_powertools.logging import correlation_paths - from aws_lambda_powertools.logging.logger import Logger - from aws_lambda_powertools.utilities.data_classes.appsync_authorizer_event import ( - AppSyncAuthorizerEvent, - AppSyncAuthorizerResponse, - ) - from aws_lambda_powertools.utilities.data_classes.event_source import event_source - - logger = Logger() - - - def get_user_by_token(token: str): - """Look a user by token""" - ... 
- - - @logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_AUTHORIZER) - @event_source(data_class=AppSyncAuthorizerEvent) - def lambda_handler(event: AppSyncAuthorizerEvent, context) -> Dict: - user = get_user_by_token(event.authorization_token) - - if not user: - # No user found, return not authorized - return AppSyncAuthorizerResponse().asdict() - - return AppSyncAuthorizerResponse( - authorize=True, - resolver_context={"id": user.id}, - # Only allow admins to delete events - deny_fields=None if user.is_admin else ["Mutation.deleteEvent"], - ).asdict() + --8<-- "docs/examples/utilities/data_classes/app_appsync_authorizer.py" ``` ### AppSync Resolver @@ -350,40 +182,8 @@ In this example, we also use the new Logger `correlation_id` and built-in `corre === "app.py" - ```python hl_lines="2-5 12 14 19 21 29-30" - from aws_lambda_powertools.logging import Logger, correlation_paths - from aws_lambda_powertools.utilities.data_classes.appsync_resolver_event import ( - AppSyncResolverEvent, - AppSyncIdentityCognito - ) - - logger = Logger() - - def get_locations(name: str = None, size: int = 0, page: int = 0): - """Your resolver logic here""" - - @logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER) - def lambda_handler(event, context): - event: AppSyncResolverEvent = AppSyncResolverEvent(event) - - # Case insensitive look up of request headers - x_forwarded_for = event.get_header_value("x-forwarded-for") - - # Support for AppSyncIdentityCognito or AppSyncIdentityIAM identity types - assert isinstance(event.identity, AppSyncIdentityCognito) - identity: AppSyncIdentityCognito = event.identity - - # Logging with correlation_id - logger.debug({ - "x-forwarded-for": x_forwarded_for, - "username": identity.username - }) - - if event.type_name == "Merchant" and event.field_name == "locations": - return get_locations(**event.arguments) - - raise ValueError(f"Unsupported field resolver: {event.field_name}") - + ```python 
hl_lines="2-5 14 16 21 23 33-34" + --8<-- "docs/examples/utilities/data_classes/app_appsync_resolver.py" ``` === "Example AppSync Event" @@ -446,15 +246,7 @@ decompress and parse json data from the event. === "app.py" ```python - from aws_lambda_powertools.utilities.data_classes import event_source, CloudWatchLogsEvent - from aws_lambda_powertools.utilities.data_classes.cloud_watch_logs_event import CloudWatchLogsDecodedData - - @event_source(data_class=CloudWatchLogsEvent) - def lambda_handler(event: CloudWatchLogsEvent, context): - decompressed_log: CloudWatchLogsDecodedData = event.parse_logs_data - log_events = decompressed_log.log_events - for event in log_events: - do_something_with(event.timestamp, event.message) + --8<-- "docs/examples/utilities/data_classes/app_cloudwatch_logs.py" ``` ### CodePipeline Job @@ -464,48 +256,7 @@ Data classes and utility functions to help create continuous delivery pipelines === "app.py" ```python - from aws_lambda_powertools import Logger - from aws_lambda_powertools.utilities.data_classes import event_source, CodePipelineJobEvent - - logger = Logger() - - @event_source(data_class=CodePipelineJobEvent) - def lambda_handler(event, context): - """The Lambda function handler - - If a continuing job then checks the CloudFormation stack status - and updates the job accordingly. - - If a new job then kick of an update or creation of the target - CloudFormation stack. - """ - - # Extract the Job ID - job_id = event.get_id - - # Extract the params - params: dict = event.decoded_user_parameters - stack = params["stack"] - artifact_name = params["artifact"] - template_file = params["file"] - - try: - if event.data.continuation_token: - # If we're continuing then the create/update has already been triggered - # we just need to check if it has finished. 
- check_stack_update_status(job_id, stack) - else: - template = event.get_artifact(artifact_name, template_file) - # Kick off a stack update or create - start_update_or_create(job_id, stack, template) - except Exception as e: - # If any other exceptions which we didn't expect are raised - # then fail the job and log the exception message. - logger.exception("Function failed due to exception.") - put_job_failure(job_id, "Function exception: " + str(e)) - - logger.debug("Function complete.") - return "Complete." + --8<-- "docs/examples/utilities/data_classes/app_codepipeline_job.py" ``` ### Cognito User Pool @@ -531,13 +282,7 @@ Verify Auth Challenge | `data_classes.cognito_user_pool_event.VerifyAuthChalleng === "app.py" ```python - from aws_lambda_powertools.utilities.data_classes.cognito_user_pool_event import PostConfirmationTriggerEvent - - def lambda_handler(event, context): - event: PostConfirmationTriggerEvent = PostConfirmationTriggerEvent(event) - - user_attributes = event.request.user_attributes - do_something_with(user_attributes) + --8<-- "docs/examples/utilities/data_classes/app_cognito_post_confirmation.py" ``` #### Define Auth Challenge Example @@ -550,37 +295,7 @@ This example is based on the AWS Cognito docs for [Define Auth Challenge Lambda === "app.py" ```python - from aws_lambda_powertools.utilities.data_classes.cognito_user_pool_event import DefineAuthChallengeTriggerEvent - - def handler(event: dict, context) -> dict: - event: DefineAuthChallengeTriggerEvent = DefineAuthChallengeTriggerEvent(event) - if ( - len(event.request.session) == 1 - and event.request.session[0].challenge_name == "SRP_A" - ): - event.response.issue_tokens = False - event.response.fail_authentication = False - event.response.challenge_name = "PASSWORD_VERIFIER" - elif ( - len(event.request.session) == 2 - and event.request.session[1].challenge_name == "PASSWORD_VERIFIER" - and event.request.session[1].challenge_result - ): - event.response.issue_tokens = False - 
event.response.fail_authentication = False - event.response.challenge_name = "CUSTOM_CHALLENGE" - elif ( - len(event.request.session) == 3 - and event.request.session[2].challenge_name == "CUSTOM_CHALLENGE" - and event.request.session[2].challenge_result - ): - event.response.issue_tokens = True - event.response.fail_authentication = False - else: - event.response.issue_tokens = False - event.response.fail_authentication = True - - return event.raw_event + --8<-- "docs/examples/utilities/data_classes/app_cognito_define_auth_challenge.py" ``` === "SPR_A response" @@ -704,16 +419,7 @@ This example is based on the AWS Cognito docs for [Create Auth Challenge Lambda === "app.py" ```python - from aws_lambda_powertools.utilities.data_classes import event_source - from aws_lambda_powertools.utilities.data_classes.cognito_user_pool_event import CreateAuthChallengeTriggerEvent - - @event_source(data_class=CreateAuthChallengeTriggerEvent) - def handler(event: CreateAuthChallengeTriggerEvent, context) -> dict: - if event.request.challenge_name == "CUSTOM_CHALLENGE": - event.response.public_challenge_parameters = {"captchaUrl": "url/123.jpg"} - event.response.private_challenge_parameters = {"answer": "5"} - event.response.challenge_metadata = "CAPTCHA_CHALLENGE" - return event.raw_event + --8<-- "docs/examples/utilities/data_classes/app_cognito_create_auth_challenge.py" ``` #### Verify Auth Challenge Response Example @@ -723,15 +429,7 @@ This example is based on the AWS Cognito docs for [Verify Auth Challenge Respons === "app.py" ```python - from aws_lambda_powertools.utilities.data_classes import event_source - from aws_lambda_powertools.utilities.data_classes.cognito_user_pool_event import VerifyAuthChallengeResponseTriggerEvent - - @event_source(data_class=VerifyAuthChallengeResponseTriggerEvent) - def handler(event: VerifyAuthChallengeResponseTriggerEvent, context) -> dict: - event.response.answer_correct = ( - event.request.private_challenge_parameters.get("answer") == 
event.request.challenge_answer - ) - return event.raw_event + --8<-- "docs/examples/utilities/data_classes/app_cognito_verify_auth_challenge_response.py" ``` ### Connect Contact Flow @@ -741,19 +439,7 @@ This example is based on the AWS Cognito docs for [Verify Auth Challenge Respons === "app.py" ```python - from aws_lambda_powertools.utilities.data_classes.connect_contact_flow_event import ( - ConnectContactFlowChannel, - ConnectContactFlowEndpointType, - ConnectContactFlowEvent, - ConnectContactFlowInitiationMethod, - ) - - def lambda_handler(event, context): - event: ConnectContactFlowEvent = ConnectContactFlowEvent(event) - assert event.contact_data.attributes == {"Language": "en-US"} - assert event.contact_data.channel == ConnectContactFlowChannel.VOICE - assert event.contact_data.customer_endpoint.endpoint_type == ConnectContactFlowEndpointType.TELEPHONE_NUMBER - assert event.contact_data.initiation_method == ConnectContactFlowInitiationMethod.API + --8<-- "docs/examples/utilities/data_classes/app_connect_contact_flow.py" ``` ### DynamoDB Streams @@ -765,40 +451,13 @@ attributes values (`AttributeValue`), as well as enums for stream view type (`St === "app.py" ```python - from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import ( - DynamoDBStreamEvent, - DynamoDBRecordEventName - ) - - def lambda_handler(event, context): - event: DynamoDBStreamEvent = DynamoDBStreamEvent(event) - - # Multiple records can be delivered in a single event - for record in event.records: - if record.event_name == DynamoDBRecordEventName.MODIFY: - do_something_with(record.dynamodb.new_image) - do_something_with(record.dynamodb.old_image) + --8<-- "docs/examples/utilities/data_classes/app_dynamodb.py" ``` === "multiple_records_types.py" ```python - from aws_lambda_powertools.utilities.data_classes import event_source, DynamoDBStreamEvent - from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import AttributeValueType, AttributeValue - from 
aws_lambda_powertools.utilities.typing import LambdaContext - - - @event_source(data_class=DynamoDBStreamEvent) - def lambda_handler(event: DynamoDBStreamEvent, context: LambdaContext): - for record in event.records: - key: AttributeValue = record.dynamodb.keys["id"] - if key == AttributeValueType.Number: - # {"N": "123.45"} => "123.45" - assert key.get_value == key.n_value - print(key.get_value) - elif key == AttributeValueType.Map: - assert key.get_value == key.map_value - print(key.get_value) + --8<-- "docs/examples/utilities/data_classes/app_dynamodb_multiple_records_types.py" ``` ### EventBridge @@ -806,12 +465,7 @@ attributes values (`AttributeValue`), as well as enums for stream view type (`St === "app.py" ```python - from aws_lambda_powertools.utilities.data_classes import event_source, EventBridgeEvent - - @event_source(data_class=EventBridgeEvent) - def lambda_handler(event: EventBridgeEvent, context): - do_something_with(event.detail) - + --8<-- "docs/examples/utilities/data_classes/app_event_bridge.py" ``` ### Kinesis streams @@ -822,19 +476,7 @@ or plain text, depending on the original payload. === "app.py" ```python - from aws_lambda_powertools.utilities.data_classes import event_source, KinesisStreamEvent - - @event_source(data_class=KinesisStreamEvent) - def lambda_handler(event: KinesisStreamEvent, context): - kinesis_record = next(event.records).kinesis - - # if data was delivered as text - data = kinesis_record.data_as_text() - - # if data was delivered as json - data = kinesis_record.data_as_json() - - do_something_with(data) + --8<-- "docs/examples/utilities/data_classes/app_kinesis_data_streams.py" ``` ### Rabbit MQ @@ -845,23 +487,8 @@ for more details. 
=== "app.py" - ```python hl_lines="4-5 9-10" - from typing import Dict - - from aws_lambda_powertools import Logger - from aws_lambda_powertools.utilities.data_classes import event_source - from aws_lambda_powertools.utilities.data_classes.rabbit_mq_event import RabbitMQEvent - - logger = Logger() - - @event_source(data_class=RabbitMQEvent) - def lambda_handler(event: RabbitMQEvent, context): - for queue_name, messages in event.rmq_messages_by_queue.items(): - logger.debug(f"Messages for queue: {queue_name}") - for message in messages: - logger.debug(f"MessageID: {message.basic_properties.message_id}") - data: Dict = message.json_data - logger.debug("Process json in base64 encoded data str", data) + ```python hl_lines="4-5 10-11" + --8<-- "docs/examples/utilities/data_classes/app_rabbit_mq.py" ``` ### S3 @@ -869,18 +496,7 @@ for more details. === "app.py" ```python - from urllib.parse import unquote_plus - from aws_lambda_powertools.utilities.data_classes import event_source, S3Event - - @event_source(data_class=S3Event) - def lambda_handler(event: S3Event, context): - bucket_name = event.bucket_name - - # Multiple records can be delivered in a single event - for record in event.records: - object_key = unquote_plus(record.s3.get_object.key) - - do_something_with(f"{bucket_name}/{object_key}") + --8<-- "docs/examples/utilities/data_classes/app_s3.py" ``` ### S3 Object Lambda @@ -889,35 +505,8 @@ This example is based on the AWS Blog post [Introducing Amazon S3 Object Lambda === "app.py" - ```python hl_lines="5-6 12 14" - import boto3 - import requests - - from aws_lambda_powertools import Logger - from aws_lambda_powertools.logging.correlation_paths import S3_OBJECT_LAMBDA - from aws_lambda_powertools.utilities.data_classes.s3_object_event import S3ObjectLambdaEvent - - logger = Logger() - session = boto3.Session() - s3 = session.client("s3") - - @logger.inject_lambda_context(correlation_id_path=S3_OBJECT_LAMBDA, log_event=True) - def lambda_handler(event, context): 
- event = S3ObjectLambdaEvent(event) - - # Get object from S3 - response = requests.get(event.input_s3_url) - original_object = response.content.decode("utf-8") - - # Make changes to the object about to be returned - transformed_object = original_object.upper() - - # Write object back to S3 Object Lambda - s3.write_get_object_response( - Body=transformed_object, RequestRoute=event.request_route, RequestToken=event.request_token - ) - - return {"status_code": 200} + ```python hl_lines="5-6 13 15" + --8<-- "docs/examples/utilities/data_classes/app_s3_object_lambda.py" ``` ### SES @@ -925,16 +514,7 @@ This example is based on the AWS Blog post [Introducing Amazon S3 Object Lambda === "app.py" ```python - from aws_lambda_powertools.utilities.data_classes import event_source, SESEvent - - @event_source(data_class=SESEvent) - def lambda_handler(event: SESEvent, context): - # Multiple records can be delivered in a single event - for record in event.records: - mail = record.ses.mail - common_headers = mail.common_headers - - do_something_with(common_headers.to, common_headers.subject) + --8<-- "docs/examples/utilities/data_classes/app_ses.py" ``` ### SNS @@ -942,16 +522,7 @@ This example is based on the AWS Blog post [Introducing Amazon S3 Object Lambda === "app.py" ```python - from aws_lambda_powertools.utilities.data_classes import event_source, SNSEvent - - @event_source(data_class=SNSEvent) - def lambda_handler(event: SNSEvent, context): - # Multiple records can be delivered in a single event - for record in event.records: - message = record.sns.message - subject = record.sns.subject - - do_something_with(subject, message) + --8<-- "docs/examples/utilities/data_classes/app_sns.py" ``` ### SQS @@ -959,11 +530,5 @@ This example is based on the AWS Blog post [Introducing Amazon S3 Object Lambda === "app.py" ```python - from aws_lambda_powertools.utilities.data_classes import event_source, SQSEvent - - @event_source(data_class=SQSEvent) - def lambda_handler(event: 
SQSEvent, context): - # Multiple records can be delivered in a single event - for record in event.records: - do_something_with(record.body) + --8<-- "docs/examples/utilities/data_classes/app_sqs.py" ``` diff --git a/docs/utilities/feature_flags.md b/docs/utilities/feature_flags.md index 95efc5d051c..69ecad6f8a1 100644 --- a/docs/utilities/feature_flags.md +++ b/docs/utilities/feature_flags.md @@ -49,130 +49,13 @@ The following sample infrastructure will be used throughout this documentation: === "template.yaml" ```yaml hl_lines="5 11 18 25 31-50 54" - AWSTemplateFormatVersion: "2010-09-09" - Description: Lambda Powertools Feature flags sample template - Resources: - FeatureStoreApp: - Type: AWS::AppConfig::Application - Properties: - Description: "AppConfig Application for feature toggles" - Name: product-catalogue - - FeatureStoreDevEnv: - Type: AWS::AppConfig::Environment - Properties: - ApplicationId: !Ref FeatureStoreApp - Description: "Development Environment for the App Config Store" - Name: dev - - FeatureStoreConfigProfile: - Type: AWS::AppConfig::ConfigurationProfile - Properties: - ApplicationId: !Ref FeatureStoreApp - Name: features - LocationUri: "hosted" - - HostedConfigVersion: - Type: AWS::AppConfig::HostedConfigurationVersion - Properties: - ApplicationId: !Ref FeatureStoreApp - ConfigurationProfileId: !Ref FeatureStoreConfigProfile - Description: 'A sample hosted configuration version' - Content: | - { - "premium_features": { - "default": false, - "rules": { - "customer tier equals premium": { - "when_match": true, - "conditions": [ - { - "action": "EQUALS", - "key": "tier", - "value": "premium" - } - ] - } - } - }, - "ten_percent_off_campaign": { - "default": false - } - } - ContentType: 'application/json' - - ConfigDeployment: - Type: AWS::AppConfig::Deployment - Properties: - ApplicationId: !Ref FeatureStoreApp - ConfigurationProfileId: !Ref FeatureStoreConfigProfile - ConfigurationVersion: !Ref HostedConfigVersion - DeploymentStrategyId: 
"AppConfig.AllAtOnce" - EnvironmentId: !Ref FeatureStoreDevEnv + --8<-- "docs/examples/utilities/feature_flags/template.yml" ``` === "CDK" ```python hl_lines="11-22 24 29 35 42 50" - import json - - import aws_cdk.aws_appconfig as appconfig - from aws_cdk import core - - - class SampleFeatureFlagStore(core.Construct): - def __init__(self, scope: core.Construct, id_: str) -> None: - super().__init__(scope, id_) - - features_config = { - "premium_features": { - "default": False, - "rules": { - "customer tier equals premium": { - "when_match": True, - "conditions": [{"action": "EQUALS", "key": "tier", "value": "premium"}], - } - }, - }, - "ten_percent_off_campaign": {"default": True}, - } - - self.config_app = appconfig.CfnApplication( - self, - id="app", - name="product-catalogue", - ) - self.config_env = appconfig.CfnEnvironment( - self, - id="env", - application_id=self.config_app.ref, - name="dev-env", - ) - self.config_profile = appconfig.CfnConfigurationProfile( - self, - id="profile", - application_id=self.config_app.ref, - location_uri="hosted", - name="features", - ) - self.hosted_cfg_version = appconfig.CfnHostedConfigurationVersion( - self, - "version", - application_id=self.config_app.ref, - configuration_profile_id=self.config_profile.ref, - content=json.dumps(features_config), - content_type="application/json", - ) - self.app_config_deployment = appconfig.CfnDeployment( - self, - id="deploy", - application_id=self.config_app.ref, - configuration_profile_id=self.config_profile.ref, - configuration_version=self.hosted_cfg_version.ref, - deployment_strategy_id="AppConfig.AllAtOnce", - environment_id=self.config_env.ref, - ) - + --8<-- "docs/examples/utilities/feature_flags/cdk_app.py" ``` ### Evaluating a single feature flag @@ -186,28 +69,8 @@ The `evaluate` method supports two optional parameters: === "app.py" - ```python hl_lines="3 9 13 17-19" - from aws_lambda_powertools.utilities.feature_flags import FeatureFlags, AppConfigStore - - app_config = 
AppConfigStore( - environment="dev", - application="product-catalogue", - name="features" - ) - - feature_flags = FeatureFlags(store=app_config) - - def lambda_handler(event, context): - # Get customer's tier from incoming request - ctx = { "tier": event.get("tier", "standard") } - - # Evaluate whether customer's tier has access to premium features - # based on `has_premium_features` rules - has_premium_features: bool = feature_flags.evaluate(name="premium_features", - context=ctx, default=False) - if has_premium_features: - # enable premium features - ... + ```python hl_lines="3 9 14 18-23" + --8<-- "docs/examples/utilities/feature_flags/single_feature_flag.py" ``` === "event.json" @@ -252,24 +115,8 @@ In this case, we could omit the `context` parameter and simply evaluate whether === "app.py" - ```python hl_lines="12-13" - from aws_lambda_powertools.utilities.feature_flags import FeatureFlags, AppConfigStore - - app_config = AppConfigStore( - environment="dev", - application="product-catalogue", - name="features" - ) - - feature_flags = FeatureFlags(store=app_config) - - def lambda_handler(event, context): - apply_discount: bool = feature_flags.evaluate(name="ten_percent_off_campaign", - default=False) - - if apply_discount: - # apply 10% discount to product - ... 
+ ```python hl_lines="13-16" + --8<-- "docs/examples/utilities/feature_flags/static_flag.py" ``` === "features.json" @@ -290,40 +137,8 @@ You can use `get_enabled_features` method for scenarios where you need a list of === "app.py" - ```python hl_lines="17-20 23" - from aws_lambda_powertools.event_handler import APIGatewayRestResolver - from aws_lambda_powertools.utilities.feature_flags import FeatureFlags, AppConfigStore - - app = APIGatewayRestResolver() - - app_config = AppConfigStore( - environment="dev", - application="product-catalogue", - name="features" - ) - - feature_flags = FeatureFlags(store=app_config) - - @app.get("/products") - def list_products(): - ctx = { - **app.current_event.headers, - **app.current_event.json_body - } - - # all_features is evaluated to ["geo_customer_campaign", "ten_percent_off_campaign"] - all_features: list[str] = feature_flags.get_enabled_features(context=ctx) - - if "geo_customer_campaign" in all_features: - # apply discounts based on geo - ... - - if "ten_percent_off_campaign" in all_features: - # apply additional 10% for all customers - ... - - def lambda_handler(event, context): - return app.resolve(event, context) + ```python hl_lines="16-19 22" + --8<-- "docs/examples/utilities/feature_flags/get_enabled_features.py" ``` === "event.json" @@ -387,30 +202,10 @@ You can use `get_enabled_features` method for scenarios where you need a list of Feature flags can return any JSON values when `boolean_type` parameter is set to `false`. These can be dictionaries, list, string, integers, etc. 
- === "app.py" - ```python hl_lines="3 9 13 16 18" - from aws_lambda_powertools.utilities.feature_flags import FeatureFlags, AppConfigStore - - app_config = AppConfigStore( - environment="dev", - application="product-catalogue", - name="features" - ) - - feature_flags = FeatureFlags(store=app_config) - - def lambda_handler(event, context): - # Get customer's tier from incoming request - ctx = { "tier": event.get("tier", "standard") } - - # Evaluate `has_premium_features` base don customer's tier - premium_features: list[str] = feature_flags.evaluate(name="premium_features", - context=ctx, default=False) - for feature in premium_features: - # enable premium features - ... + ```python hl_lines="3 9 14 17 22" + --8<-- "docs/examples/utilities/feature_flags/non_boolean_flag.py" ``` === "event.json" @@ -456,14 +251,7 @@ You can override `max_age` parameter when instantiating the store. === "app.py" ```python hl_lines="7" - from aws_lambda_powertools.utilities.feature_flags import FeatureFlags, AppConfigStore - - app_config = AppConfigStore( - environment="dev", - application="product-catalogue", - name="features", - max_age=300 - ) + --8<-- "docs/examples/utilities/feature_flags/cache_config.py" ``` ### Getting fetched configuration @@ -478,18 +266,7 @@ You can access the configuration fetched from the store via `get_raw_configurati === "app.py" ```python hl_lines="12" - from aws_lambda_powertools.utilities.feature_flags import FeatureFlags, AppConfigStore - - app_config = AppConfigStore( - environment="dev", - application="product-catalogue", - name="configuration", - envelope = "feature_flags" - ) - - feature_flags = FeatureFlags(store=app_config) - - config = app_config.get_raw_configuration + --8<-- "docs/examples/utilities/feature_flags/get_raw_configuration.py" ``` ### Schema @@ -614,14 +391,7 @@ For this to work, you need to use a JMESPath expression via the `envelope` param === "app.py" ```python hl_lines="7" - from aws_lambda_powertools.utilities.feature_flags 
import FeatureFlags, AppConfigStore - - app_config = AppConfigStore( - environment="dev", - application="product-catalogue", - name="configuration", - envelope = "feature_flags" - ) + --8<-- "docs/examples/utilities/feature_flags/envelope.py" ``` === "configuration.json" @@ -655,7 +425,6 @@ For this to work, you need to use a JMESPath expression via the `envelope` param } ``` - ### Built-in store provider ???+ info @@ -678,36 +447,8 @@ Parameter | Default | Description **jmespath_options** | `None` | For advanced use cases when you want to bring your own [JMESPath functions](https://github.com/jmespath/jmespath.py#custom-functions){target="_blank"} **logger** | `logging.Logger` | Logger to use for debug. You can optionally supply an instance of Powertools Logger. - -```python hl_lines="21-27" title="AppConfigStore sample" -from botocore.config import Config - -import jmespath - -from aws_lambda_powertools.utilities.feature_flags import AppConfigStore - -boto_config = Config(read_timeout=10, retries={"total_max_attempts": 2}) - -# Custom JMESPath functions -class CustomFunctions(jmespath.functions.Functions): - - @jmespath.functions.signature({'types': ['string']}) - def _func_special_decoder(self, s): - return my_custom_decoder_logic(s) - - -custom_jmespath_options = {"custom_functions": CustomFunctions()} - - -app_config = AppConfigStore( - environment="dev", - application="product-catalogue", - name="configuration", - max_age=120, - envelope = "features", - sdk_config=boto_config, - jmespath_options=custom_jmespath_options -) +```python hl_lines="18-24" title="AppConfigStore sample" +--8<-- "docs/examples/utilities/feature_flags/app_config.py" ``` ## Testing your code @@ -720,54 +461,7 @@ You can unit test your feature flags locally and independently without setting u This excerpt relies on `pytest` and `pytest-mock` dependencies. 
```python hl_lines="7-9" title="Unit testing feature flags" -from aws_lambda_powertools.utilities.feature_flags import FeatureFlags, AppConfigStore, RuleAction - - -def init_feature_flags(mocker, mock_schema, envelope="") -> FeatureFlags: - """Mock AppConfig Store get_configuration method to use mock schema instead""" - - method_to_mock = "aws_lambda_powertools.utilities.feature_flags.AppConfigStore.get_configuration" - mocked_get_conf = mocker.patch(method_to_mock) - mocked_get_conf.return_value = mock_schema - - app_conf_store = AppConfigStore( - environment="test_env", - application="test_app", - name="test_conf_name", - envelope=envelope, - ) - - return FeatureFlags(store=app_conf_store) - - -def test_flags_condition_match(mocker): - # GIVEN - expected_value = True - mocked_app_config_schema = { - "my_feature": { - "default": expected_value, - "rules": { - "tenant id equals 12345": { - "when_match": True, - "conditions": [ - { - "action": RuleAction.EQUALS.value, - "key": "tenant_id", - "value": "12345", - } - ], - } - }, - } - } - - # WHEN - ctx = {"tenant_id": "12345", "username": "a"} - feature_flags = init_feature_flags(mocker=mocker, mock_schema=mocked_app_config_schema) - flag = feature_flags.evaluate(name="my_feature", context=ctx, default=False) - - # THEN - assert flag == expected_value +--8<-- "docs/examples/utilities/feature_flags/unit_test.py" ``` ## Feature flags vs Parameters vs env vars @@ -778,7 +472,6 @@ Method | When to use | Requires new deployment on changes | Supported services **[Parameters utility](parameters.md)** | Access to secrets, or fetch parameters in different formats from AWS System Manager Parameter Store or Amazon DynamoDB. | No | Parameter Store, DynamoDB, Secrets Manager, AppConfig **Feature flags utility** | Rule engine to define when one or multiple features should be enabled depending on the input. 
| No | AppConfig - ## Deprecation list when GA Breaking change | Recommendation diff --git a/docs/utilities/idempotency.md b/docs/utilities/idempotency.md index 4b03b66abd4..aa57c51edff 100644 --- a/docs/utilities/idempotency.md +++ b/docs/utilities/idempotency.md @@ -43,30 +43,8 @@ TTL attribute name | `expiration` | This can only be configured after your table ???+ tip "Tip: You can share a single state table for all functions" You can reuse the same DynamoDB table to store idempotency state. We add your `function_name` in addition to the idempotency key as a hash key. -```yaml hl_lines="5-13 21-23" title="AWS Serverless Application Model (SAM) example" -Resources: - IdempotencyTable: - Type: AWS::DynamoDB::Table - Properties: - AttributeDefinitions: - - AttributeName: id - AttributeType: S - KeySchema: - - AttributeName: id - KeyType: HASH - TimeToLiveSpecification: - AttributeName: expiration - Enabled: true - BillingMode: PAY_PER_REQUEST - - HelloWorldFunction: - Type: AWS::Serverless::Function - Properties: - Runtime: python3.8 - ... - Policies: - - DynamoDBCrudPolicy: - TableName: !Ref IdempotencyTable +```yaml hl_lines="7-15 24-26" title="AWS Serverless Application Model (SAM) example" +--8<-- "docs/examples/utilities/idempotency/template.yml" ``` ???+ warning "Warning: Large responses with DynamoDB persistence layer" @@ -86,25 +64,8 @@ You can quickly start by initializing the `DynamoDBPersistenceLayer` class and u === "app.py" - ```python hl_lines="1-3 5 7 14" - from aws_lambda_powertools.utilities.idempotency import ( - DynamoDBPersistenceLayer, idempotent - ) - - persistence_layer = DynamoDBPersistenceLayer(table_name="IdempotencyTable") - - @idempotent(persistence_store=persistence_layer) - def handler(event, context): - payment = create_subscription_payment( - user=event['user'], - product=event['product_id'] - ) - ... 
- return { - "payment_id": payment.id, - "message": "success", - "statusCode": 200, - } + ```python hl_lines="1 3 6 10" + --8<-- "docs/examples/utilities/idempotency/idempotent_decorator.py" ``` === "Example event" @@ -116,7 +77,7 @@ You can quickly start by initializing the `DynamoDBPersistenceLayer` class and u } ``` -### Idempotent_function decorator +### Idempotent function decorator Similar to [idempotent decorator](#idempotent-decorator), you can use `idempotent_function` decorator for any synchronous Python function. @@ -131,37 +92,8 @@ When using `idempotent_function`, you must tell us which keyword parameter in yo This example also demonstrates how you can integrate with [Batch utility](batch.md), so you can process each record in an idempotent manner. - ```python hl_lines="4-5 16 21 29" - from aws_lambda_powertools.utilities.batch import (BatchProcessor, EventType, - batch_processor) - from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord - from aws_lambda_powertools.utilities.idempotency import ( - DynamoDBPersistenceLayer, IdempotencyConfig, idempotent_function) - - - processor = BatchProcessor(event_type=EventType.SQS) - dynamodb = DynamoDBPersistenceLayer(table_name="idem") - config = IdempotencyConfig( - event_key_jmespath="messageId", # see Choosing a payload subset section - use_local_cache=True, - ) - - - @idempotent_function(data_keyword_argument="record", config=config, persistence_store=dynamodb) - def record_handler(record: SQSRecord): - return {"message": record["body"]} - - - @idempotent_function(data_keyword_argument="data", config=config, persistence_store=dynamodb) - def dummy(arg_one, arg_two, data: dict, **kwargs): - return {"data": data} - - - @batch_processor(record_handler=record_handler, processor=processor) - def lambda_handler(event, context): - # `data` parameter must be called as a keyword argument to work - dummy("hello", "universe", data="test") - return processor.response() + ```python hl_lines="3 13 18 
26" + --8<-- "docs/examples/utilities/idempotency/batch_sample.py" ``` === "Batch event" @@ -197,75 +129,14 @@ When using `idempotent_function`, you must tell us which keyword parameter in yo === "dataclass_sample.py" - ```python hl_lines="3-4 23 32" - from dataclasses import dataclass - - from aws_lambda_powertools.utilities.idempotency import ( - DynamoDBPersistenceLayer, IdempotencyConfig, idempotent_function) - - dynamodb = DynamoDBPersistenceLayer(table_name="idem") - config = IdempotencyConfig( - event_key_jmespath="order_id", # see Choosing a payload subset section - use_local_cache=True, - ) - - @dataclass - class OrderItem: - sku: str - description: str - - @dataclass - class Order: - item: OrderItem - order_id: int - - - @idempotent_function(data_keyword_argument="order", config=config, persistence_store=dynamodb) - def process_order(order: Order): - return f"processed order {order.order_id}" - - - order_item = OrderItem(sku="fake", description="sample") - order = Order(item=order_item, order_id="fake-id") - - # `order` parameter must be called as a keyword argument to work - process_order(order=order) + ```python hl_lines="3 24 33" + --8<-- "docs/examples/utilities/idempotency/dataclass_sample.py" ``` === "parser_pydantic_sample.py" - ```python hl_lines="1-2 22 31" - from aws_lambda_powertools.utilities.idempotency import ( - DynamoDBPersistenceLayer, IdempotencyConfig, idempotent_function) - from aws_lambda_powertools.utilities.parser import BaseModel - - dynamodb = DynamoDBPersistenceLayer(table_name="idem") - config = IdempotencyConfig( - event_key_jmespath="order_id", # see Choosing a payload subset section - use_local_cache=True, - ) - - - class OrderItem(BaseModel): - sku: str - description: str - - - class Order(BaseModel): - item: OrderItem - order_id: int - - - @idempotent_function(data_keyword_argument="order", config=config, persistence_store=dynamodb) - def process_order(order: Order): - return f"processed order {order.order_id}" - - - 
order_item = OrderItem(sku="fake", description="sample") - order = Order(item=order_item, order_id="fake-id") - - # `order` parameter must be called as a keyword argument to work - process_order(order=order) + ```python hl_lines="1 21 30" + --8<-- "docs/examples/utilities/idempotency/parser_pydantic_sample.py" ``` ### Choosing a payload subset for idempotency @@ -288,31 +159,8 @@ Imagine the function executes successfully, but the client never receives the re === "payment.py" - ```python hl_lines="2-4 10 12 15 20" - import json - from aws_lambda_powertools.utilities.idempotency import ( - IdempotencyConfig, DynamoDBPersistenceLayer, idempotent - ) - - persistence_layer = DynamoDBPersistenceLayer(table_name="IdempotencyTable") - - # Treat everything under the "body" key - # in the event json object as our payload - config = IdempotencyConfig(event_key_jmespath="powertools_json(body)") - - @idempotent(config=config, persistence_store=persistence_layer) - def handler(event, context): - body = json.loads(event['body']) - payment = create_subscription_payment( - user=body['user'], - product=body['product_id'] - ) - ... - return { - "payment_id": payment.id, - "message": "success", - "statusCode": 200 - } + ```python hl_lines="3 9 12 15 20" + --8<-- "docs/examples/utilities/idempotency/payment.py" ``` === "Example event" @@ -350,7 +198,6 @@ Imagine the function executes successfully, but the client never receives the re } ``` - ### Idempotency request flow This sequence diagram shows an example flow of what happens in the payment scenario: @@ -367,30 +214,14 @@ The client was successful in receiving the result after the retry. Since the Lam If you are using the `idempotent` decorator on your Lambda handler, any unhandled exceptions that are raised during the code execution will cause **the record in the persistence layer to be deleted**. This means that new invocations will execute your code again despite having the same payload. 
If you don't want the record to be deleted, you need to catch exceptions within the idempotent function and return a successful response. - ![Idempotent sequence exception](../media/idempotent_sequence_exception.png) If you are using `idempotent_function`, any unhandled exceptions that are raised _inside_ the decorated function will cause the record in the persistence layer to be deleted, and allow the function to be executed again if retried. If an Exception is raised _outside_ the scope of the decorated function and after your function has been called, the persistent record will not be affected. In this case, idempotency will be maintained for your decorated function. Example: -```python hl_lines="2-4 8-10" title="Exception not affecting idempotency record sample" -def lambda_handler(event, context): - # If an exception is raised here, no idempotent record will ever get created as the - # idempotent function does not get called - do_some_stuff() - - result = call_external_service(data={"user": "user1", "id": 5}) - - # This exception will not cause the idempotent record to be deleted, since it - # happens after the decorated function has been successfully called - raise Exception - - -@idempotent_function(data_keyword_argument="data", config=config, persistence_store=dynamodb) -def call_external_service(data: dict, **kwargs): - result = requests.post('http://example.com', json={"user": data['user'], "transaction_id": data['id']} - return result.json() +```python hl_lines="10-12 16-18" title="Exception not affecting idempotency record sample" +--8<-- "docs/examples/utilities/idempotency/idempotency_exception_sample.py" ``` ???+ warning @@ -405,16 +236,7 @@ def call_external_service(data: dict, **kwargs): This persistence layer is built-in, and you can either use an existing DynamoDB table or create a new one dedicated for idempotency state (recommended). 
```python hl_lines="5-9" title="Customizing DynamoDBPersistenceLayer to suit your table structure" -from aws_lambda_powertools.utilities.idempotency import DynamoDBPersistenceLayer - -persistence_layer = DynamoDBPersistenceLayer( - table_name="IdempotencyTable", - key_attr="idempotency_key", - expiry_attr="expires_at", - status_attr="current_status", - data_attr="result_data", - validation_key_attr="validation_key", -) +--8<-- "docs/examples/utilities/idempotency/dynamodb_persistence_layer_customization.py" ``` When using DynamoDB as a persistence layer, you can alter the attribute names by passing these parameters when initializing the persistence layer: @@ -464,20 +286,8 @@ This is a locking mechanism for correctness. Since we don't know the result from You can enable in-memory caching with the **`use_local_cache`** parameter: -```python hl_lines="8 11" title="Caching idempotent transactions in-memory to prevent multiple calls to storage" -from aws_lambda_powertools.utilities.idempotency import ( - IdempotencyConfig, DynamoDBPersistenceLayer, idempotent -) - -persistence_layer = DynamoDBPersistenceLayer(table_name="IdempotencyTable") -config = IdempotencyConfig( - event_key_jmespath="body", - use_local_cache=True, -) - -@idempotent(config=config, persistence_store=persistence_layer) -def handler(event, context): - ... +```python hl_lines="6 10" title="Caching idempotent transactions in-memory to prevent multiple calls to storage" +--8<-- "docs/examples/utilities/idempotency/idempotency_in_memory_cache.py" ``` When enabled, the default is to cache a maximum of 256 records in each Lambda execution environment - You can change it with the **`local_cache_max_items`** parameter. @@ -491,20 +301,8 @@ In most cases, it is not desirable to store the idempotency records forever. 
Rat You can change this window with the **`expires_after_seconds`** parameter: -```python hl_lines="8 11" title="Adjusting cache TTL" -from aws_lambda_powertools.utilities.idempotency import ( - IdempotencyConfig, DynamoDBPersistenceLayer, idempotent -) - -persistence_layer = DynamoDBPersistenceLayer(table_name="IdempotencyTable") -config = IdempotencyConfig( - event_key_jmespath="body", - expires_after_seconds=5*60, # 5 minutes -) - -@idempotent(config=config, persistence_store=persistence_layer) -def handler(event, context): - ... +```python hl_lines="6 10" title="Adjusting cache TTL" +--8<-- "docs/examples/utilities/idempotency/idempotency_cache_ttl.py" ``` This will mark any records older than 5 minutes as expired, and the lambda handler will be executed as normal if it is invoked with a matching payload. @@ -523,33 +321,8 @@ With **`payload_validation_jmespath`**, you can provide an additional JMESPath e === "app.py" - ```python hl_lines="7 11 18 25" - from aws_lambda_powertools.utilities.idempotency import ( - IdempotencyConfig, DynamoDBPersistenceLayer, idempotent - ) - - config = IdempotencyConfig( - event_key_jmespath="[userDetail, productId]", - payload_validation_jmespath="amount" - ) - persistence_layer = DynamoDBPersistenceLayer(table_name="IdempotencyTable") - - @idempotent(config=config, persistence_store=persistence_layer) - def handler(event, context): - # Creating a subscription payment is a side - # effect of calling this function! - payment = create_subscription_payment( - user=event['userDetail']['username'], - product=event['product_id'], - amount=event['amount'] - ) - ... 
- return { - "message": "success", - "statusCode": 200, - "payment_id": payment.id, - "amount": payment.amount - } + ```python hl_lines="5 10 17 24" + --8<-- "docs/examples/utilities/idempotency/idempotency_payload_validation.py" ``` === "Example Event 1" @@ -597,22 +370,8 @@ This means that we will raise **`IdempotencyKeyError`** if the evaluation of **` === "app.py" - ```python hl_lines="9-10 13" - from aws_lambda_powertools.utilities.idempotency import ( - IdempotencyConfig, DynamoDBPersistenceLayer, idempotent - ) - - persistence_layer = DynamoDBPersistenceLayer(table_name="IdempotencyTable") - - # Requires "user"."uid" and "order_id" to be present - config = IdempotencyConfig( - event_key_jmespath="[user.uid, order_id]", - raise_on_no_idempotency_key=True, - ) - - @idempotent(config=config, persistence_store=persistence_layer) - def handler(event, context): - pass + ```python hl_lines="7-8 12" + --8<-- "docs/examples/utilities/idempotency/idempotency_key_required.py" ``` === "Success Event" @@ -647,42 +406,14 @@ The **`boto_config`** and **`boto3_session`** parameters enable you to pass in a === "Custom session" - ```python hl_lines="1 6 9 14" - import boto3 - from aws_lambda_powertools.utilities.idempotency import ( - IdempotencyConfig, DynamoDBPersistenceLayer, idempotent - ) - - boto3_session = boto3.session.Session() - persistence_layer = DynamoDBPersistenceLayer( - table_name="IdempotencyTable", - boto3_session=boto3_session - ) - - config = IdempotencyConfig(event_key_jmespath="body") - - @idempotent(config=config, persistence_store=persistence_layer) - def handler(event, context): - ... 
+ ```python hl_lines="1 5 8 14" + --8<-- "docs/examples/utilities/idempotency/idempotency_custom_session.py" ``` + === "Custom config" - ```python hl_lines="1 7 10" - from botocore.config import Config - from aws_lambda_powertools.utilities.idempotency import ( - IdempotencyConfig, DynamoDBPersistenceLayer, idempotent - ) - - config = IdempotencyConfig(event_key_jmespath="body") - boto_config = Config() - persistence_layer = DynamoDBPersistenceLayer( - table_name="IdempotencyTable", - boto_config=boto_config - ) - - @idempotent(config=config, persistence_store=persistence_layer) - def handler(event, context): - ... + ```python hl_lines="1 6 9 13" + --8<-- "docs/examples/utilities/idempotency/idempotency_custom_config.py" ``` ### Using a DynamoDB table with a composite primary key @@ -694,16 +425,7 @@ With this setting, we will save the idempotency key in the sort key instead of t You can optionally set a static value for the partition key using the `static_pk_value` parameter. ```python hl_lines="5" title="Reusing a DynamoDB table that uses a composite primary key" -from aws_lambda_powertools.utilities.idempotency import DynamoDBPersistenceLayer, idempotent - -persistence_layer = DynamoDBPersistenceLayer( - table_name="IdempotencyTable", - sort_key_attr='sort_key') - - -@idempotent(persistence_store=persistence_layer) -def handler(event, context): - return {"message": "success": "id": event['body']['id]} +--8<-- "docs/examples/utilities/idempotency/idempotency_composite_primary_key.py" ``` The example function above would cause data to be stored in DynamoDB like this: @@ -722,132 +444,7 @@ You can inherit from the `BasePersistenceLayer` class and implement the abstract `_update_record` and `_delete_record`. 
```python hl_lines="8-13 57 65 74 96 124" title="Excerpt DynamoDB Persisntence Layer implementation for reference" -import datetime -import logging -from typing import Any, Dict, Optional - -import boto3 -from botocore.config import Config - -from aws_lambda_powertools.utilities.idempotency import BasePersistenceLayer -from aws_lambda_powertools.utilities.idempotency.exceptions import ( - IdempotencyItemAlreadyExistsError, - IdempotencyItemNotFoundError, -) -from aws_lambda_powertools.utilities.idempotency.persistence.base import DataRecord - -logger = logging.getLogger(__name__) - - -class DynamoDBPersistenceLayer(BasePersistenceLayer): - def __init__( - self, - table_name: str, - key_attr: str = "id", - expiry_attr: str = "expiration", - status_attr: str = "status", - data_attr: str = "data", - validation_key_attr: str = "validation", - boto_config: Optional[Config] = None, - boto3_session: Optional[boto3.session.Session] = None, - ): - boto_config = boto_config or Config() - session = boto3_session or boto3.session.Session() - self._ddb_resource = session.resource("dynamodb", config=boto_config) - self.table_name = table_name - self.table = self._ddb_resource.Table(self.table_name) - self.key_attr = key_attr - self.expiry_attr = expiry_attr - self.status_attr = status_attr - self.data_attr = data_attr - self.validation_key_attr = validation_key_attr - super(DynamoDBPersistenceLayer, self).__init__() - - def _item_to_data_record(self, item: Dict[str, Any]) -> DataRecord: - """ - Translate raw item records from DynamoDB to DataRecord - - Parameters - ---------- - item: Dict[str, Union[str, int]] - Item format from dynamodb response - - Returns - ------- - DataRecord - representation of item - - """ - return DataRecord( - idempotency_key=item[self.key_attr], - status=item[self.status_attr], - expiry_timestamp=item[self.expiry_attr], - response_data=item.get(self.data_attr), - payload_hash=item.get(self.validation_key_attr), - ) - - def _get_record(self, 
idempotency_key) -> DataRecord: - response = self.table.get_item(Key={self.key_attr: idempotency_key}, ConsistentRead=True) - - try: - item = response["Item"] - except KeyError: - raise IdempotencyItemNotFoundError - return self._item_to_data_record(item) - - def _put_record(self, data_record: DataRecord) -> None: - item = { - self.key_attr: data_record.idempotency_key, - self.expiry_attr: data_record.expiry_timestamp, - self.status_attr: data_record.status, - } - - if self.payload_validation_enabled: - item[self.validation_key_attr] = data_record.payload_hash - - now = datetime.datetime.now() - try: - logger.debug(f"Putting record for idempotency key: {data_record.idempotency_key}") - self.table.put_item( - Item=item, - ConditionExpression=f"attribute_not_exists({self.key_attr}) OR {self.expiry_attr} < :now", - ExpressionAttributeValues={":now": int(now.timestamp())}, - ) - except self._ddb_resource.meta.client.exceptions.ConditionalCheckFailedException: - logger.debug(f"Failed to put record for already existing idempotency key: {data_record.idempotency_key}") - raise IdempotencyItemAlreadyExistsError - - def _update_record(self, data_record: DataRecord): - logger.debug(f"Updating record for idempotency key: {data_record.idempotency_key}") - update_expression = "SET #response_data = :response_data, #expiry = :expiry, #status = :status" - expression_attr_values = { - ":expiry": data_record.expiry_timestamp, - ":response_data": data_record.response_data, - ":status": data_record.status, - } - expression_attr_names = { - "#response_data": self.data_attr, - "#expiry": self.expiry_attr, - "#status": self.status_attr, - } - - if self.payload_validation_enabled: - update_expression += ", #validation_key = :validation_key" - expression_attr_values[":validation_key"] = data_record.payload_hash - expression_attr_names["#validation_key"] = self.validation_key_attr - - kwargs = { - "Key": {self.key_attr: data_record.idempotency_key}, - "UpdateExpression": update_expression, - 
"ExpressionAttributeValues": expression_attr_values, - "ExpressionAttributeNames": expression_attr_names, - } - - self.table.update_item(**kwargs) - - def _delete_record(self, data_record: DataRecord) -> None: - logger.debug(f"Deleting record for idempotency key: {data_record.idempotency_key}") - self.table.delete_item(Key={self.key_attr: data_record.idempotency_key},) +--8<-- "docs/examples/utilities/idempotency/bring_your_own_persistent_store.py" ``` ???+ danger @@ -867,61 +464,31 @@ The idempotency utility can be used with the `validator` decorator. Ensure that Make sure to account for this behaviour, if you set the `event_key_jmespath`. -```python hl_lines="9 10" title="Using Idempotency with JSONSchema Validation utility" -from aws_lambda_powertools.utilities.validation import validator, envelopes -from aws_lambda_powertools.utilities.idempotency import ( - IdempotencyConfig, DynamoDBPersistenceLayer, idempotent -) - -config = IdempotencyConfig(event_key_jmespath="[message, username]") -persistence_layer = DynamoDBPersistenceLayer(table_name="IdempotencyTable") - -@validator(envelope=envelopes.API_GATEWAY_HTTP) -@idempotent(config=config, persistence_store=persistence_layer) -def lambda_handler(event, context): - cause_some_side_effects(event['username') - return {"message": event['message'], "statusCode": 200} +```python hl_lines="8-9" title="Using Idempotency with JSONSchema Validation utility" +--8<-- "docs/examples/utilities/idempotency/idempotency_with_validator.py" ``` ???+ tip "Tip: JMESPath Powertools functions are also available" Built-in functions known in the validation utility like `powertools_json`, `powertools_base64`, `powertools_base64_gzip` are also available to use in this utility. - ## Testing your code The idempotency utility provides several routes to test your code. ### Disabling the idempotency utility + When testing your code, you may wish to disable the idempotency logic altogether and focus on testing your business logic. 
To do this, you can set the environment variable `POWERTOOLS_IDEMPOTENCY_DISABLED` with a truthy value. If you prefer setting this for specific tests, and are using Pytest, you can use [monkeypatch](https://docs.pytest.org/en/latest/monkeypatch.html) fixture: === "tests.py" - ```python hl_lines="2 3" - def test_idempotent_lambda_handler(monkeypatch): - # Set POWERTOOLS_IDEMPOTENCY_DISABLED before calling decorated functions - monkeypatch.setenv("POWERTOOLS_IDEMPOTENCY_DISABLED", 1) - - result = handler() - ... + ```python hl_lines="5-6" + --8<-- "docs/examples/utilities/idempotency/testing_idempotency_disabled_test.py" ``` === "app.py" ```python - from aws_lambda_powertools.utilities.idempotency import ( - DynamoDBPersistenceLayer, idempotent - ) - - persistence_layer = DynamoDBPersistenceLayer(table_name="idempotency") - - @idempotent(persistence_store=persistence_layer) - def handler(event, context): - print('expensive operation') - return { - "payment_id": 12345, - "message": "success", - "statusCode": 200, - } + --8<-- "docs/examples/utilities/idempotency/testing_idempotency_disabled_app.py" ``` ### Testing with DynamoDB Local @@ -931,37 +498,13 @@ To test with [DynamoDB Local](https://docs.aws.amazon.com/amazondynamodb/latest/ === "tests.py" ```python hl_lines="6 7 8" - import boto3 - - import app - - def test_idempotent_lambda(): - # Create our own Table resource using the endpoint for our DynamoDB Local instance - resource = boto3.resource("dynamodb", endpoint_url='http://localhost:8000') - table = resource.Table(app.persistence_layer.table_name) - app.persistence_layer.table = table - - result = app.handler({'testkey': 'testvalue'}, {}) - assert result['payment_id'] == 12345 + --8<-- "docs/examples/utilities/idempotency/testing_with_dynamodb_local_test.py" ``` === "app.py" ```python - from aws_lambda_powertools.utilities.idempotency import ( - DynamoDBPersistenceLayer, idempotent - ) - - persistence_layer = DynamoDBPersistenceLayer(table_name="idempotency") 
- - @idempotent(persistence_store=persistence_layer) - def handler(event, context): - print('expensive operation') - return { - "payment_id": 12345, - "message": "success", - "statusCode": 200, - } + --8<-- "docs/examples/utilities/idempotency/testing_with_dynamodb_local_app.py" ``` ### How do I mock all DynamoDB I/O operations @@ -971,39 +514,17 @@ This means it is possible to pass a mocked Table resource, or stub various metho === "tests.py" - ```python hl_lines="6 7 8 9" - from unittest.mock import MagicMock - - import app - - def test_idempotent_lambda(): - table = MagicMock() - app.persistence_layer.table = table - result = app.handler({'testkey': 'testvalue'}, {}) - table.put_item.assert_called() - ... + ```python hl_lines="7-10" + --8<-- "docs/examples/utilities/idempotency/testing_with_mocked_dynamodb_test.py" ``` === "app.py" ```python - from aws_lambda_powertools.utilities.idempotency import ( - DynamoDBPersistenceLayer, idempotent - ) - - persistence_layer = DynamoDBPersistenceLayer(table_name="idempotency") - - @idempotent(persistence_store=persistence_layer) - def handler(event, context): - print('expensive operation') - return { - "payment_id": 12345, - "message": "success", - "statusCode": 200, - } + --8<-- "docs/examples/utilities/idempotency/testing_with_mocked_dynamodb_app.py" ``` ## Extra resources If you're interested in a deep dive on how Amazon uses idempotency when building our APIs, check out -[this article](https://aws.amazon.com/builders-library/making-retries-safe-with-idempotent-APIs/). +[this article](https://aws.amazon.com/builders-library/making-retries-safe-with-idempotent-APIs/){target="_blank"}. 
diff --git a/docs/utilities/jmespath_functions.md b/docs/utilities/jmespath_functions.md index 03b5fce1fd5..0258d592d4a 100644 --- a/docs/utilities/jmespath_functions.md +++ b/docs/utilities/jmespath_functions.md @@ -28,16 +28,8 @@ You can use the `extract_data_from_envelope` function along with any [JMESPath e === "app.py" - ```python hl_lines="1 7" - from aws_lambda_powertools.utilities.jmespath_utils import extract_data_from_envelope - - from aws_lambda_powertools.utilities.typing import LambdaContext - - - def handler(event: dict, context: LambdaContext): - payload = extract_data_from_envelope(data=event, envelope="powertools_json(body)") - customer = payload.get("customerId") # now deserialized - ... + ```python hl_lines="1 6" + --8<-- "docs/examples/utilities/jmespath_functions/extract_data_jmespath.py" ``` === "event.json" @@ -54,16 +46,8 @@ We provide built-in envelopes for popular JMESPath expressions used when looking === "app.py" - ```python hl_lines="1 7" - from aws_lambda_powertools.utilities.jmespath_utils import extract_data_from_envelope, envelopes - - from aws_lambda_powertools.utilities.typing import LambdaContext - - - def handler(event: dict, context: LambdaContext): - payload = extract_data_from_envelope(data=event, envelope=envelopes.SNS) - customer = payload.get("customerId") # now deserialized - ... + ```python hl_lines="1 6" + --8<-- "docs/examples/utilities/jmespath_functions/extract_data_built_in_jmespath.py" ``` === "event.json" @@ -107,6 +91,7 @@ Envelope | JMESPath expression ## Advanced ### Built-in JMESPath functions + You can use our built-in JMESPath functions within your expressions to do exactly that to decode JSON Strings, base64, and uncompress gzip data. 
???+ info @@ -123,20 +108,12 @@ This sample will decode the value within the `data` key into a valid JSON before === "powertools_json_jmespath_function.py" ```python hl_lines="9" - from aws_lambda_powertools.utilities.validation import validate - - import schemas - - sample_event = { - 'data': '{"payload": {"message": "hello hello", "username": "blah blah"}}' - } - - validate(event=sample_event, schema=schemas.INPUT, envelope="powertools_json(data)") + --8<-- "docs/examples/utilities/jmespath_functions/powertools_json_jmespath_function.py" ``` === "schemas.py" - ```python hl_lines="7 14 16 23 39 45 47 52" + ```python hl_lines="8 10 17 34 36 41" --8<-- "docs/shared/validation_basic_jsonschema.py" ``` @@ -144,28 +121,8 @@ This sample will decode the value within the `data` key into a valid JSON before This sample will decode the value within the `body` key of an API Gateway event into a valid JSON object to ensure the Idempotency utility processes a JSON object instead of a string. -```python hl_lines="7" title="Deserializing JSON before using as idempotency key" -import json -from aws_lambda_powertools.utilities.idempotency import ( - IdempotencyConfig, DynamoDBPersistenceLayer, idempotent -) - -persistence_layer = DynamoDBPersistenceLayer(table_name="IdempotencyTable") -config = IdempotencyConfig(event_key_jmespath="powertools_json(body)") - -@idempotent(config=config, persistence_store=persistence_layer) -def handler(event:APIGatewayProxyEvent, context): - body = json.loads(event['body']) - payment = create_subscription_payment( - user=body['user'], - product=body['product_id'] - ) - ... 
- return { - "payment_id": payment.id, - "message": "success", - "statusCode": 200 - } +```python hl_lines="6" title="Deserializing JSON before using as idempotency key" +--8<-- "docs/examples/utilities/jmespath_functions/powertools_json_jmespath_function_idempotency.py" ``` #### powertools_base64 function @@ -174,27 +131,15 @@ Use `powertools_base64` function to decode any base64 data. This sample will decode the base64 value within the `data` key, and decode the JSON string into a valid JSON before we can validate it. -=== "powertools_json_jmespath_function.py" +=== "powertools_base64_jmespath_function.py" ```python hl_lines="12" - from aws_lambda_powertools.utilities.validation import validate - - import schemas - - sample_event = { - "data": "eyJtZXNzYWdlIjogImhlbGxvIGhlbGxvIiwgInVzZXJuYW1lIjogImJsYWggYmxhaCJ9=" - } - - validate( - event=sample_event, - schema=schemas.INPUT, - envelope="powertools_json(powertools_base64(data))" - ) + --8<-- "docs/examples/utilities/jmespath_functions/powertools_base64_jmespath_function.py" ``` === "schemas.py" - ```python hl_lines="7 14 16 23 39 45 47 52" + ```python hl_lines="8 10 17 34 36 41" --8<-- "docs/shared/validation_basic_jsonschema.py" ``` @@ -204,27 +149,15 @@ Use `powertools_base64_gzip` function to decompress and decode base64 data. This sample will decompress and decode base64 data, then use JMESPath pipeline expression to pass the result for decoding its JSON string. 
-=== "powertools_json_jmespath_function.py" +=== "powertools_base64_gzip_jmespath_function.py" ```python hl_lines="12" - from aws_lambda_powertools.utilities.validation import validate - - import schemas - - sample_event = { - "data": "H4sIACZAXl8C/52PzUrEMBhFX2UILpX8tPbHXWHqIOiq3Q1F0ubrWEiakqTWofTdTYYB0YWL2d5zvnuTFellBIOedoiyKH5M0iwnlKH7HZL6dDB6ngLDfLFYctUKjie9gHFaS/sAX1xNEq525QxwFXRGGMEkx4Th491rUZdV3YiIZ6Ljfd+lfSyAtZloacQgAkqSJCGhxM6t7cwwuUGPz4N0YKyvO6I9WDeMPMSo8Z4Ca/kJ6vMEYW5f1MX7W1lVxaG8vqX8hNFdjlc0iCBBSF4ERT/3Pl7RbMGMXF2KZMh/C+gDpNS7RRsp0OaRGzx0/t8e0jgmcczyLCWEePhni/23JWalzjdu0a3ZvgEaNLXeugEAAA==" - } - - validate( - event=sample_event, - schema=schemas.INPUT, - envelope="powertools_base64_gzip(data) | powertools_json(@)" - ) + --8<-- "docs/examples/utilities/jmespath_functions/powertools_base64_gzip_jmespath_function.py" ``` === "schemas.py" - ```python hl_lines="7 14 16 23 39 45 47 52" + ```python hl_lines="8 10 17 34 36 41" --8<-- "docs/shared/validation_basic_jsonschema.py" ``` @@ -239,25 +172,8 @@ In order to keep the built-in functions from Powertools, you can subclass from ` === "custom_jmespath_function.py" - ```python hl_lines="2-3 6-9 11 17" - from aws_lambda_powertools.utilities.jmespath_utils import ( - PowertoolsFunctions, extract_data_from_envelope) - from jmespath.functions import signature - - - class CustomFunctions(PowertoolsFunctions): - @signature({'types': ['string']}) # Only decode if value is a string - def _func_special_decoder(self, s): - return my_custom_decoder_logic(s) - - custom_jmespath_options = {"custom_functions": CustomFunctions()} - - def handler(event, context): - # use the custom name after `_func_` - extract_data_from_envelope(data=event, - envelope="special_decoder(body)", - jmespath_options=**custom_jmespath_options) - ... 
+ ```python hl_lines="1 3 6-9 12 20" + --8<-- "docs/examples/utilities/jmespath_functions/custom_jmespath_function.py" ``` === "event.json" diff --git a/docs/utilities/middleware_factory.md b/docs/utilities/middleware_factory.md index 6133fb3c8af..d0101c041a7 100644 --- a/docs/utilities/middleware_factory.md +++ b/docs/utilities/middleware_factory.md @@ -18,39 +18,16 @@ You can create your own middleware using `lambda_handler_decorator`. The decorat * **event** - Lambda function invocation event * **context** - Lambda function context object -```python hl_lines="3-4 10" title="Creating your own middleware for before/after logic" -from aws_lambda_powertools.middleware_factory import lambda_handler_decorator - -@lambda_handler_decorator -def middleware_before_after(handler, event, context): - # logic_before_handler_execution() - response = handler(event, context) - # logic_after_handler_execution() - return response - -@middleware_before_after -def lambda_handler(event, context): - ... +```python hl_lines="4-5 12" title="Creating your own middleware for before/after logic" +--8<-- "docs/examples/utilities/middleware_factory/middleware_no_params.py" ``` ## Middleware with params You can also have your own keyword arguments after the mandatory arguments. -```python hl_lines="2 12" title="Accepting arbitrary keyword arguments" -@lambda_handler_decorator -def obfuscate_sensitive_data(handler, event, context, fields: List = None): - # Obfuscate email before calling Lambda handler - if fields: - for field in fields: - if field in event: - event[field] = obfuscate(event[field]) - - return handler(event, context) - -@obfuscate_sensitive_data(fields=["email"]) -def lambda_handler(event, context): - ... 
+```python hl_lines="7 17" title="Accepting arbitrary keyword arguments" +--8<-- "docs/examples/utilities/middleware_factory/middleware_with_params.py" ``` ## Tracing middleware execution @@ -59,32 +36,16 @@ If you are making use of [Tracer](../core/tracer.md), you can trace the executio This makes use of an existing Tracer instance that you may have initialized anywhere in your code. -```python hl_lines="3" title="Tracing custom middlewares with Tracer" -from aws_lambda_powertools.middleware_factory import lambda_handler_decorator - -@lambda_handler_decorator(trace_execution=True) -def my_middleware(handler, event, context): - return handler(event, context) - -@my_middleware -def lambda_handler(event, context): - ... +```python hl_lines="4" title="Tracing custom middlewares with Tracer" +--8<-- "docs/examples/utilities/middleware_factory/middleware_trace_execution.py" ``` When executed, your middleware name will [appear in AWS X-Ray Trace details as](../core/tracer.md) `## middleware_name`. For advanced use cases, you can instantiate [Tracer](../core/tracer.md) inside your middleware, and add annotations as well as metadata for additional operational insights. -```python hl_lines="6-8" title="Add custom tracing insights before/after in your middlware" -from aws_lambda_powertools.middleware_factory import lambda_handler_decorator -from aws_lambda_powertools import Tracer - -@lambda_handler_decorator(trace_execution=True) -def middleware_name(handler, event, context): - # tracer = Tracer() # Takes a copy of an existing tracer instance - # tracer.add_annotation... - # tracer.add_metadata... 
- return handler(event, context) +```python hl_lines="7-9" title="Add custom tracing insights before/after in your middlware" +--8<-- "docs/examples/utilities/middleware_factory/middleware_trace_custom.py" ``` ## Tips diff --git a/docs/utilities/parameters.md b/docs/utilities/parameters.md index d02a3feb73a..1da70bca755 100644 --- a/docs/utilities/parameters.md +++ b/docs/utilities/parameters.md @@ -39,30 +39,16 @@ You can retrieve a single parameter using `get_parameter` high-level function. For multiple parameters, you can use `get_parameters` and pass a path to retrieve them recursively. -```python hl_lines="1 5 9" title="Fetching multiple parameters recursively" -from aws_lambda_powertools.utilities import parameters - -def handler(event, context): - # Retrieve a single parameter - value = parameters.get_parameter("/my/parameter") - - # Retrieve multiple parameters from a path prefix recursively - # This returns a dict with the parameter name as key - values = parameters.get_parameters("/my/path/prefix") - for k, v in values.items(): - print(f"{k}: {v}") +```python hl_lines="1 6 10" title="Fetching multiple parameters recursively" +--8<-- "docs/examples/utilities/parameters/recursively_parameters.py" ``` ### Fetching secrets You can fetch secrets stored in Secrets Manager using `get_secrets`. -```python hl_lines="1 5" title="Fetching secrets" -from aws_lambda_powertools.utilities import parameters - -def handler(event, context): - # Retrieve a single secret - value = parameters.get_secret("my-secret") +```python hl_lines="1 6" title="Fetching secrets" +--8<-- "docs/examples/utilities/parameters/fetching_secrets.py" ``` ### Fetching app configurations @@ -71,12 +57,8 @@ You can fetch application configurations in AWS AppConfig using `get_app_config` The following will retrieve the latest version and store it in the cache. 
-```python hl_lines="1 5" title="Fetching latest config from AppConfig" -from aws_lambda_powertools.utilities import parameters - -def handler(event, context): - # Retrieve a single configuration, latest version - value: bytes = parameters.get_app_config(name="my_configuration", environment="my_env", application="my_app") +```python hl_lines="1 6-10" title="Fetching latest config from AppConfig" +--8<-- "docs/examples/utilities/parameters/fetching_app_config.py" ``` ## Advanced @@ -90,33 +72,16 @@ By default, we cache parameters retrieved in-memory for 5 seconds. You can adjust how long we should keep values in cache by using the param `max_age`, when using `get()` or `get_multiple()` methods across all providers. -```python hl_lines="9" title="Caching parameter(s) value in memory for longer than 5 seconds" -from aws_lambda_powertools.utilities import parameters -from botocore.config import Config - -config = Config(region_name="us-west-1") -ssm_provider = parameters.SSMProvider(config=config) - -def handler(event, context): - # Retrieve a single parameter - value = ssm_provider.get("/my/parameter", max_age=60) # 1 minute - - # Retrieve multiple parameters from a path prefix - values = ssm_provider.get_multiple("/my/path/prefix", max_age=60) - for k, v in values.items(): - print(f"{k}: {v}") +```python hl_lines="11 14" title="Caching parameter(s) value in memory for longer than 5 seconds" +--8<-- "docs/examples/utilities/parameters/custom_caching_parameters.py" ``` ### Always fetching the latest If you'd like to always ensure you fetch the latest parameter from the store regardless if already available in cache, use `force_fetch` param. 
-```python hl_lines="5" title="Forcefully fetching the latest parameter whether TTL has expired or not" -from aws_lambda_powertools.utilities import parameters - -def handler(event, context): - # Retrieve a single parameter - value = parameters.get_parameter("/my/parameter", force_fetch=True) +```python hl_lines="6" title="Forcefully fetching the latest parameter whether TTL has expired or not" +--8<-- "docs/examples/utilities/parameters/force_fetch_parameters.py" ``` ### Built-in provider class @@ -128,21 +93,8 @@ For greater flexibility such as configuring the underlying SDK client used by bu #### SSMProvider -```python hl_lines="5 9 12" title="Example with SSMProvider for further extensibility" -from aws_lambda_powertools.utilities import parameters -from botocore.config import Config - -config = Config(region_name="us-west-1") -ssm_provider = parameters.SSMProvider(config=config) # or boto3_session=boto3.Session() - -def handler(event, context): - # Retrieve a single parameter - value = ssm_provider.get("/my/parameter") - - # Retrieve multiple parameters from a path prefix - values = ssm_provider.get_multiple("/my/path/prefix") - for k, v in values.items(): - print(f"{k}: {v}") +```python hl_lines="6 11 14" title="Example with SSMProvider for further extensibility" +--8<-- "docs/examples/utilities/parameters/ssm_provider.py" ``` The AWS Systems Manager Parameter Store provider supports two additional arguments for the `get()` and `get_multiple()` methods: @@ -152,29 +104,14 @@ The AWS Systems Manager Parameter Store provider supports two additional argumen | **decrypt** | `False` | Will automatically decrypt the parameter. | **recursive** | `True` | For `get_multiple()` only, will fetch all parameter values recursively based on a path prefix. 
-```python hl_lines="6 8" title="Example with get() and get_multiple()" -from aws_lambda_powertools.utilities import parameters - -ssm_provider = parameters.SSMProvider() - -def handler(event, context): - decrypted_value = ssm_provider.get("/my/encrypted/parameter", decrypt=True) - - no_recursive_values = ssm_provider.get_multiple("/my/path/prefix", recursive=False) +```python hl_lines="7 9" title="Example with get() and get_multiple()" +--8<-- "docs/examples/utilities/parameters/ssm_provider_get_options.py" ``` #### SecretsProvider -```python hl_lines="5 9" title="Example with SecretsProvider for further extensibility" -from aws_lambda_powertools.utilities import parameters -from botocore.config import Config - -config = Config(region_name="us-west-1") -secrets_provider = parameters.SecretsProvider(config=config) - -def handler(event, context): - # Retrieve a single secret - value = secrets_provider.get("my-secret") +```python hl_lines="6 11" title="Example with SecretsProvider for further extensibility" +--8<-- "docs/examples/utilities/parameters/secrets_provider.py" ``` #### DynamoDBProvider @@ -196,23 +133,16 @@ For single parameters, you must use `id` as the [partition key](https://docs.aws With this table, `dynamodb_provider.get("my-param")` will return `my-value`. 
=== "app.py" - ```python hl_lines="3 7" - from aws_lambda_powertools.utilities import parameters - - dynamodb_provider = parameters.DynamoDBProvider(table_name="my-table") - def handler(event, context): - # Retrieve a value from DynamoDB - value = dynamodb_provider.get("my-parameter") + ```python hl_lines="3 8" + --8<-- "docs/examples/utilities/parameters/dynamodb_provider.py" ``` === "DynamoDB Local example" You can initialize the DynamoDB provider pointing to [DynamoDB Local](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/DynamoDBLocal.html) using `endpoint_url` parameter: - ```python hl_lines="3" - from aws_lambda_powertools.utilities import parameters - - dynamodb_provider = parameters.DynamoDBProvider(table_name="my-table", endpoint_url="http://localhost:8000") + ```python hl_lines="5" + --8<-- "docs/examples/utilities/parameters/dynamodb_provider_local.py" ``` **DynamoDB table structure for multiple values parameters** @@ -232,19 +162,9 @@ You can retrieve multiple parameters sharing the same `id` by having a sort key With this table, `dynamodb_provider.get_multiple("my-hash-key")` will return a dictionary response in the shape of `sk:value`. === "app.py" - ```python hl_lines="3 8" - from aws_lambda_powertools.utilities import parameters - - dynamodb_provider = parameters.DynamoDBProvider(table_name="my-table") - - def handler(event, context): - # Retrieve multiple values by performing a Query on the DynamoDB table - # This returns a dict with the sort key attribute as dict key. 
- parameters = dynamodb_provider.get_multiple("my-hash-key") - for k, v in parameters.items(): - # k: param-a - # v: "my-value-a" - print(f"{k}: {v}") + + ```python hl_lines="3 9" + --8<-- "docs/examples/utilities/parameters/dynamodb_provider_get_multiple.py" ``` === "parameters dict response" @@ -269,31 +189,13 @@ DynamoDB provider can be customized at initialization to match your table struct | **value_attr** | No | `value` | Name of the attribute containing the parameter value. ```python hl_lines="3-8" title="Customizing DynamoDBProvider to suit your table design" -from aws_lambda_powertools.utilities import parameters - -dynamodb_provider = parameters.DynamoDBProvider( - table_name="my-table", - key_attr="MyKeyAttr", - sort_attr="MySortAttr", - value_attr="MyvalueAttr" -) - -def handler(event, context): - value = dynamodb_provider.get("my-parameter") +--8<-- "docs/examples/utilities/parameters/dynamodb_provider_customization.py" ``` #### AppConfigProvider -```python hl_lines="5 9" title="Using AppConfigProvider" -from aws_lambda_powertools.utilities import parameters -from botocore.config import Config - -config = Config(region_name="us-west-1") -appconf_provider = parameters.AppConfigProvider(environment="my_env", application="my_app", config=config) - -def handler(event, context): - # Retrieve a single secret - value: bytes = appconf_provider.get("my_conf") +```python hl_lines="6-10 15" title="Using AppConfigProvider" +--8<-- "docs/examples/utilities/parameters/app_config_provider.py" ``` ### Create your own provider @@ -304,57 +206,8 @@ All transformation and caching logic is handled by the `get()` and `get_multiple Here is an example implementation using S3 as a custom parameter store: -```python hl_lines="3 6 17 27" title="Creating a S3 Provider to fetch parameters" -import copy - -from aws_lambda_powertools.utilities import BaseProvider -import boto3 - -class S3Provider(BaseProvider): - bucket_name = None - client = None - - def __init__(self, 
bucket_name: str): - # Initialize the client to your custom parameter store - # E.g.: - - self.bucket_name = bucket_name - self.client = boto3.client("s3") - - def _get(self, name: str, **sdk_options) -> str: - # Retrieve a single value - # E.g.: - - sdk_options["Bucket"] = self.bucket_name - sdk_options["Key"] = name - - response = self.client.get_object(**sdk_options) - return - - def _get_multiple(self, path: str, **sdk_options) -> Dict[str, str]: - # Retrieve multiple values - # E.g.: - - list_sdk_options = copy.deepcopy(sdk_options) - - list_sdk_options["Bucket"] = self.bucket_name - list_sdk_options["Prefix"] = path - - list_response = self.client.list_objects_v2(**list_sdk_options) - - parameters = {} - - for obj in list_response.get("Contents", []): - get_sdk_options = copy.deepcopy(sdk_options) - - get_sdk_options["Bucket"] = self.bucket_name - get_sdk_options["Key"] = obj["Key"] - - get_response = self.client.get_object(**get_sdk_options) - - parameters[obj["Key"]] = get_response["Body"].read().decode() - - return parameters +```python hl_lines="6 9 20 30" title="Creating a S3 Provider to fetch parameters" +--8<-- "docs/examples/utilities/parameters/create_your_own_s3_provider.py" ``` ### Deserializing values with transform parameter @@ -366,26 +219,14 @@ For parameters stored in JSON or Base64 format, you can use the `transform` argu === "High level functions" - ```python hl_lines="4" - from aws_lambda_powertools.utilities import parameters - - def handler(event, context): - value_from_json = parameters.get_parameter("/my/json/parameter", transform="json") + ```python hl_lines="5" + --8<-- "docs/examples/utilities/parameters/parameters_transform.py" ``` === "Providers" - ```python hl_lines="7 10" - from aws_lambda_powertools.utilities import parameters - - ssm_provider = parameters.SSMProvider() - - def handler(event, context): - # Transform a JSON string - value_from_json = ssm_provider.get("/my/json/parameter", transform="json") - - # Transform a 
Base64 encoded string - value_from_binary = ssm_provider.get("/my/binary/parameter", transform="binary") + ```python hl_lines="8 11" + --8<-- "docs/examples/utilities/parameters/parameters_transform_providers.py" ``` #### Partial transform failures with `get_multiple()` @@ -396,25 +237,8 @@ You can override this by setting the `raise_on_transform_error` argument to `Tru For example, if you have three parameters, */param/a*, */param/b* and */param/c*, but */param/c* is malformed: -```python hl_lines="9 16" title="Raising TransformParameterError at first malformed parameter" -from aws_lambda_powertools.utilities import parameters - -ssm_provider = parameters.SSMProvider() - -def handler(event, context): - # This will display: - # /param/a: [some value] - # /param/b: [some value] - # /param/c: None - values = ssm_provider.get_multiple("/param", transform="json") - for k, v in values.items(): - print(f"{k}: {v}") - - try: - # This will raise a TransformParameterError exception - values = ssm_provider.get_multiple("/param", transform="json", raise_on_transform_error=True) - except parameters.exceptions.TransformParameterError: - ... +```python hl_lines="11 17" title="Raising TransformParameterError at first malformed parameter" +--8<-- "docs/examples/utilities/parameters/parameters_transform_raise_on_transform_error.py" ``` #### Auto-transform values on suffix @@ -426,13 +250,8 @@ You can do this with a single request by using `transform="auto"`. This will ins ???+ info `transform="auto"` feature is available across all providers, including the high level functions. 
-```python hl_lines="6" title="Deserializing parameter values based on their suffix" -from aws_lambda_powertools.utilities import parameters - -ssm_provider = parameters.SSMProvider() - -def handler(event, context): - values = ssm_provider.get_multiple("/param", transform="auto") +```python hl_lines="7" title="Deserializing parameter values based on their suffix" +--8<-- "docs/examples/utilities/parameters/parameters_transform_auto.py" ``` For example, if you have two parameters with the following suffixes `.json` and `.binary`: @@ -455,14 +274,8 @@ The return of `ssm_provider.get_multiple("/param", transform="auto")` call will You can use arbitrary keyword arguments to pass it directly to the underlying SDK method. -```python hl_lines="8" title="" -from aws_lambda_powertools.utilities import parameters - -secrets_provider = parameters.SecretsProvider() - -def handler(event, context): - # The 'VersionId' argument will be passed to the underlying get_secret_value() call. - value = secrets_provider.get("my-secret", VersionId="e62ec170-6b01-48c7-94f3-d7497851a8d2") +```python hl_lines="7-8" title="" +--8<-- "docs/examples/utilities/parameters/parameters_sdk_args.py" ``` Here is the mapping between this utility's functions and methods and the underlying SDK: @@ -479,7 +292,6 @@ Here is the mapping between this utility's functions and methods and the underly | DynamoDB | `DynamoDBProvider.get_multiple` | `dynamodb` | ([Table resource](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb.html#table)) | [query](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb.html#DynamoDB.Table.query) | App Config | `get_app_config` | `appconfig` | [get_configuration](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/appconfig.html#AppConfig.Client.get_configuration) | - ### Customizing boto configuration The **`config`** and **`boto3_session`** parameters enable you to pass in a custom [botocore 
config object](https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html) or a custom [boto3 session](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html) when constructing any of the built-in provider classes. @@ -489,31 +301,13 @@ The **`config`** and **`boto3_session`** parameters enable you to pass in a cust === "Custom session" - ```python hl_lines="2 4 5" - from aws_lambda_powertools.utilities import parameters - import boto3 - - boto3_session = boto3.session.Session() - ssm_provider = parameters.SSMProvider(boto3_session=boto3_session) - - def handler(event, context): - # Retrieve a single parameter - value = ssm_provider.get("/my/parameter") - ... + ```python hl_lines="1 5-6" + --8<-- "docs/examples/utilities/parameters/parameters_custom_session.py" ``` === "Custom config" - ```python hl_lines="2 4 5" - from aws_lambda_powertools.utilities import parameters - from botocore.config import Config - - boto_config = Config() - ssm_provider = parameters.SSMProvider(config=boto_config) - - def handler(event, context): - # Retrieve a single parameter - value = ssm_provider.get("/my/parameter") - ... 
+ ```python hl_lines="1 5-6" + --8<-- "docs/examples/utilities/parameters/parameters_custom_config.py" ``` ## Testing your code @@ -523,49 +317,23 @@ can be achieved in a number of ways - in this example, we use the [pytest monkey to patch the `parameters.get_parameter` method: === "tests.py" - ```python - from src import index - - def test_handler(monkeypatch): - - def mockreturn(name): - return "mock_value" - monkeypatch.setattr(index.parameters, "get_parameter", mockreturn) - return_val = index.handler({}, {}) - assert return_val.get('message') == 'mock_value' + ```python + --8<-- "docs/examples/utilities/parameters/testing_parameters_tests.py" ``` === "src/index.py" - ```python - from aws_lambda_powertools.utilities import parameters - def handler(event, context): - # Retrieve a single parameter - value = parameters.get_parameter("my-parameter-name") - return {"message": value} + ```python + --8<-- "docs/examples/utilities/parameters/testing_parameters_index.py" ``` If we need to use this pattern across multiple tests, we can avoid repetition by refactoring to use our own pytest fixture: === "tests.py" - ```python - import pytest - - from src import index - - @pytest.fixture - def mock_parameter_response(monkeypatch): - def mockreturn(name): - return "mock_value" - - monkeypatch.setattr(index.parameters, "get_parameter", mockreturn) - - # Pass our fixture as an argument to all tests where we want to mock the get_parameter response - def test_handler(mock_parameter_response): - return_val = index.handler({}, {}) - assert return_val.get('message') == 'mock_value' + ```python + --8<-- "docs/examples/utilities/parameters/testing_parameters_fixture.py" ``` Alternatively, if we need more fully featured mocking (for example checking the arguments passed to `get_parameter`), we @@ -575,16 +343,5 @@ object named `get_parameter_mock`. 
=== "tests.py" ```python - from unittest.mock import patch - from src import index - - # Replaces "aws_lambda_powertools.utilities.parameters.get_parameter" with a Mock object - @patch("aws_lambda_powertools.utilities.parameters.get_parameter") - def test_handler(get_parameter_mock): - get_parameter_mock.return_value = 'mock_value' - - return_val = index.handler({}, {}) - get_parameter_mock.assert_called_with("my-parameter-name") - assert return_val.get('message') == 'mock_value' - + --8<-- "docs/examples/utilities/parameters/testing_parameters_mock.py" ``` diff --git a/docs/utilities/parser.md b/docs/utilities/parser.md index c17e2f173c5..e31f29cfcd4 100644 --- a/docs/utilities/parser.md +++ b/docs/utilities/parser.md @@ -29,19 +29,7 @@ Install parser's extra dependencies using **`pip install aws-lambda-powertools[p You can define models to parse incoming events by inheriting from `BaseModel`. ```python title="Defining an Order data model" -from aws_lambda_powertools.utilities.parser import BaseModel -from typing import List, Optional - -class OrderItem(BaseModel): - id: int - quantity: int - description: str - -class Order(BaseModel): - id: int - description: str - items: List[OrderItem] # nesting models are supported - optional_field: Optional[str] # this field may or may not be available when parsing +--8<-- "docs/examples/utilities/parser/parser_models.py" ``` These are simply Python classes that inherit from BaseModel. **Parser** enforces type hints declared in your model at runtime. @@ -59,93 +47,16 @@ Use the decorator for fail fast scenarios where you want your Lambda function to ???+ note **This decorator will replace the `event` object with the parsed model if successful**. This means you might be careful when nesting other decorators that expect `event` to be a `dict`. 
-```python hl_lines="18" title="Parsing and validating upon invocation with event_parser decorator" -from aws_lambda_powertools.utilities.parser import event_parser, BaseModel -from aws_lambda_powertools.utilities.typing import LambdaContext -from typing import List, Optional - -import json - -class OrderItem(BaseModel): - id: int - quantity: int - description: str - -class Order(BaseModel): - id: int - description: str - items: List[OrderItem] # nesting models are supported - optional_field: Optional[str] # this field may or may not be available when parsing - - -@event_parser(model=Order) -def handler(event: Order, context: LambdaContext): - print(event.id) - print(event.description) - print(event.items) - - order_items = [item for item in event.items] - ... - -payload = { - "id": 10876546789, - "description": "My order", - "items": [ - { - "id": 1015938732, - "quantity": 1, - "description": "item xpto" - } - ] -} - -handler(event=payload, context=LambdaContext()) -handler(event=json.dumps(payload), context=LambdaContext()) # also works if event is a JSON string +```python hl_lines="21" title="Parsing and validating upon invocation with event_parser decorator" +--8<-- "docs/examples/utilities/parser/parser_event_parser_decorator.py" ``` ### parse function Use this standalone function when you want more control over the data validation process, for example returning a 400 error for malformed payloads. 
-```python hl_lines="21 30" title="Using standalone parse function for more flexibility" -from aws_lambda_powertools.utilities.parser import parse, BaseModel, ValidationError -from typing import List, Optional - -class OrderItem(BaseModel): - id: int - quantity: int - description: str - -class Order(BaseModel): - id: int - description: str - items: List[OrderItem] # nesting models are supported - optional_field: Optional[str] # this field may or may not be available when parsing - - -payload = { - "id": 10876546789, - "description": "My order", - "items": [ - { - # this will cause a validation error - "id": [1015938732], - "quantity": 1, - "description": "item xpto" - } - ] -} - -def my_function(): - try: - parsed_payload: Order = parse(event=payload, model=Order) - # payload dict is now parsed into our model - return parsed_payload.items - except ValidationError: - return { - "status_code": 400, - "message": "Invalid order" - } +```python hl_lines="24 35" title="Using standalone parse function for more flexibility" +--8<-- "docs/examples/utilities/parser/parser_parse_function.py" ``` ## Built-in models @@ -174,56 +85,8 @@ You can extend them to include your own models, and yet have all other known fie ???+ tip For Mypy users, we only allow type override for fields where payload is injected e.g. `detail`, `body`, etc. 
- -```python hl_lines="16-17 28 41" title="Extending EventBridge model as an example" -from aws_lambda_powertools.utilities.parser import parse, BaseModel -from aws_lambda_powertools.utilities.parser.models import EventBridgeModel - -from typing import List, Optional - -class OrderItem(BaseModel): - id: int - quantity: int - description: str - -class Order(BaseModel): - id: int - description: str - items: List[OrderItem] - -class OrderEventModel(EventBridgeModel): - detail: Order - -payload = { - "version": "0", - "id": "6a7e8feb-b491-4cf7-a9f1-bf3703467718", - "detail-type": "OrderPurchased", - "source": "OrderService", - "account": "111122223333", - "time": "2020-10-22T18:43:48Z", - "region": "us-west-1", - "resources": ["some_additional"], - "detail": { - "id": 10876546789, - "description": "My order", - "items": [ - { - "id": 1015938732, - "quantity": 1, - "description": "item xpto" - } - ] - } -} - -ret = parse(model=OrderEventModel, event=payload) - -assert ret.source == "OrderService" -assert ret.detail.description == "My order" -assert ret.detail_type == "OrderPurchased" # we rename it to snake_case since detail-type is an invalid name - -for order_item in ret.detail.items: - ... +```python hl_lines="19-20 32 45" title="Extending EventBridge model as an example" +--8<-- "docs/examples/utilities/parser/parser_extending_builtin_models.py" ``` **What's going on here, you might ask**: @@ -248,40 +111,8 @@ Envelopes can be used via `envelope` parameter available in both `parse` functio Here's an example of parsing a model found in an event coming from EventBridge, where all you want is what's inside the `detail` key. 
-```python hl_lines="18-22 25 31" title="Parsing payload in a given key only using envelope feature" -from aws_lambda_powertools.utilities.parser import event_parser, parse, BaseModel, envelopes -from aws_lambda_powertools.utilities.typing import LambdaContext - -class UserModel(BaseModel): - username: str - password1: str - password2: str - -payload = { - "version": "0", - "id": "6a7e8feb-b491-4cf7-a9f1-bf3703467718", - "detail-type": "CustomerSignedUp", - "source": "CustomerService", - "account": "111122223333", - "time": "2020-10-22T18:43:48Z", - "region": "us-west-1", - "resources": ["some_additional_"], - "detail": { - "username": "universe", - "password1": "myp@ssword", - "password2": "repeat password" - } -} - -ret = parse(model=UserModel, envelope=envelopes.EventBridgeEnvelope, event=payload) - -# Parsed model only contains our actual model, not the entire EventBridge + Payload parsed -assert ret.password1 == ret.password2 - -# Same behaviour but using our decorator -@event_parser(model=UserModel, envelope=envelopes.EventBridgeEnvelope) -def handler(event: UserModel, context: LambdaContext): - assert event.password1 == event.password2 +```python hl_lines="20-24 27 33" title="Parsing payload in a given key only using envelope feature" +--8<-- "docs/examples/utilities/parser/parser_envelope.py" ``` **What's going on here, you might ask**: @@ -316,53 +147,13 @@ Here's a snippet of how the EventBridge envelope we demonstrated previously is i === "EventBridge Model" ```python - from datetime import datetime - from typing import Any, Dict, List - - from aws_lambda_powertools.utilities.parser import BaseModel, Field - - - class EventBridgeModel(BaseModel): - version: str - id: str # noqa: A003,VNE003 - source: str - account: str - time: datetime - region: str - resources: List[str] - detail_type: str = Field(None, alias="detail-type") - detail: Dict[str, Any] + --8<-- "docs/examples/utilities/parser/parser_event_bridge_model.py" ``` === "EventBridge Envelope" - 
```python hl_lines="8 10 25 26" - from aws_lambda_powertools.utilities.parser import BaseEnvelope, models - from aws_lambda_powertools.utilities.parser.models import EventBridgeModel - - from typing import Any, Dict, Optional, TypeVar - - Model = TypeVar("Model", bound=BaseModel) - - class EventBridgeEnvelope(BaseEnvelope): - - def parse(self, data: Optional[Union[Dict[str, Any], Any]], model: Model) -> Optional[Model]: - """Parses data found with model provided - - Parameters - ---------- - data : Dict - Lambda event to be parsed - model : Model - Data model provided to parse after extracting data using envelope - - Returns - ------- - Any - Parsed detail payload with model provided - """ - parsed_envelope = EventBridgeModel.parse_obj(data) - return self._parse(data=parsed_envelope.detail, model=model) + ```python hl_lines="9-10 25 26" + --8<-- "docs/examples/utilities/parser/parser_event_bridge_envelope.py" ``` **What's going on here, you might ask**: @@ -393,19 +184,8 @@ Keep the following in mind regardless of which decorator you end up using it: Quick validation to verify whether the field `message` has the value of `hello world`. -```python hl_lines="6" title="Data field validation with validator" -from aws_lambda_powertools.utilities.parser import parse, BaseModel, validator - -class HelloWorldModel(BaseModel): - message: str - - @validator('message') - def is_hello_world(cls, v): - if v != "hello world": - raise ValueError("Message must be hello world!") - return v - -parse(model=HelloWorldModel, event={"message": "hello universe"}) +```python hl_lines="7" title="Data field validation with validator" +--8<-- "docs/examples/utilities/parser/parser_validator.py" ``` If you run as-is, you should expect the following error with the message we provided in our exception: @@ -417,21 +197,8 @@ message Alternatively, you can pass `'*'` as an argument for the decorator so that you can validate every value available. 
-```python hl_lines="7" title="Validating all data fields with custom logic" -from aws_lambda_powertools.utilities.parser import parse, BaseModel, validator - -class HelloWorldModel(BaseModel): - message: str - sender: str - - @validator('*') - def has_whitespace(cls, v): - if ' ' not in v: - raise ValueError("Must have whitespace...") - - return v - -parse(model=HelloWorldModel, event={"message": "hello universe", "sender": "universe"}) +```python hl_lines="8" title="Validating all data fields with custom logic" +--8<-- "docs/examples/utilities/parser/parser_validator_all.py" ``` ### validating entire model @@ -439,27 +206,7 @@ parse(model=HelloWorldModel, event={"message": "hello universe", "sender": "univ `root_validator` can help when you have a complex validation mechanism. For example finding whether data has been omitted, comparing field values, etc. ```python title="Comparing and validating multiple fields at once with root_validator" -from aws_lambda_powertools.utilities.parser import parse, BaseModel, root_validator - -class UserModel(BaseModel): - username: str - password1: str - password2: str - - @root_validator - def check_passwords_match(cls, values): - pw1, pw2 = values.get('password1'), values.get('password2') - if pw1 is not None and pw2 is not None and pw1 != pw2: - raise ValueError('passwords do not match') - return values - -payload = { - "username": "universe", - "password1": "myp@ssword", - "password2": "repeat password" -} - -parse(model=UserModel, event=payload) +--8<-- "docs/examples/utilities/parser/parser_validator_root.py" ``` ???+ info @@ -474,38 +221,8 @@ There are number of advanced use cases well documented in Pydantic's doc such as Two possible unknown use cases are Models and exception' serialization. Models have methods to [export them](https://pydantic-docs.helpmanual.io/usage/exporting_models/) as `dict`, `JSON`, `JSON Schema`, and Validation exceptions can be exported as JSON. 
-```python hl_lines="21 28-31" title="Converting data models in various formats" -from aws_lambda_powertools.utilities import Logger -from aws_lambda_powertools.utilities.parser import parse, BaseModel, ValidationError, validator - -logger = Logger(service="user") - -class UserModel(BaseModel): - username: str - password1: str - password2: str - -payload = { - "username": "universe", - "password1": "myp@ssword", - "password2": "repeat password" -} - -def my_function(): - try: - return parse(model=UserModel, event=payload) - except ValidationError as e: - logger.exception(e.json()) - return { - "status_code": 400, - "message": "Invalid username" - } - -User: UserModel = my_function() -user_dict = User.dict() -user_json = User.json() -user_json_schema_as_dict = User.schema() -user_json_schema_as_json = User.schema_json(indent=2) +```python hl_lines="24 29-32" title="Converting data models in various formats" +--8<-- "docs/examples/utilities/parser/parser_model_export.py" ``` These can be quite useful when manipulating models that later need to be serialized as inputs for services like DynamoDB, EventBridge, etc. diff --git a/docs/utilities/typing.md b/docs/utilities/typing.md index c1b4dbad32b..75587fef07b 100644 --- a/docs/utilities/typing.md +++ b/docs/utilities/typing.md @@ -11,11 +11,6 @@ This typing utility provides static typing classes that can be used to ease the The `LambdaContext` typing is typically used in the handler method for the Lambda function. 
-```python hl_lines="4" title="Annotating Lambda context type" -from typing import Any, Dict -from aws_lambda_powertools.utilities.typing import LambdaContext - -def handler(event: Dict[str, Any], context: LambdaContext) -> Dict[str, Any]: - # Insert business logic - return event +```python hl_lines="6" title="Annotating Lambda context type" +--8<-- "docs/examples/utilities/typing/lambda_context.py" ``` diff --git a/docs/utilities/validation.md b/docs/utilities/validation.md index e6ca0841d2d..8ce76a22cb2 100644 --- a/docs/utilities/validation.md +++ b/docs/utilities/validation.md @@ -33,14 +33,8 @@ It will fail fast with `SchemaValidationError` exception if event or response do === "validator_decorator.py" - ```python hl_lines="3 5" - from aws_lambda_powertools.utilities.validation import validator - - import schemas - - @validator(inbound_schema=schemas.INPUT, outbound_schema=schemas.OUTPUT) - def handler(event, context): - return event + ```python hl_lines="1 6" + --8<-- "docs/examples/utilities/validation/validator_decorator.py" ``` === "event.json" @@ -54,7 +48,7 @@ It will fail fast with `SchemaValidationError` exception if event or response do === "schemas.py" - ```python hl_lines="7 14 16 23 39 45 47 52" + ```python hl_lines="8 10 17 34 36 41" --8<-- "docs/shared/validation_basic_jsonschema.py" ``` @@ -67,22 +61,10 @@ It will fail fast with `SchemaValidationError` exception if event or response do You can also gracefully handle schema validation errors by catching `SchemaValidationError` exception. 
-=== "validator_decorator.py" - - ```python hl_lines="8" - from aws_lambda_powertools.utilities.validation import validate - from aws_lambda_powertools.utilities.validation.exceptions import SchemaValidationError - - import schemas +=== "validator_function.py" - def handler(event, context): - try: - validate(event=event, schema=schemas.INPUT) - except SchemaValidationError as e: - # do something before re-raising - raise - - return event + ```python hl_lines="9" + --8<-- "docs/examples/utilities/validation/validator_function.py" ``` === "event.json" @@ -96,7 +78,7 @@ You can also gracefully handle schema validation errors by catching `SchemaValid === "schemas.py" - ```python hl_lines="7 14 16 23 39 45 47 52" + ```python hl_lines="8 10 17 34 36 41" --8<-- "docs/shared/validation_basic_jsonschema.py" ``` @@ -112,14 +94,8 @@ Here is a sample custom EventBridge event, where we only validate what's inside We use the `envelope` parameter to extract the payload inside the `detail` key before validating. 
- ```python hl_lines="5" - from aws_lambda_powertools.utilities.validation import validator - - import schemas - - @validator(inbound_schema=schemas.INPUT, envelope="detail") - def handler(event, context): - return event + ```python hl_lines="6" + --8<-- "docs/examples/utilities/validation/unwrapping_events.py" ``` === "sample_wrapped_event.json" @@ -130,7 +106,7 @@ Here is a sample custom EventBridge event, where we only validate what's inside === "schemas.py" - ```python hl_lines="7 14 16 23 39 45 47 52" + ```python hl_lines="8 10 17 34 36 41" --8<-- "docs/shared/validation_basic_jsonschema.py" ``` @@ -142,14 +118,8 @@ This utility comes with built-in envelopes to easily extract the payload from po === "unwrapping_popular_event_sources.py" - ```python hl_lines="5 7" - from aws_lambda_powertools.utilities.validation import envelopes, validator - - import schemas - - @validator(inbound_schema=schemas.INPUT, envelope=envelopes.EVENTBRIDGE) - def handler(event, context): - return event + ```python hl_lines="6 8" + --8<-- "docs/examples/utilities/validation/unwrapping_popular_event_sources.py" ``` === "sample_wrapped_event.json" @@ -160,7 +130,7 @@ This utility comes with built-in envelopes to easily extract the payload from po === "schemas.py" - ```python hl_lines="7 14 16 23 39 45 47 52" + ```python hl_lines="8 10 17 34 36 41" --8<-- "docs/shared/validation_basic_jsonschema.py" ``` @@ -200,152 +170,13 @@ For each format defined in a dictionary key, you must use a regex, or a function === "validate_custom_format.py" ```python hl_lines="5-8 10" - from aws_lambda_powertools.utilities.validation import validate - - import schema - - custom_format = { - "int64": True, # simply ignore it, - "positive": lambda x: False if x < 0 else True - } - - validate(event=event, schema=schemas.INPUT, formats=custom_format) + --8<-- "docs/examples/utilities/validation/validate_custom_format.py" ``` === "schemas.py" - ```python hl_lines="68" 91 93" - INPUT = { - "$schema": 
"http://json-schema.org/draft-04/schema#", - "definitions": { - "AWSAPICallViaCloudTrail": { - "properties": { - "additionalEventData": {"$ref": "#/definitions/AdditionalEventData"}, - "awsRegion": {"type": "string"}, - "errorCode": {"type": "string"}, - "errorMessage": {"type": "string"}, - "eventID": {"type": "string"}, - "eventName": {"type": "string"}, - "eventSource": {"type": "string"}, - "eventTime": {"format": "date-time", "type": "string"}, - "eventType": {"type": "string"}, - "eventVersion": {"type": "string"}, - "recipientAccountId": {"type": "string"}, - "requestID": {"type": "string"}, - "requestParameters": {"$ref": "#/definitions/RequestParameters"}, - "resources": {"items": {"type": "object"}, "type": "array"}, - "responseElements": {"type": ["object", "null"]}, - "sourceIPAddress": {"type": "string"}, - "userAgent": {"type": "string"}, - "userIdentity": {"$ref": "#/definitions/UserIdentity"}, - "vpcEndpointId": {"type": "string"}, - "x-amazon-open-api-schema-readOnly": {"type": "boolean"}, - }, - "required": [ - "eventID", - "awsRegion", - "eventVersion", - "responseElements", - "sourceIPAddress", - "eventSource", - "requestParameters", - "resources", - "userAgent", - "readOnly", - "userIdentity", - "eventType", - "additionalEventData", - "vpcEndpointId", - "requestID", - "eventTime", - "eventName", - "recipientAccountId", - ], - "type": "object", - }, - "AdditionalEventData": { - "properties": { - "objectRetentionInfo": {"$ref": "#/definitions/ObjectRetentionInfo"}, - "x-amz-id-2": {"type": "string"}, - }, - "required": ["x-amz-id-2"], - "type": "object", - }, - "Attributes": { - "properties": { - "creationDate": {"format": "date-time", "type": "string"}, - "mfaAuthenticated": {"type": "string"}, - }, - "required": ["mfaAuthenticated", "creationDate"], - "type": "object", - }, - "LegalHoldInfo": { - "properties": { - "isUnderLegalHold": {"type": "boolean"}, - "lastModifiedTime": {"format": "int64", "type": "integer"}, - }, - "type": "object", - }, 
- "ObjectRetentionInfo": { - "properties": { - "legalHoldInfo": {"$ref": "#/definitions/LegalHoldInfo"}, - "retentionInfo": {"$ref": "#/definitions/RetentionInfo"}, - }, - "type": "object", - }, - "RequestParameters": { - "properties": { - "bucketName": {"type": "string"}, - "key": {"type": "string"}, - "legal-hold": {"type": "string"}, - "retention": {"type": "string"}, - }, - "required": ["bucketName", "key"], - "type": "object", - }, - "RetentionInfo": { - "properties": { - "lastModifiedTime": {"format": "int64", "type": "integer"}, - "retainUntilMode": {"type": "string"}, - "retainUntilTime": {"format": "int64", "type": "integer"}, - }, - "type": "object", - }, - "SessionContext": { - "properties": {"attributes": {"$ref": "#/definitions/Attributes"}}, - "required": ["attributes"], - "type": "object", - }, - "UserIdentity": { - "properties": { - "accessKeyId": {"type": "string"}, - "accountId": {"type": "string"}, - "arn": {"type": "string"}, - "principalId": {"type": "string"}, - "sessionContext": {"$ref": "#/definitions/SessionContext"}, - "type": {"type": "string"}, - }, - "required": ["accessKeyId", "sessionContext", "accountId", "principalId", "type", "arn"], - "type": "object", - }, - }, - "properties": { - "account": {"type": "string"}, - "detail": {"$ref": "#/definitions/AWSAPICallViaCloudTrail"}, - "detail-type": {"type": "string"}, - "id": {"type": "string"}, - "region": {"type": "string"}, - "resources": {"items": {"type": "string"}, "type": "array"}, - "source": {"type": "string"}, - "time": {"format": "date-time", "type": "string"}, - "version": {"type": "string"}, - }, - "required": ["detail-type", "resources", "id", "source", "time", "detail", "region", "version", "account"], - "title": "AWSAPICallViaCloudTrail", - "type": "object", - "x-amazon-events-detail-type": "AWS API Call via CloudTrail", - "x-amazon-events-source": "aws.s3", - } + ```python hl_lines="68 91 93" + --8<-- "docs/examples/utilities/validation/validate_jsonschema.py" ``` === 
"event.json"