Skip to content

Commit 213caed

Browse files
authored
refactor: simplify custom formatter for minor changes (#417)
1 parent daa35aa commit 213caed

File tree

12 files changed

+82
-30
lines changed

12 files changed

+82
-30
lines changed

aws_lambda_powertools/logging/formatter.py

+6-2
Original file line numberDiff line numberDiff line change
@@ -69,7 +69,7 @@ def __init__(
6969
7070
The `log_record_order` kwarg is used to specify the order of the keys used in
7171
the structured json logs. By default the order is: "level", "location", "message", "timestamp",
72-
"service" and "sampling_rate".
72+
"service".
7373
7474
Other kwargs are used to specify log field format strings.
7575
@@ -113,6 +113,10 @@ def __init__(
113113
keys_combined = {**self._build_default_keys(), **kwargs}
114114
self.log_format.update(**keys_combined)
115115

116+
def serialize(self, log: Dict) -> str:
117+
"""Serialize structured log dict to JSON str"""
118+
return self.json_serializer(log)
119+
116120
def format(self, record: logging.LogRecord) -> str: # noqa: A003
117121
"""Format logging record as structured JSON str"""
118122
formatted_log = self._extract_log_keys(log_record=record)
@@ -121,7 +125,7 @@ def format(self, record: logging.LogRecord) -> str: # noqa: A003
121125
formatted_log["xray_trace_id"] = self._get_latest_trace_id()
122126
formatted_log = self._strip_none_records(records=formatted_log)
123127

124-
return self.json_serializer(formatted_log)
128+
return self.serialize(log=formatted_log)
125129

126130
def formatTime(self, record: logging.LogRecord, datefmt: Optional[str] = None) -> str:
127131
record_ts = self.converter(record.created)

aws_lambda_powertools/logging/logger.py

+11-9
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44
import os
55
import random
66
import sys
7-
from typing import Any, Callable, Dict, Iterable, Optional, Union
7+
from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union
88

99
import jmespath
1010

@@ -19,6 +19,8 @@
1919

2020
is_cold_start = True
2121

22+
PowertoolsFormatter = TypeVar("PowertoolsFormatter", bound=BasePowertoolsFormatter)
23+
2224

2325
def _is_cold_start() -> bool:
2426
"""Verifies whether is cold start
@@ -70,8 +72,8 @@ class Logger(logging.Logger): # lgtm [py/missing-call-to-init]
7072
sample rate for debug calls within execution context defaults to 0.0
7173
stream: sys.stdout, optional
7274
valid output for a logging stream, by default sys.stdout
73-
logger_formatter: BasePowertoolsFormatter, optional
74-
custom logging formatter that implements BasePowertoolsFormatter
75+
logger_formatter: PowertoolsFormatter, optional
76+
custom logging formatter that implements PowertoolsFormatter
7577
logger_handler: logging.Handler, optional
7678
custom logging handler e.g. logging.FileHandler("file.log")
7779
@@ -87,7 +89,7 @@ class Logger(logging.Logger): # lgtm [py/missing-call-to-init]
8789
json_default : Callable, optional
8890
function to coerce unserializable values, by default `str()`
8991
90-
Only used when no custom JSON encoder is set
92+
Only used when no custom formatter is set
9193
utc : bool, optional
9294
set logging timestamp to UTC, by default False to continue to use local time as per stdlib
9395
log_record_order : list, optional
@@ -170,7 +172,7 @@ def __init__(
170172
child: bool = False,
171173
sampling_rate: float = None,
172174
stream: sys.stdout = None,
173-
logger_formatter: Optional[BasePowertoolsFormatter] = None,
175+
logger_formatter: Optional[PowertoolsFormatter] = None,
174176
logger_handler: Optional[logging.Handler] = None,
175177
**kwargs,
176178
):
@@ -198,7 +200,7 @@ def __getattr__(self, name):
198200
return getattr(self._logger, name)
199201

200202
def _get_logger(self):
201-
""" Returns a Logger named {self.service}, or {self.service.filename} for child loggers"""
203+
"""Returns a Logger named {self.service}, or {self.service.filename} for child loggers"""
202204
logger_name = self.service
203205
if self.child:
204206
logger_name = f"{self.service}.{self._get_caller_filename()}"
@@ -346,7 +348,7 @@ def registered_handler(self) -> logging.Handler:
346348
return handlers[0]
347349

348350
@property
349-
def registered_formatter(self) -> Optional[BasePowertoolsFormatter]:
351+
def registered_formatter(self) -> Optional[PowertoolsFormatter]:
350352
"""Convenience property to access logger formatter"""
351353
return self.registered_handler.formatter
352354

@@ -384,7 +386,7 @@ def set_correlation_id(self, value: str):
384386

385387
@staticmethod
386388
def _get_log_level(level: Union[str, int, None]) -> Union[str, int]:
387-
""" Returns preferred log level set by the customer in upper case """
389+
"""Returns preferred log level set by the customer in upper case"""
388390
if isinstance(level, int):
389391
return level
390392

@@ -396,7 +398,7 @@ def _get_log_level(level: Union[str, int, None]) -> Union[str, int]:
396398

397399
@staticmethod
398400
def _get_caller_filename():
399-
""" Return caller filename by finding the caller frame """
401+
"""Return caller filename by finding the caller frame"""
400402
# Current frame => _get_logger()
401403
# Previous frame => logger.py
402404
# Before previous frame => Caller

aws_lambda_powertools/tracing/tracer.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -720,7 +720,7 @@ def __build_config(
720720
patch_modules: Union[List, Tuple] = None,
721721
provider: BaseProvider = None,
722722
):
723-
""" Populates Tracer config for new and existing initializations """
723+
"""Populates Tracer config for new and existing initializations"""
724724
is_disabled = disabled if disabled is not None else self._is_tracer_disabled()
725725
is_service = resolve_env_var_choice(choice=service, env=os.getenv(constants.SERVICE_NAME_ENV))
726726

aws_lambda_powertools/utilities/data_classes/api_gateway_proxy_event.py

+3-3
Original file line numberDiff line numberDiff line change
@@ -164,7 +164,7 @@ def path(self) -> str:
164164

165165
@property
166166
def stage(self) -> str:
167-
"""The deployment stage of the API request """
167+
"""The deployment stage of the API request"""
168168
return self["requestContext"]["stage"]
169169

170170
@property
@@ -352,7 +352,7 @@ def authorizer(self) -> Optional[RequestContextV2Authorizer]:
352352

353353
@property
354354
def domain_name(self) -> str:
355-
"""A domain name """
355+
"""A domain name"""
356356
return self["requestContext"]["domainName"]
357357

358358
@property
@@ -375,7 +375,7 @@ def route_key(self) -> str:
375375

376376
@property
377377
def stage(self) -> str:
378-
"""The deployment stage of the API request """
378+
"""The deployment stage of the API request"""
379379
return self["requestContext"]["stage"]
380380

381381
@property

aws_lambda_powertools/utilities/data_classes/appsync_resolver_event.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,7 @@ class AppSyncIdentityIAM(DictWrapper):
2222

2323
@property
2424
def source_ip(self) -> List[str]:
25-
"""The source IP address of the caller received by AWS AppSync. """
25+
"""The source IP address of the caller received by AWS AppSync."""
2626
return self["sourceIp"]
2727

2828
@property
@@ -67,7 +67,7 @@ class AppSyncIdentityCognito(DictWrapper):
6767

6868
@property
6969
def source_ip(self) -> List[str]:
70-
"""The source IP address of the caller received by AWS AppSync. """
70+
"""The source IP address of the caller received by AWS AppSync."""
7171
return self["sourceIp"]
7272

7373
@property

aws_lambda_powertools/utilities/data_classes/event_bridge_event.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -60,7 +60,7 @@ def detail_type(self) -> str:
6060

6161
@property
6262
def detail(self) -> Dict[str, Any]:
63-
"""A JSON object, whose content is at the discretion of the service originating the event. """
63+
"""A JSON object, whose content is at the discretion of the service originating the event."""
6464
return self["detail"]
6565

6666
@property

aws_lambda_powertools/utilities/data_classes/s3_object_event.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -53,7 +53,7 @@ def payload(self) -> str:
5353

5454

5555
class S3ObjectUserRequest(DictWrapper):
56-
""" Information about the original call to S3 Object Lambda."""
56+
"""Information about the original call to S3 Object Lambda."""
5757

5858
@property
5959
def url(self) -> str:

aws_lambda_powertools/utilities/data_classes/sns_event.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -46,7 +46,7 @@ def message_id(self) -> str:
4646

4747
@property
4848
def message(self) -> str:
49-
"""A string that describes the message. """
49+
"""A string that describes the message."""
5050
return self["Sns"]["Message"]
5151

5252
@property

aws_lambda_powertools/utilities/data_classes/sqs_event.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -70,7 +70,7 @@ def binary_value(self) -> Optional[str]:
7070

7171
@property
7272
def data_type(self) -> str:
73-
""" The message attribute data type. Supported types include `String`, `Number`, and `Binary`."""
73+
"""The message attribute data type. Supported types include `String`, `Number`, and `Binary`."""
7474
return self["dataType"]
7575

7676

@@ -120,7 +120,7 @@ def md5_of_body(self) -> str:
120120

121121
@property
122122
def event_source(self) -> str:
123-
"""The AWS service from which the SQS record originated. For SQS, this is `aws:sqs` """
123+
"""The AWS service from which the SQS record originated. For SQS, this is `aws:sqs`"""
124124
return self["eventSource"]
125125

126126
@property

aws_lambda_powertools/utilities/idempotency/persistence/base.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -110,7 +110,7 @@ class BasePersistenceLayer(ABC):
110110
"""
111111

112112
def __init__(self):
113-
"""Initialize the defaults """
113+
"""Initialize the defaults"""
114114
self.configured = False
115115
self.event_key_jmespath: Optional[str] = None
116116
self.event_key_compiled_jmespath = None

docs/core/logger.md

+50-4
Original file line numberDiff line numberDiff line change
@@ -555,6 +555,32 @@ Sampling decision happens at the Logger initialization. This means sampling may
555555
}
556556
```
557557

558+
### LambdaPowertoolsFormatter
559+
560+
Logger propagates a few formatting configurations to the built-in `LambdaPowertoolsFormatter` logging formatter.
561+
562+
If you prefer configuring it separately, or want to bring this JSON Formatter to another application, these are the supported settings:
563+
564+
Parameter | Description | Default
565+
------------------------------------------------- | ------------------------------------------------- | -------------------------------------------------
566+
**`json_serializer`** | function to serialize `obj` to a JSON formatted `str` | `json.dumps`
567+
**`json_deserializer`** | function to deserialize `str`, `bytes`, `bytearray` containing a JSON document to a Python obj | `json.loads`
568+
**`json_default`** | function to coerce unserializable values, when no custom serializer/deserializer is set | `str`
569+
**`datefmt`** | string directives (strftime) to format log timestamp | `%Y-%m-%d %H:%M:%S,%F%z`, where `%F` is a custom ms directive
570+
**`utc`** | set logging timestamp to UTC | `False`
571+
**`log_record_order`** | set order of log keys when logging | `["level", "location", "message", "timestamp"]`
572+
**`kwargs`** | key-value to be included in log messages | `None`
573+
574+
=== "LambdaPowertoolsFormatter.py"
575+
576+
```python hl_lines="2 4-5"
577+
from aws_lambda_powertools import Logger
578+
from aws_lambda_powertools.logging.formatter import LambdaPowertoolsFormatter
579+
580+
formatter = LambdaPowertoolsFormatter(utc=True, log_record_order=["message"])
581+
logger = Logger(service="example", logger_formatter=formatter)
582+
```
583+
558584
### Migrating from other Loggers
559585

560586
If you're migrating from other Loggers, there are a few key points to be aware of: [Service parameter](#the-service-parameter), [Inheriting Loggers](#inheriting-loggers), [Overriding Log records](#overriding-log-records), and [Logging exceptions](#logging-exceptions).
@@ -645,7 +671,6 @@ Logger allows you to either change the format or suppress the following keys alt
645671
}
646672
```
647673

648-
649674
#### Reordering log keys position
650675

651676
You can change the order of [standard Logger keys](#standard-structured-keys) or any keys that will be appended later at runtime via the `log_record_order` parameter.
@@ -744,9 +769,30 @@ By default, Logger uses StreamHandler and logs to standard output. You can overr
744769

745770
#### Bring your own formatter
746771

747-
By default, Logger uses a custom Formatter that persists its custom structure between non-cold start invocations. There could be scenarios where the existing feature set isn't sufficient to your formatting needs.
772+
By default, Logger uses [LambdaPowertoolsFormatter](#lambdapowertoolsformatter) that persists its custom structure between non-cold start invocations. There could be scenarios where the existing feature set isn't sufficient for your formatting needs.
773+
774+
For **minor changes like remapping keys** after all log record processing has completed, you can override `serialize` method from [LambdaPowertoolsFormatter](#lambdapowertoolsformatter):
775+
776+
=== "custom_formatter.py"
777+
778+
```python
779+
from aws_lambda_powertools import Logger
780+
from aws_lambda_powertools.logging.formatter import LambdaPowertoolsFormatter
781+
782+
from typing import Dict
783+
784+
class CustomFormatter(LambdaPowertoolsFormatter):
785+
def serialize(self, log: Dict) -> str:
786+
"""Serialize final structured log dict to JSON str"""
787+
log["event"] = log.pop("message") # rename message key to event
788+
return self.json_serializer(log) # use configured json serializer
789+
790+
my_formatter = CustomFormatter()
791+
logger = Logger(service="example", logger_formatter=my_formatter)
792+
logger.info("hello")
793+
```
748794

749-
For this, you can subclass `BasePowertoolsFormatter`, implement `append_keys` method, and override `format` standard logging method. This ensures the current feature set of Logger like injecting Lambda context and sampling will continue to work.
795+
For **replacing the formatter entirely**, you can subclass `BasePowertoolsFormatter`, implement `append_keys` method, and override `format` standard logging method. This ensures the current feature set of Logger like [injecting Lambda context](#capturing-lambda-context-info) and [sampling](#sampling-debug-logs) will continue to work.
750796

751797
!!! info
752798
You might need to implement `remove_keys` method if you make use of the feature too.
@@ -758,7 +804,7 @@ For this, you can subclass `BasePowertoolsFormatter`, implement `append_keys` me
758804
from aws_lambda_powertools.logging.formatter import BasePowertoolsFormatter
759805

760806
class CustomFormatter(BasePowertoolsFormatter):
761-
custom_format = {} # will hold our structured keys
807+
custom_format = {} # arbitrary dict to hold our structured keys
762808

763809
def append_keys(self, **additional_keys):
764810
# also used by `inject_lambda_context` decorator

tests/functional/test_metrics.py

+3-3
Original file line numberDiff line numberDiff line change
@@ -85,7 +85,7 @@ def a_hundred_metrics() -> List[Dict[str, str]]:
8585
def serialize_metrics(
8686
metrics: List[Dict], dimensions: List[Dict], namespace: str, metadatas: List[Dict] = None
8787
) -> Dict:
88-
""" Helper function to build EMF object from a list of metrics, dimensions """
88+
"""Helper function to build EMF object from a list of metrics, dimensions"""
8989
my_metrics = MetricManager(namespace=namespace)
9090
for dimension in dimensions:
9191
my_metrics.add_dimension(**dimension)
@@ -102,7 +102,7 @@ def serialize_metrics(
102102

103103

104104
def serialize_single_metric(metric: Dict, dimension: Dict, namespace: str, metadata: Dict = None) -> Dict:
105-
""" Helper function to build EMF object from a given metric, dimension and namespace """
105+
"""Helper function to build EMF object from a given metric, dimension and namespace"""
106106
my_metrics = MetricManager(namespace=namespace)
107107
my_metrics.add_metric(**metric)
108108
my_metrics.add_dimension(**dimension)
@@ -114,7 +114,7 @@ def serialize_single_metric(metric: Dict, dimension: Dict, namespace: str, metad
114114

115115

116116
def remove_timestamp(metrics: List):
117-
""" Helper function to remove Timestamp key from EMF objects as they're built at serialization """
117+
"""Helper function to remove Timestamp key from EMF objects as they're built at serialization"""
118118
for metric in metrics:
119119
del metric["_aws"]["Timestamp"]
120120

0 commit comments

Comments
 (0)