Skip to content

Commit a7d7c86

Browse files
authored
Merge pull request #140 from gyft/fix-log-key-order
feat(logger): customizing logger output
2 parents f137ba7 + 42f6b22 commit a7d7c86

File tree

3 files changed

+95
-52
lines changed

3 files changed

+95
-52
lines changed

aws_lambda_powertools/logging/formatter.py

+16-31
Original file line numberDiff line numberDiff line change
@@ -1,27 +1,5 @@
11
import json
22
import logging
3-
from typing import Any
4-
5-
6-
def json_formatter(unserializable_value: Any):
7-
"""JSON custom serializer to cast unserializable values to strings.
8-
9-
Example
10-
-------
11-
12-
**Serialize unserializable value to string**
13-
14-
class X: pass
15-
value = {"x": X()}
16-
17-
json.dumps(value, default=json_formatter)
18-
19-
Parameters
20-
----------
21-
unserializable_value: Any
22-
Python object unserializable by JSON
23-
"""
24-
return str(unserializable_value)
253

264

275
class JsonFormatter(logging.Formatter):
@@ -42,19 +20,23 @@ def __init__(self, **kwargs):
4220
unserializable values. It must not throw. Defaults to a function that
4321
coerces the value to a string.
4422
23+
The `log_record_order` kwarg is used to specify the order of the keys used in
24+
the structured json logs. By default the order is: "level", "location", "message", "timestamp",
25+
"service" and "sampling_rate".
26+
4527
Other kwargs are used to specify log field format strings.
4628
"""
47-
self.default_json_formatter = kwargs.pop("json_default", json_formatter)
48-
datefmt = kwargs.pop("datefmt", None)
29+
# Set the default unserializable function, by default values will be cast as str.
30+
self.default_json_formatter = kwargs.pop("json_default", str)
31+
# Set the insertion order for the log messages
32+
self.format_dict = dict.fromkeys(kwargs.pop("log_record_order", ["level", "location", "message", "timestamp"]))
33+
# Set the date format used by `asctime`
34+
super(JsonFormatter, self).__init__(datefmt=kwargs.pop("datefmt", None))
4935

50-
super(JsonFormatter, self).__init__(datefmt=datefmt)
5136
self.reserved_keys = ["timestamp", "level", "location"]
52-
self.format_dict = {
53-
"timestamp": "%(asctime)s",
54-
"level": "%(levelname)s",
55-
"location": "%(funcName)s:%(lineno)d",
56-
}
57-
self.format_dict.update(kwargs)
37+
self.format_dict.update(
38+
{"level": "%(levelname)s", "location": "%(funcName)s:%(lineno)d", "timestamp": "%(asctime)s", **kwargs}
39+
)
5840

5941
def update_formatter(self, **kwargs):
6042
self.format_dict.update(kwargs)
@@ -94,4 +76,7 @@ def format(self, record): # noqa: A003
9476
if record.exc_text:
9577
log_dict["exception"] = record.exc_text
9678

79+
# Filter out top level key with values that are None
80+
log_dict = {k: v for k, v in log_dict.items() if v is not None}
81+
9782
return json.dumps(log_dict, default=self.default_json_formatter)

aws_lambda_powertools/logging/logger.py

+25-21
Original file line numberDiff line numberDiff line change
@@ -136,16 +136,6 @@ def __getattr__(self, name):
136136
# https://github.com/awslabs/aws-lambda-powertools-python/issues/97
137137
return getattr(self._logger, name)
138138

139-
def _get_log_level(self, level: Union[str, int]) -> Union[str, int]:
140-
""" Returns preferred log level set by the customer in upper case """
141-
if isinstance(level, int):
142-
return level
143-
144-
log_level: str = level or os.getenv("LOG_LEVEL")
145-
log_level = log_level.upper() if log_level is not None else logging.INFO
146-
147-
return log_level
148-
149139
def _get_logger(self):
150140
""" Returns a Logger named {self.service}, or {self.service.filename} for child loggers"""
151141
logger_name = self.service
@@ -154,17 +144,6 @@ def _get_logger(self):
154144

155145
return logging.getLogger(logger_name)
156146

157-
def _get_caller_filename(self):
158-
""" Return caller filename by finding the caller frame """
159-
# Current frame => _get_logger()
160-
# Previous frame => logger.py
161-
# Before previous frame => Caller
162-
frame = inspect.currentframe()
163-
caller_frame = frame.f_back.f_back.f_back
164-
filename = caller_frame.f_globals["__name__"]
165-
166-
return filename
167-
168147
def _init_logger(self, **kwargs):
169148
"""Configures new logger"""
170149

@@ -207,6 +186,8 @@ def inject_lambda_context(self, lambda_handler: Callable[[Dict, Any], Any] = Non
207186
208187
Parameters
209188
----------
189+
lambda_handler : Callable
190+
Method to inject the lambda context
210191
log_event : bool, optional
211192
Instructs logger to log Lambda Event, by default False
212193
@@ -291,6 +272,29 @@ def structure_logs(self, append: bool = False, **kwargs):
291272
# Set a new formatter for a logger handler
292273
handler.setFormatter(JsonFormatter(**self._default_log_keys, **kwargs))
293274

275+
@staticmethod
276+
def _get_log_level(level: Union[str, int]) -> Union[str, int]:
277+
""" Returns preferred log level set by the customer in upper case """
278+
if isinstance(level, int):
279+
return level
280+
281+
log_level: str = level or os.getenv("LOG_LEVEL")
282+
log_level = log_level.upper() if log_level is not None else logging.INFO
283+
284+
return log_level
285+
286+
@staticmethod
287+
def _get_caller_filename():
288+
""" Return caller filename by finding the caller frame """
289+
# Current frame => _get_logger()
290+
# Previous frame => logger.py
291+
# Before previous frame => Caller
292+
frame = inspect.currentframe()
293+
caller_frame = frame.f_back.f_back.f_back
294+
filename = caller_frame.f_globals["__name__"]
295+
296+
return filename
297+
294298

295299
def set_package_logger(
296300
level: Union[str, int] = logging.DEBUG, stream: sys.stdout = None, formatter: logging.Formatter = None

tests/functional/test_aws_lambda_logging.py

+54
Original file line numberDiff line numberDiff line change
@@ -116,3 +116,57 @@ class Unserializable:
116116
# THEN json_default should not be in the log message and the custom unserializable handler should be used
117117
assert log_dict["message"]["x"] == "<non-serializable: Unserializable>"
118118
assert "json_default" not in log_dict
119+
120+
121+
def test_log_dict_key_seq(stdout):
122+
# GIVEN the default logger configuration
123+
logger = Logger(stream=stdout)
124+
125+
# WHEN logging a message
126+
logger.info("Message")
127+
128+
log_dict: dict = json.loads(stdout.getvalue())
129+
130+
# THEN the beginning key sequence must be `level,location,message,timestamp`
131+
assert ",".join(list(log_dict.keys())[:4]) == "level,location,message,timestamp"
132+
133+
134+
def test_log_dict_key_custom_seq(stdout):
135+
# GIVEN a logger configuration with log_record_order set to ["message"]
136+
logger = Logger(stream=stdout, log_record_order=["message"])
137+
138+
# WHEN logging a message
139+
logger.info("Message")
140+
141+
log_dict: dict = json.loads(stdout.getvalue())
142+
143+
# THEN the first key should be "message"
144+
assert list(log_dict.keys())[0] == "message"
145+
146+
147+
def test_log_custom_formatting(stdout):
148+
# GIVEN a logger with a custom `location` format and a custom `datefmt`
149+
logger = Logger(stream=stdout, location="[%(funcName)s] %(module)s", datefmt="fake-datefmt")
150+
151+
# WHEN logging a message
152+
logger.info("foo")
153+
154+
log_dict: dict = json.loads(stdout.getvalue())
155+
156+
# THEN the `location` and "timestamp" should match the formatting
157+
assert log_dict["location"] == "[test_log_custom_formatting] test_aws_lambda_logging"
158+
assert log_dict["timestamp"] == "fake-datefmt"
159+
160+
161+
def test_log_dict_key_strip_nones(stdout):
162+
# GIVEN a logger configuration where we set `location` and `timestamp` to None
163+
# Note: level, sampling_rate and service can not be suppressed
164+
logger = Logger(stream=stdout, level=None, location=None, timestamp=None, sampling_rate=None, service=None)
165+
166+
# WHEN logging a message
167+
logger.info("foo")
168+
169+
log_dict: dict = json.loads(stdout.getvalue())
170+
171+
# THEN the keys should only include `level`, `message`, `service`, `sampling_rate`
172+
assert sorted(log_dict.keys()) == ["level", "message", "sampling_rate", "service"]

0 commit comments

Comments
 (0)