Commit df75e42

fix(logger): correctly pick powertools or custom handler in custom environments (#6083)
* Fixing Powertools logger handler
* Fixing Powertools logger handler
* Make code check tool happy

1 parent 4ad37b5 · commit df75e42

File tree

6 files changed: +176 -32 lines changed
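For context, the bug shows up when a custom environment (or another library) has already attached its own handler to the logger name that Powertools Logger reuses. Below is a minimal sketch of the fixed behavior, mirroring the new functional tests further down; the `payment` service name and handler variables are illustrative:

```python
import logging

from aws_lambda_powertools import Logger

# A custom environment or third-party library pre-configures a handler on
# the same logger name that Powertools Logger is about to reuse.
foreign_handler = logging.StreamHandler()
logging.getLogger("payment").addHandler(foreign_handler)

# Powertools Logger is then initialized with an explicit custom handler.
custom_handler = logging.StreamHandler()
logger = Logger(service="payment", logger_handler=custom_handler)

# After this fix, registered_handler resolves to the handler we provided,
# not whichever handler happened to be attached to the logger first.
assert logger.registered_handler is custom_handler
assert logger.registered_handler is not foreign_handler
```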

aws_lambda_powertools/logging/exceptions.py (+8)

@@ -4,3 +4,11 @@ class InvalidLoggerSamplingRateError(Exception):
     """
 
     pass
+
+
+class OrphanedChildLoggerError(Exception):
+    """
+    Orphaned Child logger exception
+    """
+
+    pass
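The new exception signals a child Logger that has no parent to inherit a handler and formatter from. A short sketch of when it surfaces, following the functional test added in this commit; the `orders` service name and `order_id` key are illustrative:

```python
from aws_lambda_powertools import Logger
from aws_lambda_powertools.logging.exceptions import OrphanedChildLoggerError

# A child Logger created before any parent Logger exists for the same
# service has no parent handler (and therefore no formatter) to inherit.
child = Logger(service="orders", child=True)

try:
    child.append_keys(order_id="12345")  # needs the parent's formatter
except OrphanedChildLoggerError:
    # Fix: initialize the parent Logger first, or defer append_keys/remove_keys.
    pass
```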

aws_lambda_powertools/logging/logger.py (+34 -6)

@@ -14,12 +14,14 @@
 import sys
 import warnings
 from contextlib import contextmanager
-from typing import IO, TYPE_CHECKING, Any, Callable, Generator, Iterable, Mapping, TypeVar, overload
+from typing import IO, TYPE_CHECKING, Any, Callable, Generator, Iterable, Mapping, TypeVar, cast, overload
 
 from aws_lambda_powertools.logging.constants import (
+    LOGGER_ATTRIBUTE_HANDLER,
+    LOGGER_ATTRIBUTE_POWERTOOLS_HANDLER,
     LOGGER_ATTRIBUTE_PRECONFIGURED,
 )
-from aws_lambda_powertools.logging.exceptions import InvalidLoggerSamplingRateError
+from aws_lambda_powertools.logging.exceptions import InvalidLoggerSamplingRateError, OrphanedChildLoggerError
 from aws_lambda_powertools.logging.filters import SuppressFilter
 from aws_lambda_powertools.logging.formatter import (
     RESERVED_FORMATTER_CUSTOM_KEYS,

@@ -230,13 +232,14 @@ def __init__(
         self.child = child
         self.logger_formatter = logger_formatter
         self._stream = stream or sys.stdout
-        self.logger_handler = logger_handler or logging.StreamHandler(self._stream)
+
         self.log_uncaught_exceptions = log_uncaught_exceptions
 
         self._is_deduplication_disabled = resolve_truthy_env_var_choice(
             env=os.getenv(constants.LOGGER_LOG_DEDUPLICATION_ENV, "false"),
         )
         self._logger = self._get_logger()
+        self.logger_handler = logger_handler or self._get_handler()
 
         # NOTE: This is primarily to improve UX, so IDEs can autocomplete LambdaPowertoolsFormatter options
         # previously, we masked all of them as kwargs thus limiting feature discovery

@@ -275,6 +278,23 @@ def _get_logger(self) -> logging.Logger:
 
         return logging.getLogger(logger_name)
 
+    def _get_handler(self) -> logging.Handler:
+        # is a logger handler already configured?
+        if getattr(self, LOGGER_ATTRIBUTE_HANDLER, None):
+            return self.logger_handler
+
+        # Detect Powertools logger by checking for unique handler
+        # Retrieve the first handler if it's a Powertools instance
+        if getattr(self._logger, "powertools_handler", None):
+            return self._logger.handlers[0]
+
+        # for children, use parent's handler
+        if self.child:
+            return getattr(self._logger.parent, LOGGER_ATTRIBUTE_POWERTOOLS_HANDLER, None)  # type: ignore[return-value] # always checked in formatting
+
+        # otherwise, create a new stream handler (first time init)
+        return logging.StreamHandler(self._stream)
+
     def _init_logger(
         self,
         formatter_options: dict | None = None,

@@ -317,6 +337,7 @@ def _init_logger(
         # std logging will return the same Logger with our attribute if name is reused
         logger.debug(f"Marking logger {self.service} as preconfigured")
         self._logger.init = True  # type: ignore[attr-defined]
+        self._logger.powertools_handler = self.logger_handler  # type: ignore[attr-defined]
 
     def _configure_sampling(self) -> None:
         """Dynamically set log level based on sampling rate

@@ -723,13 +744,20 @@ def registered_handler(self) -> logging.Handler:
         """Convenience property to access the first logger handler"""
         # We ignore mypy here because self.child encodes whether or not self._logger.parent is
         # None, mypy can't see this from context but we can
-        handlers = self._logger.parent.handlers if self.child else self._logger.handlers  # type: ignore[union-attr]
-        return handlers[0]
+        return self._get_handler()
 
     @property
     def registered_formatter(self) -> BasePowertoolsFormatter:
         """Convenience property to access the first logger formatter"""
-        return self.registered_handler.formatter  # type: ignore[return-value]
+        handler = self.registered_handler
+        if handler is None:
+            raise OrphanedChildLoggerError(
+                "Orphan child loggers cannot append nor remove keys until a parent is initialized first. "
+                "To solve this issue, you can A) make sure a parent logger is initialized first, or B) move append/remove keys operations to a later stage."  # noqa: E501
+                "Reference: https://docs.powertools.aws.dev/lambda/python/latest/core/logger/#reusing-logger-across-your-code",
+            )
+
+        return cast(BasePowertoolsFormatter, handler.formatter)
 
     @property
     def log_level(self) -> int:
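The new `_get_handler()` establishes a clear resolution order: an explicit `logger_handler` wins, then an existing Powertools handler stored on the underlying `logging.Logger`, then the parent's handler for child Loggers, and finally a fresh `StreamHandler`. A small sketch of that order, with illustrative service names and assertions that follow the functional tests added in this commit:

```python
import logging

from aws_lambda_powertools import Logger

# 1) An explicit logger_handler always wins.
explicit = Logger(service="orders", logger_handler=logging.StreamHandler())
assert explicit.registered_handler is explicit.logger_handler

# 2) A second Logger for the same service reuses the handler the first one
#    created and stored on the std logging.Logger (powertools_handler).
first = Logger(service="payment")
second = Logger(service="payment")
assert second.registered_handler is first.registered_handler

# 3) Child Loggers resolve to their parent's Powertools handler.
child = Logger(service="payment", child=True)
assert child.registered_handler is first.registered_handler

# 4) Otherwise (first init, nothing pre-configured), a new StreamHandler
#    bound to the chosen stream is created.
```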

docs/core/logger.md (+2 -25)

@@ -722,31 +722,9 @@ stateDiagram-v2
 ```
 </center>
 
-> Python Logging hierarchy happens via the dot notation: `service`, `service.child`, `service.child_2`
-For inheritance, Logger uses a `child=True` parameter along with `service` being the same value across Loggers.
+For inheritance, Logger uses `child` parameter to ensure we don't compete with its parents config. We name child Loggers following Python's convention: _`{service}`.`{filename}`_.
 
-For child Loggers, we introspect the name of your module where `Logger(child=True, service="name")` is called, and we name your Logger as **{service}.{filename}**.
-
-???+ danger
-    A common issue when migrating from other Loggers is that `service` might be defined in the parent Logger (no child param), and not defined in the child Logger:
-
-    === "logging_inheritance_bad.py"
-
-        ```python hl_lines="1 9"
-        --8<-- "examples/logger/src/logging_inheritance_bad.py"
-        ```
-
-    === "logging_inheritance_module.py"
-        ```python hl_lines="1 9"
-        --8<-- "examples/logger/src/logging_inheritance_module.py"
-        ```
-
-    In this case, Logger will register a Logger named `payment`, and a Logger named `service_undefined`. The latter isn't inheriting from the parent, and will have no handler, resulting in no message being logged to standard output.
-
-???+ tip
-    This can be fixed by either ensuring both has the `service` value as `payment`, or simply use the environment variable `POWERTOOLS_SERVICE_NAME` to ensure service value will be the same across all Loggers when not explicitly set.
-
-    Do this instead:
+Changes are bidirectional between parents and loggers. That is, appending a key in a child or parent will ensure both have them. This means, having the same `service` name is important when instantiating them.
 
 === "logging_inheritance_good.py"
 

@@ -773,7 +751,6 @@ There are two important side effects when using child loggers:
     ```
 
 === "logging_inheritance_module.py"
-
     ```python hl_lines="1 9"
     --8<-- "examples/logger/src/logging_inheritance_module.py"
    ```
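The rewritten docs state that key changes are bidirectional between parent and child Loggers that share the same `service`. A minimal sketch of that behavior, using an illustrative `payment` service and `order_id` key:

```python
from aws_lambda_powertools import Logger

# Parent and child must share the same `service` value to share state.
parent = Logger(service="payment")
child = Logger(service="payment", child=True)

# Appending a key on either instance is reflected in both, because the
# child resolves to the parent's handler and formatter.
child.append_keys(order_id="12345")
parent.info("parent log line carries order_id")
child.info("child log line carries order_id too")
```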
New file (+15)

@@ -0,0 +1,15 @@
+from aws_lambda_powertools import Logger
+
+# Instance 1
+logger = Logger()
+
+# Simulating importing from another file
+logger = Logger()
+
+
+@logger.inject_lambda_context
+def lambda_handler(event, context):
+    message, append_keys = event.get("message", ""), event.get("append_keys", {})
+    logger.append_keys(**append_keys)
+    logger.info(message)
+    return "success"
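The e2e test below invokes this handler with a payload carrying a `message` and `append_keys`. A sketch of a local invocation, assuming the new handler above is saved as a module named `multiple_logger_instances.py` (hypothetical name) and using a stand-in for the Lambda context object:

```python
from dataclasses import dataclass

# Hypothetical module name for the new example handler shown above.
from multiple_logger_instances import lambda_handler


@dataclass
class FakeLambdaContext:
    # Minimal attributes read by @logger.inject_lambda_context
    function_name: str = "test-func"
    memory_limit_in_mb: int = 128
    invoked_function_arn: str = "arn:aws:lambda:eu-west-1:123456789012:function:test-func"
    aws_request_id: str = "da658bd3-2d6f-4e7b-8ec2-937234644fdc"


event = {
    "message": "logs should be visible with default settings",
    "append_keys": {"order_id": "123"},
}
print(lambda_handler(event, FakeLambdaContext()))  # -> "success"
```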

tests/e2e/logger/test_logger.py (+35)

@@ -29,6 +29,16 @@ def tz_handler_fn_arn(infrastructure: dict) -> str:
     return infrastructure.get("TzHandlerArn", "")
 
 
+@pytest.fixture
+def multiple_logger_instances_fn(infrastructure: dict) -> str:
+    return infrastructure.get("MultipleLoggerInstances", "")
+
+
+@pytest.fixture
+def multiple_logger_instances_arn(infrastructure: dict) -> str:
+    return infrastructure.get("MultipleLoggerInstancesArn", "")
+
+
 @pytest.mark.xdist_group(name="logger")
 def test_basic_lambda_logs_visible(basic_handler_fn, basic_handler_fn_arn):
     # GIVEN

@@ -50,6 +60,31 @@ def test_basic_lambda_logs_visible(basic_handler_fn, basic_handler_fn_arn):
     assert logs.have_keys(*LOGGER_LAMBDA_CONTEXT_KEYS) is True
 
 
+@pytest.mark.xdist_group(name="logger")
+def test_multiple_logger_instances(multiple_logger_instances_fn, multiple_logger_instances_arn):
+    # GIVEN
+    message = "logs should be visible with default settings"
+    custom_key = "order_id"
+    additional_keys = {custom_key: f"{uuid4()}"}
+    payload = json.dumps({"message": message, "append_keys": additional_keys})
+
+    # WHEN
+    _, execution_time = data_fetcher.get_lambda_response(lambda_arn=multiple_logger_instances_arn, payload=payload)
+    data_fetcher.get_lambda_response(lambda_arn=multiple_logger_instances_arn, payload=payload)
+
+    # THEN
+    logs = data_fetcher.get_logs(
+        function_name=multiple_logger_instances_fn,
+        start_time=execution_time,
+        minimum_log_entries=2,
+    )
+
+    assert len(logs) == 2
+    assert len(logs.get_cold_start_log()) == 1
+    assert len(logs.get_log(key=custom_key)) == 2
+    assert logs.have_keys(*LOGGER_LAMBDA_CONTEXT_KEYS) is True
+
+
 @pytest.mark.xdist_group(name="logger")
 @pytest.mark.parametrize("tz", ["US/Eastern", "UTC", "Asia/Shanghai"])
 @pytest.mark.parametrize("datefmt", ["%z", None])

tests/functional/logger/required_dependencies/test_logger.py (+82 -1)

@@ -16,7 +16,7 @@
 
 from aws_lambda_powertools import Logger
 from aws_lambda_powertools.logging import correlation_paths
-from aws_lambda_powertools.logging.exceptions import InvalidLoggerSamplingRateError
+from aws_lambda_powertools.logging.exceptions import InvalidLoggerSamplingRateError, OrphanedChildLoggerError
 from aws_lambda_powertools.logging.formatter import (
     BasePowertoolsFormatter,
     LambdaPowertoolsFormatter,

@@ -1286,3 +1286,84 @@ def test_clear_state_log_output(stdout, service_name):
     assert "custom_key" in first_log
     assert first_log["custom_key"] == "test_value"
     assert "custom_key" not in second_log
+
+
+def test_logger_registered_handler_is_custom_handler(service_name):
+    # GIVEN a library or environment pre-setup a logger for us using the same name (see #4277)
+    class ForeignHandler(logging.StreamHandler): ...
+
+    foreign_handler = ForeignHandler()
+    logging.getLogger(service_name).addHandler(foreign_handler)
+
+    # WHEN Logger init with a custom handler
+    custom_handler = logging.StreamHandler()
+    logger = Logger(service=service_name, logger_handler=custom_handler)
+
+    # THEN registered handler should always return what we provided
+    assert logger.registered_handler is not foreign_handler
+    assert logger.registered_handler is custom_handler
+    assert logger.logger_handler is custom_handler
+    assert logger.handlers == [foreign_handler, custom_handler]
+
+
+def test_child_logger_registered_handler_is_custom_handler(service_name):
+    # GIVEN
+    class ForeignHandler(logging.StreamHandler): ...
+
+    foreign_handler = ForeignHandler()
+    logging.getLogger(service_name).addHandler(foreign_handler)
+
+    custom_handler = logging.StreamHandler()
+    custom_handler.name = "CUSTOM HANDLER"
+    parent = Logger(service=service_name, logger_handler=custom_handler)
+
+    # WHEN a child Logger init
+    child = Logger(service=service_name, child=True)
+
+    # THEN child registered handler should always return what we provided in the parent
+    assert child.registered_handler is not foreign_handler
+    assert child.registered_handler is custom_handler
+    assert child.registered_handler is parent.registered_handler
+
+
+def test_logger_handler_is_created_despite_env_pre_setup(service_name):
+    # GIVEN a library or environment pre-setup a logger for us using the same name
+    environment_handler = logging.StreamHandler()
+    logging.getLogger(service_name).addHandler(environment_handler)
+
+    # WHEN Logger init without a custom handler
+    logger = Logger(service=service_name)
+
+    # THEN registered handler should be Powertools default handler, not env
+    assert logger.registered_handler is not environment_handler
+
+
+def test_child_logger_append_keys_before_parent(stdout, service_name):
+    # GIVEN a child Logger is initialized before/without its parent
+    child = Logger(stream=stdout, service=service_name, child=True)
+
+    # WHEN a child Logger appends a key
+    # THEN it will raise an OrphanedChildLoggerError
+    with pytest.raises(OrphanedChildLoggerError):
+        child.append_keys(customer_id="value")
+
+
+def test_powertools_logger_handler_is_created_only_once_and_propagated(lambda_context, stdout, service_name):
+    # GIVEN an instance of Logger
+    logger = Logger(service=service_name, stream=stdout)
+    request_id = "xxx-111-222"
+    mock_event = {"requestContext": {"requestId": request_id}}
+
+    # GIVEN another instance of Logger to mimic importing from another file
+    logger = Logger(service=service_name, stream=stdout)
+
+    # WHEN we use inject_lambda_context
+    @logger.inject_lambda_context(correlation_id_path=correlation_paths.API_GATEWAY_REST)
+    def handler(event, context):
+        logger.info("Foo")
+
+    handler(mock_event, lambda_context)
+
+    # THEN we must be able to inject context
+    log = capture_logging_output(stdout)
+    assert request_id == log["correlation_id"]
