Commit bdb3925

Merge pull request #250 from heitorlessa/fix/#249
fix: prevent touching preconfigured loggers #249
2 parents 835789e + f33cffe

4 files changed: +128 -95 lines changed


CHANGELOG.md (+3)

```diff
@@ -7,6 +7,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 ## [Unreleased]
 - **Docs**: Add clarification to Tracer docs for how `capture_method` decorator can cause function responses to be read and serialized.
 
+### Fixed
+- **Logger**: Bugfix to prevent parent loggers with the same name being configured more than once
+
 ## [1.9.0] - 2020-12-04
 
 ### Added
```
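For context, a minimal sketch of the behaviour the "Fixed" entry refers to, using only the standard logging module rather than the Powertools Logger API: named loggers are cached per process, so configuring the same name twice attaches duplicate handlers and every record is then emitted once per handler.

```python
import io
import logging

stream = io.StringIO()

# Simulate two initialisations against the same named logger
for _ in range(2):
    log = logging.getLogger("payment")      # same cached Logger object both times
    log.setLevel(logging.INFO)
    log.addHandler(logging.StreamHandler(stream))

log.info("hello")
print(stream.getvalue())  # "hello" appears twice: one line per duplicated handler
```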

aws_lambda_powertools/logging/logger.py (+26 -15)

```diff
@@ -148,21 +148,32 @@ def _get_logger(self):
     def _init_logger(self, **kwargs):
         """Configures new logger"""
 
-        # Skip configuration if it's a child logger to prevent
-        # multiple handlers being attached as well as different sampling mechanisms
-        # and multiple messages from being logged as handlers can be duplicated
-        if not self.child:
-            self._configure_sampling()
-            self._logger.setLevel(self.log_level)
-            self._logger.addHandler(self._handler)
-            self.structure_logs(**kwargs)
-
-            logger.debug("Adding filter in root logger to suppress child logger records to bubble up")
-            for handler in logging.root.handlers:
-                # It'll add a filter to suppress any child logger from self.service
-                # Where service is Order, it'll reject parent logger Order,
-                # and child loggers such as Order.checkout, Order.shared
-                handler.addFilter(SuppressFilter(self.service))
+        # Skip configuration if it's a child logger or a pre-configured logger
+        # to prevent the following:
+        #   a) multiple handlers being attached
+        #   b) different sampling mechanisms
+        #   c) multiple messages from being logged as handlers can be duplicated
+        is_logger_preconfigured = getattr(self._logger, "init", False)
+        if self.child or is_logger_preconfigured:
+            return
+
+        self._configure_sampling()
+        self._logger.setLevel(self.log_level)
+        self._logger.addHandler(self._handler)
+        self.structure_logs(**kwargs)
+
+        logger.debug("Adding filter in root logger to suppress child logger records to bubble up")
+        for handler in logging.root.handlers:
+            # It'll add a filter to suppress any child logger from self.service
+            # Where service is Order, it'll reject parent logger Order,
+            # and child loggers such as Order.checkout, Order.shared
+            handler.addFilter(SuppressFilter(self.service))
+
+        # as per bug in #249, we should not be pre-configuring an existing logger
+        # therefore we set a custom attribute in the Logger that will be returned
+        # std logging will return the same Logger with our attribute if name is reused
+        logger.debug(f"Marking logger {self.service} as preconfigured")
+        self._logger.init = True
 
     def _configure_sampling(self):
         """Dynamically set log level based on sampling rate
```

tests/functional/test_aws_lambda_logging.py (+41 -27)

```diff
@@ -1,6 +1,8 @@
 """aws_lambda_logging tests."""
 import io
 import json
+import random
+import string
 
 import pytest
 
@@ -12,9 +14,15 @@ def stdout():
     return io.StringIO()
 
 
+@pytest.fixture
+def service_name():
+    chars = string.ascii_letters + string.digits
+    return "".join(random.SystemRandom().choice(chars) for _ in range(15))
+
+
 @pytest.mark.parametrize("level", ["DEBUG", "WARNING", "ERROR", "INFO", "CRITICAL"])
-def test_setup_with_valid_log_levels(stdout, level):
-    logger = Logger(level=level, stream=stdout, request_id="request id!", another="value")
+def test_setup_with_valid_log_levels(stdout, level, service_name):
+    logger = Logger(service=service_name, level=level, stream=stdout, request_id="request id!", another="value")
     msg = "This is a test"
     log_command = {
         "INFO": logger.info,
@@ -37,8 +45,8 @@ def test_setup_with_valid_log_levels(stdout, level):
     assert "exception" not in log_dict
 
 
-def test_logging_exception_traceback(stdout):
-    logger = Logger(level="DEBUG", stream=stdout)
+def test_logging_exception_traceback(stdout, service_name):
+    logger = Logger(service=service_name, level="DEBUG", stream=stdout)
 
     try:
         raise ValueError("Boom")
@@ -52,9 +60,9 @@ def test_logging_exception_traceback(stdout):
     assert "exception" in log_dict
 
 
-def test_setup_with_invalid_log_level(stdout):
+def test_setup_with_invalid_log_level(stdout, service_name):
     with pytest.raises(ValueError) as e:
-        Logger(level="not a valid log level")
+        Logger(service=service_name, level="not a valid log level")
     assert "Unknown level" in e.value.args[0]
 
 
@@ -65,8 +73,8 @@ def check_log_dict(log_dict):
     assert "message" in log_dict
 
 
-def test_with_dict_message(stdout):
-    logger = Logger(level="DEBUG", stream=stdout)
+def test_with_dict_message(stdout, service_name):
+    logger = Logger(service=service_name, level="DEBUG", stream=stdout)
 
     msg = {"x": "isx"}
     logger.critical(msg)
@@ -76,8 +84,8 @@ def test_with_dict_message(stdout):
     assert msg == log_dict["message"]
 
 
-def test_with_json_message(stdout):
-    logger = Logger(stream=stdout)
+def test_with_json_message(stdout, service_name):
+    logger = Logger(service=service_name, stream=stdout)
 
     msg = {"x": "isx"}
     logger.info(json.dumps(msg))
@@ -87,8 +95,8 @@ def test_with_json_message(stdout):
     assert msg == log_dict["message"]
 
 
-def test_with_unserializable_value_in_message(stdout):
-    logger = Logger(level="DEBUG", stream=stdout)
+def test_with_unserializable_value_in_message(stdout, service_name):
+    logger = Logger(service=service_name, level="DEBUG", stream=stdout)
 
     class Unserializable:
         pass
@@ -101,12 +109,17 @@ class Unserializable:
     assert log_dict["message"]["x"].startswith("<")
 
 
-def test_with_unserializable_value_in_message_custom(stdout):
+def test_with_unserializable_value_in_message_custom(stdout, service_name):
     class Unserializable:
         pass
 
     # GIVEN a custom json_default
-    logger = Logger(level="DEBUG", stream=stdout, json_default=lambda o: f"<non-serializable: {type(o).__name__}>")
+    logger = Logger(
+        service=service_name,
+        level="DEBUG",
+        stream=stdout,
+        json_default=lambda o: f"<non-serializable: {type(o).__name__}>",
+    )
 
     # WHEN we log a message
     logger.debug({"x": Unserializable()})
@@ -118,9 +131,9 @@ class Unserializable:
     assert "json_default" not in log_dict
 
 
-def test_log_dict_key_seq(stdout):
+def test_log_dict_key_seq(stdout, service_name):
     # GIVEN the default logger configuration
-    logger = Logger(stream=stdout)
+    logger = Logger(service=service_name, stream=stdout)
 
     # WHEN logging a message
     logger.info("Message")
@@ -131,9 +144,9 @@ def test_log_dict_key_seq(stdout):
     assert ",".join(list(log_dict.keys())[:4]) == "level,location,message,timestamp"
 
 
-def test_log_dict_key_custom_seq(stdout):
+def test_log_dict_key_custom_seq(stdout, service_name):
     # GIVEN a logger configuration with log_record_order set to ["message"]
-    logger = Logger(stream=stdout, log_record_order=["message"])
+    logger = Logger(service=service_name, stream=stdout, log_record_order=["message"])
 
     # WHEN logging a message
     logger.info("Message")
@@ -144,9 +157,9 @@ def test_log_dict_key_custom_seq(stdout):
     assert list(log_dict.keys())[0] == "message"
 
 
-def test_log_custom_formatting(stdout):
+def test_log_custom_formatting(stdout, service_name):
     # GIVEN a logger where we have a custom `location`, 'datefmt' format
-    logger = Logger(stream=stdout, location="[%(funcName)s] %(module)s", datefmt="fake-datefmt")
+    logger = Logger(service=service_name, stream=stdout, location="[%(funcName)s] %(module)s", datefmt="fake-datefmt")
 
     # WHEN logging a message
     logger.info("foo")
@@ -158,7 +171,7 @@ def test_log_custom_formatting(stdout):
     assert log_dict["timestamp"] == "fake-datefmt"
 
 
-def test_log_dict_key_strip_nones(stdout):
+def test_log_dict_key_strip_nones(stdout, service_name):
     # GIVEN a logger confirmation where we set `location` and `timestamp` to None
     # Note: level, sampling_rate and service can not be suppressed
     logger = Logger(stream=stdout, level=None, location=None, timestamp=None, sampling_rate=None, service=None)
@@ -170,14 +183,15 @@ def test_log_dict_key_strip_nones(stdout):
 
     # THEN the keys should only include `level`, `message`, `service`, `sampling_rate`
     assert sorted(log_dict.keys()) == ["level", "message", "sampling_rate", "service"]
+    assert log_dict["service"] == "service_undefined"
 
 
-def test_log_dict_xray_is_present_when_tracing_is_enabled(stdout, monkeypatch):
+def test_log_dict_xray_is_present_when_tracing_is_enabled(stdout, monkeypatch, service_name):
     # GIVEN a logger is initialized within a Lambda function with X-Ray enabled
     trace_id = "1-5759e988-bd862e3fe1be46a994272793"
     trace_header = f"Root={trace_id};Parent=53995c3f42cd8ad8;Sampled=1"
     monkeypatch.setenv(name="_X_AMZN_TRACE_ID", value=trace_header)
-    logger = Logger(stream=stdout)
+    logger = Logger(service=service_name, stream=stdout)
 
     # WHEN logging a message
    logger.info("foo")
@@ -190,9 +204,9 @@ def test_log_dict_xray_is_present_when_tracing_is_enabled(stdout, monkeypatch):
     monkeypatch.delenv(name="_X_AMZN_TRACE_ID")
 
 
-def test_log_dict_xray_is_not_present_when_tracing_is_disabled(stdout, monkeypatch):
+def test_log_dict_xray_is_not_present_when_tracing_is_disabled(stdout, monkeypatch, service_name):
     # GIVEN a logger is initialized within a Lambda function with X-Ray disabled (default)
-    logger = Logger(stream=stdout)
+    logger = Logger(service=service_name, stream=stdout)
 
     # WHEN logging a message
     logger.info("foo")
@@ -203,12 +217,12 @@ def test_log_dict_xray_is_not_present_when_tracing_is_disabled(stdout, monkeypat
     assert "xray_trace_id" not in log_dict
 
 
-def test_log_dict_xray_is_updated_when_tracing_id_changes(stdout, monkeypatch):
+def test_log_dict_xray_is_updated_when_tracing_id_changes(stdout, monkeypatch, service_name):
    # GIVEN a logger is initialized within a Lambda function with X-Ray enabled
     trace_id = "1-5759e988-bd862e3fe1be46a994272793"
     trace_header = f"Root={trace_id};Parent=53995c3f42cd8ad8;Sampled=1"
     monkeypatch.setenv(name="_X_AMZN_TRACE_ID", value=trace_header)
-    logger = Logger(stream=stdout)
+    logger = Logger(service=service_name, stream=stdout)
 
     # WHEN logging a message
     logger.info("foo")
```
