|
7 | 7 |
|
8 | 8 | from aws_lambda_powertools import Logger, Tracer
|
9 | 9 | from aws_lambda_powertools.logging.exceptions import InvalidLoggerSamplingRateError
|
10 |
| -from aws_lambda_powertools.logging.logger import JsonFormatter, set_package_logger |
| 10 | +from aws_lambda_powertools.logging.logger import set_package_logger |
11 | 11 |
|
12 | 12 |
|
13 | 13 | @pytest.fixture
|
@@ -39,227 +39,220 @@ def lambda_context():
|
39 | 39 | return namedtuple("LambdaContext", lambda_context.keys())(*lambda_context.values())
|
40 | 40 |
|
41 | 41 |
|
@pytest.fixture
def lambda_event():
    # Minimal Lambda event payload shared by the event-logging tests.
    return dict(greeting="hello")
def capture_logging_output(stdout):
    """Deserialize the single JSON log record held by the *stdout* buffer."""
    raw = stdout.getvalue()
    return json.loads(raw)
def capture_multiple_logging_statements_output(stdout):
    """Deserialize every newline-delimited JSON log record in the *stdout* buffer."""
    records = []
    for raw_line in stdout.getvalue().split("\n"):
        if raw_line:
            records.append(json.loads(raw_line.strip()))
    return records
def test_setup_service_name(root_logger, stdout):
    expected_service = "payment"
    # GIVEN Logger is initialized
    # WHEN service is explicitly defined
    logger = Logger(service=expected_service, stream=stdout)

    logger.info("Hello")

    # THEN the service field should equal the service given
    log = capture_logging_output(stdout)
    assert log["service"] == expected_service
|
53 | 66 |
|
54 | 67 |
|
def test_setup_no_service_name(stdout):
    # GIVEN Logger is initialized
    # WHEN no service is explicitly defined
    logger = Logger(stream=stdout)

    logger.info("Hello")

    # THEN the service field should fall back to "service_undefined"
    assert capture_logging_output(stdout)["service"] == "service_undefined"
|
64 | 78 |
|
65 | 79 |
|
def test_setup_service_env_var(monkeypatch, stdout):
    expected_service = "payment"
    # GIVEN Logger is initialized
    # WHEN service is explicitly defined via POWERTOOLS_SERVICE_NAME env
    monkeypatch.setenv("POWERTOOLS_SERVICE_NAME", expected_service)

    logger = Logger(stream=stdout)

    logger.info("Hello")

    # THEN the service field should equal the POWERTOOLS_SERVICE_NAME value
    assert capture_logging_output(stdout)["service"] == expected_service
|
78 | 92 |
|
79 | 93 |
|
80 |
def test_setup_sampling_rate_env_var(monkeypatch, stdout):
    # GIVEN Logger is initialized
    # WHEN sampling rate is explicitly set to 100% via POWERTOOLS_LOGGER_SAMPLE_RATE env
    expected_rate = "1"
    monkeypatch.setenv("POWERTOOLS_LOGGER_SAMPLE_RATE", expected_rate)

    logger = Logger(stream=stdout, level="INFO")
    logger.debug("I am being sampled")

    # THEN sampling_rate should equal the POWERTOOLS_LOGGER_SAMPLE_RATE value,
    # the effective log level should be DEBUG,
    # and the debug statement should land in stdout
    log = capture_logging_output(stdout)
    assert log["sampling_rate"] == expected_rate
    assert log["level"] == "DEBUG"
    assert log["message"] == "I am being sampled"
|
96 | 109 |
|
97 | 110 |
|
def test_inject_lambda_context(lambda_context, stdout):
    # GIVEN Logger is initialized
    logger = Logger(stream=stdout)

    # WHEN a lambda function is decorated with logger
    @logger.inject_lambda_context
    def handler(event, context):
        logger.info("Hello")

    handler({}, lambda_context)

    # THEN lambda contextual info should always be present in the logs
    log = capture_logging_output(stdout)
    for context_key in (
        "function_name",
        "function_memory_size",
        "function_arn",
        "function_request_id",
    ):
        assert context_key in log
|
121 | 132 |
|
122 | 133 |
|
123 |
def test_inject_lambda_context_log_event_request(lambda_context, stdout, lambda_event):
    # GIVEN Logger is initialized
    logger = Logger(stream=stdout)

    # WHEN a lambda function is decorated with logger instructed to log the event
    @logger.inject_lambda_context(log_event=True)
    def handler(event, context):
        logger.info("Hello")

    handler(lambda_event, lambda_context)

    # THEN the first log statement should carry the event received from Lambda
    first_statement, _second_statement = capture_multiple_logging_statements_output(stdout)
    assert first_statement["message"] == lambda_event
|
144 | 149 |
|
145 |
def test_inject_lambda_context_log_event_request_env_var(monkeypatch, lambda_context, stdout, lambda_event):
    # GIVEN Logger is initialized
    monkeypatch.setenv("POWERTOOLS_LOGGER_LOG_EVENT", "true")
    logger = Logger(stream=stdout)

    # WHEN a lambda function is decorated with logger instructed to log the event
    # via POWERTOOLS_LOGGER_LOG_EVENT env
    @logger.inject_lambda_context
    def handler(event, context):
        logger.info("Hello")

    handler(lambda_event, lambda_context)

    # THEN the first log statement should carry the event received from Lambda
    first_statement, _second_statement = capture_multiple_logging_statements_output(stdout)
    assert first_statement["message"] == lambda_event
172 | 166 |
|
173 |
def test_inject_lambda_context_log_no_request_by_default(lambda_context, stdout, lambda_event):
    # Fix: dropped the `monkeypatch` fixture — it was requested but never used
    # (leftover from the env-var variant of this test, which does need it).

    # GIVEN Logger is initialized
    logger = Logger(stream=stdout)

    # WHEN a lambda function is decorated with logger and log_event is not set
    @logger.inject_lambda_context
    def handler(event, context):
        logger.info("Hello")

    handler(lambda_event, lambda_context)

    # THEN logger should not log the event received by the lambda handler
    log = capture_logging_output(stdout)
    assert log["message"] != lambda_event
197 | 182 |
|
198 | 183 |
|
def test_inject_lambda_cold_start(lambda_context, stdout):
    # cold_start can be false as it's a global variable in the Logger module,
    # so we reset it to simulate the correct behaviour
    # since Lambda will only import our logger lib once per concurrent execution.
    # Aliased import so the module object doesn't clash with the Logger instance below.
    from aws_lambda_powertools.logging import logger as logger_module

    logger_module.is_cold_start = True

    # GIVEN Logger is initialized
    logger = Logger(stream=stdout)

    # WHEN a lambda function is decorated with logger, and called twice
    @logger.inject_lambda_context
    def handler(event, context):
        logger.info("Hello")

    handler({}, lambda_context)
    handler({}, lambda_context)

    # THEN the cold_start key should only be true in the first call
    first_log, second_log = capture_multiple_logging_statements_output(stdout)
    assert first_log["cold_start"] is True
    assert second_log["cold_start"] is False
236 | 207 |
|
237 | 208 |
|
238 |
def test_package_logger_stream(stdout):
    # GIVEN the package logger "aws_lambda_powertools" is set up against our stream
    set_package_logger(stream=stdout)

    # WHEN Tracer is initialized in disabled mode
    Tracer(disabled=True)

    # THEN Tracer's debug log statement should be emitted
    # and the package logger level should be DEBUG
    package_logger = logging.getLogger("aws_lambda_powertools")
    assert "Tracing has been disabled" in stdout.getvalue()
    assert package_logger.level == logging.DEBUG
245 | 221 |
|
246 | 222 |
|
247 |
def test_package_logger_format(capsys):
    # GIVEN the package logger "aws_lambda_powertools" is explicitly set up
    # with a custom formatter
    custom_formatter = logging.Formatter("message=%(message)s")
    set_package_logger(formatter=custom_formatter)

    # WHEN Tracer is initialized in disabled mode
    Tracer(disabled=True)

    # THEN Tracer's debug log statement should be logged using the `message=` format
    # and the package logger level should be DEBUG
    captured = capsys.readouterr().out
    package_logger = logging.getLogger("aws_lambda_powertools")
    assert "message=" in captured
    assert package_logger.level == logging.DEBUG
253 | 237 |
|
254 | 238 |
|
def test_logger_append_duplicated(stdout):
    # GIVEN Logger is initialized with a request_id field
    logger = Logger(stream=stdout, request_id="value")

    # WHEN `request_id` is appended to the existing structured log
    # with a different value
    logger.structure_logs(append=True, request_id="new_value")
    logger.info("log")

    # THEN subsequent log statements should carry the latest value
    assert capture_logging_output(stdout)["request_id"] == "new_value"
|
261 | 251 |
|
262 | 252 |
|
def test_logger_invalid_sampling_rate():
    # GIVEN Logger is initialized
    # WHEN sampling_rate receives a non-numeric value
    # THEN InvalidLoggerSamplingRateError should be raised
    with pytest.raises(InvalidLoggerSamplingRateError):
        Logger(sampling_rate="TEST")
|
0 commit comments