From e1ee083e1a872f2353e5a4e94660e798ee50f609 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 21 Apr 2020 15:03:09 +0100 Subject: [PATCH 01/23] fix: set NullHandler for package logger --- python/aws_lambda_powertools/__init__.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/python/aws_lambda_powertools/__init__.py b/python/aws_lambda_powertools/__init__.py index b048ddfe742..c8ad30f77e7 100644 --- a/python/aws_lambda_powertools/__init__.py +++ b/python/aws_lambda_powertools/__init__.py @@ -1,5 +1,9 @@ # -*- coding: utf-8 -*- """Top-level package for Lambda Python Powertools.""" +import logging __author__ = """Amazon Web Services""" + + +logging.getLogger('aws-lambda-powertools').addHandler(logging.NullHandler()) From 976144da1fa98ec0da142b7683118305d68d4d08 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 21 Apr 2020 15:52:42 +0100 Subject: [PATCH 02/23] improv: remove root logger logic, lib --- .../logging/aws_lambda_logging.py | 98 ------------------- .../aws_lambda_powertools/logging/logger.py | 83 +++++++++++++++- 2 files changed, 79 insertions(+), 102 deletions(-) delete mode 100644 python/aws_lambda_powertools/logging/aws_lambda_logging.py diff --git a/python/aws_lambda_powertools/logging/aws_lambda_logging.py b/python/aws_lambda_powertools/logging/aws_lambda_logging.py deleted file mode 100644 index 1b42ec3c707..00000000000 --- a/python/aws_lambda_powertools/logging/aws_lambda_logging.py +++ /dev/null @@ -1,98 +0,0 @@ -"""Microlibrary to simplify logging in AWS Lambda. -Originally taken from https://gitlab.com/hadrien/aws_lambda_logging/ -""" -import json -import logging - - -def json_formatter(obj): - """Formatter for unserialisable values.""" - return str(obj) - - -class JsonFormatter(logging.Formatter): - """AWS Lambda Logging formatter. - - Formats the log message as a JSON encoded string. If the message is a - dict it will be used directly. If the message can be parsed as JSON, then - the parse d value is used in the output record. - """ - - def __init__(self, **kwargs): - """Return a JsonFormatter instance. - - The `json_default` kwarg is used to specify a formatter for otherwise - unserialisable values. It must not throw. Defaults to a function that - coerces the value to a string. - - Other kwargs are used to specify log field format strings. - """ - datefmt = kwargs.pop("datefmt", None) - - super(JsonFormatter, self).__init__(datefmt=datefmt) - self.format_dict = { - "timestamp": "%(asctime)s", - "level": "%(levelname)s", - "location": "%(name)s.%(funcName)s:%(lineno)d", - } - self.format_dict.update(kwargs) - self.default_json_formatter = kwargs.pop("json_default", json_formatter) - - def format(self, record): # noqa: A003 - record_dict = record.__dict__.copy() - record_dict["asctime"] = self.formatTime(record, self.datefmt) - - log_dict = {k: v % record_dict for k, v in self.format_dict.items() if v} - - if isinstance(record_dict["msg"], dict): - log_dict["message"] = record_dict["msg"] - else: - log_dict["message"] = record.getMessage() - - # Attempt to decode the message as JSON, if so, merge it with the - # overall message for clarity. 
- try: - log_dict["message"] = json.loads(log_dict["message"]) - except (TypeError, ValueError): - pass - - if record.exc_info: - # Cache the traceback text to avoid converting it multiple times - # (it's constant anyway) - # from logging.Formatter:format - if not record.exc_text: - record.exc_text = self.formatException(record.exc_info) - - if record.exc_text: - log_dict["exception"] = record.exc_text - - json_record = json.dumps(log_dict, default=self.default_json_formatter) - - if hasattr(json_record, "decode"): # pragma: no cover - json_record = json_record.decode("utf-8") - - return json_record - - -def setup(level="DEBUG", formatter_cls=JsonFormatter, boto_level=None, **kwargs): - """Overall Metadata Formatting.""" - if formatter_cls: # pragma: no cover - for handler in logging.root.handlers: - handler.setFormatter(formatter_cls(**kwargs)) - - try: - logging.root.setLevel(level) - except ValueError: - logging.root.error("Invalid log level: %s", level) - level = "INFO" - logging.root.setLevel(level) - - if not boto_level: - boto_level = level - - try: # pragma: no cover - logging.getLogger("boto").setLevel(boto_level) - logging.getLogger("boto3").setLevel(boto_level) - logging.getLogger("botocore").setLevel(boto_level) - except ValueError: # pragma: no cover - logging.root.error("Invalid log level: %s", boto_level) diff --git a/python/aws_lambda_powertools/logging/logger.py b/python/aws_lambda_powertools/logging/logger.py index 32dc1be6c1a..684bbb8bec5 100644 --- a/python/aws_lambda_powertools/logging/logger.py +++ b/python/aws_lambda_powertools/logging/logger.py @@ -1,14 +1,15 @@ import functools import itertools +import json import logging import os import random +import sys import warnings from distutils.util import strtobool from typing import Any, Callable, Dict from ..helper.models import MetricUnit, build_lambda_context_model, build_metric_unit_from_str -from . import aws_lambda_logging logger = logging.getLogger(__name__) logger.setLevel(os.getenv("LOG_LEVEL", "INFO")) @@ -16,6 +17,79 @@ is_cold_start = True +def json_formatter(obj): + """Formatter for unserialisable values.""" + return str(obj) + + +class JsonFormatter(logging.Formatter): + """AWS Lambda Logging formatter. + + Formats the log message as a JSON encoded string. If the message is a + dict it will be used directly. If the message can be parsed as JSON, then + the parse d value is used in the output record. + + Microlibrary to simplify logging in AWS Lambda. + Originally taken from https://gitlab.com/hadrien/aws_lambda_logging/ + + """ + + def __init__(self, **kwargs): + """Return a JsonFormatter instance. + + The `json_default` kwarg is used to specify a formatter for otherwise + unserialisable values. It must not throw. Defaults to a function that + coerces the value to a string. + + Other kwargs are used to specify log field format strings. 
+ """ + datefmt = kwargs.pop("datefmt", None) + + super(JsonFormatter, self).__init__(datefmt=datefmt) + self.format_dict = { + "timestamp": "%(asctime)s", + "level": "%(levelname)s", + "location": "%(name)s.%(funcName)s:%(lineno)d", + } + self.format_dict.update(kwargs) + self.default_json_formatter = kwargs.pop("json_default", json_formatter) + + def format(self, record): # noqa: A003 + record_dict = record.__dict__.copy() + record_dict["asctime"] = self.formatTime(record, self.datefmt) + + log_dict = {k: v % record_dict for k, v in self.format_dict.items() if v} + + if isinstance(record_dict["msg"], dict): + log_dict["message"] = record_dict["msg"] + else: + log_dict["message"] = record.getMessage() + + # Attempt to decode the message as JSON, if so, merge it with the + # overall message for clarity. + try: + log_dict["message"] = json.loads(log_dict["message"]) + except (TypeError, ValueError): + pass + + if record.exc_info: + # Cache the traceback text to avoid converting it multiple times + # (it's constant anyway) + # from logging.Formatter:format + if not record.exc_text: + record.exc_text = self.formatException(record.exc_info) + + if record.exc_text: + log_dict["exception"] = record.exc_text + + json_record = json.dumps(log_dict, default=self.default_json_formatter) + + if hasattr(json_record, "decode"): # pragma: no cover + json_record = json_record.decode("utf-8") + + return json_record + + def logger_setup(service: str = "service_undefined", level: str = "INFO", sampling_rate: float = 0.0, **kwargs): """Setups root logger to format statements in JSON. @@ -64,8 +138,12 @@ def logger_setup(service: str = "service_undefined", level: str = "INFO", sampli service = os.getenv("POWERTOOLS_SERVICE_NAME") or service sampling_rate = os.getenv("POWERTOOLS_LOGGER_SAMPLE_RATE") or sampling_rate log_level = os.getenv("LOG_LEVEL") or level + handler = logging.StreamHandler(sys.stdout) logger = logging.getLogger(name=service) + handler.setFormatter(JsonFormatter(service=service, sampling_rate=sampling_rate, **kwargs)) + logger.addHandler(handler) + try: if sampling_rate and random.random() <= float(sampling_rate): log_level = logging.DEBUG @@ -76,9 +154,6 @@ def logger_setup(service: str = "service_undefined", level: str = "INFO", sampli logger.setLevel(log_level) - # Patch logger by structuring its outputs as JSON - aws_lambda_logging.setup(level=log_level, service=service, sampling_rate=sampling_rate, **kwargs) - return logger From ad408cdfe8902239b2301e45c25212531967578d Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 21 Apr 2020 17:23:59 +0100 Subject: [PATCH 03/23] fix: update exception type --- python/aws_lambda_powertools/logging/logger.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/python/aws_lambda_powertools/logging/logger.py b/python/aws_lambda_powertools/logging/logger.py index 684bbb8bec5..de2df1ee112 100644 --- a/python/aws_lambda_powertools/logging/logger.py +++ b/python/aws_lambda_powertools/logging/logger.py @@ -29,7 +29,6 @@ class JsonFormatter(logging.Formatter): dict it will be used directly. If the message can be parsed as JSON, then the parse d value is used in the output record. - Microlibrary to simplify logging in AWS Lambda. Originally taken from https://gitlab.com/hadrien/aws_lambda_logging/ """ @@ -69,7 +68,7 @@ def format(self, record): # noqa: A003 # overall message for clarity. 
try: log_dict["message"] = json.loads(log_dict["message"]) - except (TypeError, ValueError): + except (json.decoder.JSONDecodeError, TypeError, ValueError): pass if record.exc_info: From 8978a2dc8c3c8bd1df0d07a54ed76a61fecb852f Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 21 Apr 2020 17:45:16 +0100 Subject: [PATCH 04/23] improv: propagate log level if set, null otherwise --- python/aws_lambda_powertools/__init__.py | 8 +++++++- python/aws_lambda_powertools/logging/logger.py | 1 - python/aws_lambda_powertools/metrics/base.py | 1 - python/aws_lambda_powertools/metrics/metric.py | 1 - python/aws_lambda_powertools/metrics/metrics.py | 1 - .../aws_lambda_powertools/middleware_factory/factory.py | 1 - python/aws_lambda_powertools/tracing/tracer.py | 1 - 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/python/aws_lambda_powertools/__init__.py b/python/aws_lambda_powertools/__init__.py index c8ad30f77e7..cb337ab23ca 100644 --- a/python/aws_lambda_powertools/__init__.py +++ b/python/aws_lambda_powertools/__init__.py @@ -2,8 +2,14 @@ """Top-level package for Lambda Python Powertools.""" import logging +import os __author__ = """Amazon Web Services""" -logging.getLogger('aws-lambda-powertools').addHandler(logging.NullHandler()) +logger = logging.getLogger("aws_lambda_powertools") +log_level = os.getenv("POWERTOOLS_LOG_LEVEL") +logger.addHandler(logging.NullHandler()) + +if log_level: + logger.setLevel(log_level) diff --git a/python/aws_lambda_powertools/logging/logger.py b/python/aws_lambda_powertools/logging/logger.py index de2df1ee112..3c28c764786 100644 --- a/python/aws_lambda_powertools/logging/logger.py +++ b/python/aws_lambda_powertools/logging/logger.py @@ -12,7 +12,6 @@ from ..helper.models import MetricUnit, build_lambda_context_model, build_metric_unit_from_str logger = logging.getLogger(__name__) -logger.setLevel(os.getenv("LOG_LEVEL", "INFO")) is_cold_start = True diff --git a/python/aws_lambda_powertools/metrics/base.py b/python/aws_lambda_powertools/metrics/base.py index 448bfc37e02..05c3139fc58 100644 --- a/python/aws_lambda_powertools/metrics/base.py +++ b/python/aws_lambda_powertools/metrics/base.py @@ -13,7 +13,6 @@ from .exceptions import MetricUnitError, MetricValueError, SchemaValidationError, UniqueNamespaceError logger = logging.getLogger(__name__) -logger.setLevel(os.getenv("LOG_LEVEL", "INFO")) _schema_path = pathlib.Path(__file__).parent / "./schema.json" with _schema_path.open() as f: diff --git a/python/aws_lambda_powertools/metrics/metric.py b/python/aws_lambda_powertools/metrics/metric.py index 85ce527e576..7c536b2d18e 100644 --- a/python/aws_lambda_powertools/metrics/metric.py +++ b/python/aws_lambda_powertools/metrics/metric.py @@ -8,7 +8,6 @@ from aws_lambda_powertools.metrics.base import MetricManager logger = logging.getLogger(__name__) -logger.setLevel(os.getenv("LOG_LEVEL", "INFO")) class SingleMetric(MetricManager): diff --git a/python/aws_lambda_powertools/metrics/metrics.py b/python/aws_lambda_powertools/metrics/metrics.py index 24d8f2b93a1..66f56beefc2 100644 --- a/python/aws_lambda_powertools/metrics/metrics.py +++ b/python/aws_lambda_powertools/metrics/metrics.py @@ -7,7 +7,6 @@ from aws_lambda_powertools.metrics.base import MetricManager logger = logging.getLogger(__name__) -logger.setLevel(os.getenv("LOG_LEVEL", "INFO")) class Metrics(MetricManager): diff --git a/python/aws_lambda_powertools/middleware_factory/factory.py b/python/aws_lambda_powertools/middleware_factory/factory.py index 4dcab2adf33..aaf1a0a3240 100644 --- 
a/python/aws_lambda_powertools/middleware_factory/factory.py +++ b/python/aws_lambda_powertools/middleware_factory/factory.py @@ -8,7 +8,6 @@ from ..tracing import Tracer logger = logging.getLogger(__name__) -logger.setLevel(os.getenv("LOG_LEVEL", "INFO")) def lambda_handler_decorator(decorator: Callable = None, trace_execution=False): diff --git a/python/aws_lambda_powertools/tracing/tracer.py b/python/aws_lambda_powertools/tracing/tracer.py index 0f3e3cff8bb..bf67db22ab0 100644 --- a/python/aws_lambda_powertools/tracing/tracer.py +++ b/python/aws_lambda_powertools/tracing/tracer.py @@ -9,7 +9,6 @@ is_cold_start = True logger = logging.getLogger(__name__) -logger.setLevel(os.getenv("LOG_LEVEL", "INFO")) class Tracer: From fa2fd0ed8589f0a40dd0aee7c82bccee7bc7213e Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 21 Apr 2020 17:51:36 +0100 Subject: [PATCH 05/23] fix: explicit wins over env var --- python/aws_lambda_powertools/logging/logger.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/python/aws_lambda_powertools/logging/logger.py b/python/aws_lambda_powertools/logging/logger.py index 3c28c764786..d21600da0aa 100644 --- a/python/aws_lambda_powertools/logging/logger.py +++ b/python/aws_lambda_powertools/logging/logger.py @@ -88,8 +88,8 @@ def format(self, record): # noqa: A003 return json_record -def logger_setup(service: str = "service_undefined", level: str = "INFO", sampling_rate: float = 0.0, **kwargs): - """Setups root logger to format statements in JSON. +def logger_setup(service: str = None, level: str = None, sampling_rate: float = 0.0, **kwargs): + """Creates and setups a logger to format statements in JSON. Includes service name and any additional key=value into logs It also accepts both service name or level explicitly via env vars @@ -133,12 +133,12 @@ def logger_setup(service: str = "service_undefined", level: str = "INFO", sampli logger.info("Hello") """ - service = os.getenv("POWERTOOLS_SERVICE_NAME") or service - sampling_rate = os.getenv("POWERTOOLS_LOGGER_SAMPLE_RATE") or sampling_rate - log_level = os.getenv("LOG_LEVEL") or level + service = service or os.getenv("POWERTOOLS_SERVICE_NAME") or "service_undefined" + sampling_rate = sampling_rate or os.getenv("POWERTOOLS_LOGGER_SAMPLE_RATE") + log_level = level or os.getenv("LOG_LEVEL") or logging.INFO + handler = logging.StreamHandler(sys.stdout) logger = logging.getLogger(name=service) - handler.setFormatter(JsonFormatter(service=service, sampling_rate=sampling_rate, **kwargs)) logger.addHandler(handler) From 96c70d9fcdf82ae010acd00e485adca0cfd641cf Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 21 Apr 2020 17:53:45 +0100 Subject: [PATCH 06/23] chore: fix test naming --- .../functional/{test_utils.py => test_middleware_factory.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename python/tests/functional/{test_utils.py => test_middleware_factory.py} (100%) diff --git a/python/tests/functional/test_utils.py b/python/tests/functional/test_middleware_factory.py similarity index 100% rename from python/tests/functional/test_utils.py rename to python/tests/functional/test_middleware_factory.py From 192047eb606c894cebe20033cc00ce4b563f89a8 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Tue, 21 Apr 2020 18:01:05 +0100 Subject: [PATCH 07/23] fix: exception logging --- python/aws_lambda_powertools/logging/logger.py | 5 +++-- .../aws_lambda_powertools/middleware_factory/factory.py | 4 ++-- python/aws_lambda_powertools/tracing/tracer.py | 8 ++++---- 3 files changed, 9 
insertions(+), 8 deletions(-) diff --git a/python/aws_lambda_powertools/logging/logger.py b/python/aws_lambda_powertools/logging/logger.py index d21600da0aa..fd4e1314acb 100644 --- a/python/aws_lambda_powertools/logging/logger.py +++ b/python/aws_lambda_powertools/logging/logger.py @@ -136,7 +136,7 @@ def logger_setup(service: str = None, level: str = None, sampling_rate: float = service = service or os.getenv("POWERTOOLS_SERVICE_NAME") or "service_undefined" sampling_rate = sampling_rate or os.getenv("POWERTOOLS_LOGGER_SAMPLE_RATE") log_level = level or os.getenv("LOG_LEVEL") or logging.INFO - + handler = logging.StreamHandler(sys.stdout) logger = logging.getLogger(name=service) handler.setFormatter(JsonFormatter(service=service, sampling_rate=sampling_rate, **kwargs)) @@ -144,10 +144,11 @@ def logger_setup(service: str = None, level: str = None, sampling_rate: float = try: if sampling_rate and random.random() <= float(sampling_rate): + logging.debug("Setting log level to Debug due to sampling rate") log_level = logging.DEBUG except ValueError: raise ValueError( - "fExpected a float value ranging 0 to 1, but received {sampling_rate} instead. Please review POWERTOOLS_LOGGER_SAMPLE_RATE environment variable." # noqa E501 + f"Expected a float value ranging 0 to 1, but received {sampling_rate} instead. Please review POWERTOOLS_LOGGER_SAMPLE_RATE environment variable." # noqa E501 ) logger.setLevel(log_level) diff --git a/python/aws_lambda_powertools/middleware_factory/factory.py b/python/aws_lambda_powertools/middleware_factory/factory.py index aaf1a0a3240..31a4c5dc58b 100644 --- a/python/aws_lambda_powertools/middleware_factory/factory.py +++ b/python/aws_lambda_powertools/middleware_factory/factory.py @@ -129,8 +129,8 @@ def wrapper(event, context): response = middleware() return response except Exception as err: - logger.error(f"Caught exception in {decorator.__qualname__}") - raise err + logger.exception(f"Caught exception in {decorator.__qualname__}", exc_info=True) + raise return wrapper diff --git a/python/aws_lambda_powertools/tracing/tracer.py b/python/aws_lambda_powertools/tracing/tracer.py index bf67db22ab0..a9c5b92ef29 100644 --- a/python/aws_lambda_powertools/tracing/tracer.py +++ b/python/aws_lambda_powertools/tracing/tracer.py @@ -174,9 +174,9 @@ def decorate(event, context): if response: self.put_metadata("lambda handler response", response) except Exception as err: - logger.debug("Exception received from lambda handler") + logger.exception("Exception received from lambda handler", exc_info=True) self.put_metadata(f"{self.service}_error", err) - raise err + raise finally: self.end_subsegment() @@ -222,9 +222,9 @@ def decorate(*args, **kwargs): if response is not None: self.put_metadata(f"{method_name} response", response) except Exception as err: - logger.debug(f"Exception received from '{method_name}'' method") + logger.exception(f"Exception received from '{method_name}'' method", exc_info=True) self.put_metadata(f"{method_name} error", err) - raise err + raise finally: self.end_subsegment() From ed547be035df9fc394b74e9878faf860aa924b0f Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Wed, 22 Apr 2020 11:52:47 +0100 Subject: [PATCH 08/23] improv: shorten log location --- python/aws_lambda_powertools/logging/logger.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/aws_lambda_powertools/logging/logger.py b/python/aws_lambda_powertools/logging/logger.py index fd4e1314acb..53197abe53a 100644 --- a/python/aws_lambda_powertools/logging/logger.py +++ 
b/python/aws_lambda_powertools/logging/logger.py @@ -47,7 +47,7 @@ def __init__(self, **kwargs): self.format_dict = { "timestamp": "%(asctime)s", "level": "%(levelname)s", - "location": "%(name)s.%(funcName)s:%(lineno)d", + "location": "%(funcName)s:%(lineno)d", } self.format_dict.update(kwargs) self.default_json_formatter = kwargs.pop("json_default", json_formatter) From ea1a742fcf4266f0d1761c7db5edd3232be4c0b8 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Wed, 22 Apr 2020 12:13:26 +0100 Subject: [PATCH 09/23] feat: add Logger class wrapper --- .../aws_lambda_powertools/logging/__init__.py | 4 +- .../aws_lambda_powertools/logging/logger.py | 178 +++++++++++++++++- 2 files changed, 177 insertions(+), 5 deletions(-) diff --git a/python/aws_lambda_powertools/logging/__init__.py b/python/aws_lambda_powertools/logging/__init__.py index 855abba4635..4c1bb2ec5c6 100644 --- a/python/aws_lambda_powertools/logging/__init__.py +++ b/python/aws_lambda_powertools/logging/__init__.py @@ -1,6 +1,6 @@ """Logging utility """ from ..helper.models import MetricUnit -from .logger import log_metric, logger_inject_lambda_context, logger_setup +from .logger import Logger, log_metric, logger_inject_lambda_context, logger_setup -__all__ = ["logger_setup", "logger_inject_lambda_context", "log_metric", "MetricUnit"] +__all__ = ["logger_setup", "logger_inject_lambda_context", "log_metric", "MetricUnit", "Logger"] diff --git a/python/aws_lambda_powertools/logging/logger.py b/python/aws_lambda_powertools/logging/logger.py index 53197abe53a..25c9541244e 100644 --- a/python/aws_lambda_powertools/logging/logger.py +++ b/python/aws_lambda_powertools/logging/logger.py @@ -1,3 +1,4 @@ +import copy import functools import itertools import json @@ -9,7 +10,8 @@ from distutils.util import strtobool from typing import Any, Callable, Dict -from ..helper.models import MetricUnit, build_lambda_context_model, build_metric_unit_from_str +from ..helper.models import (MetricUnit, build_lambda_context_model, + build_metric_unit_from_str) logger = logging.getLogger(__name__) @@ -219,7 +221,7 @@ def decorate(event, context): logger.info(event) lambda_context = build_lambda_context_model(context) - cold_start = __is_cold_start() + cold_start = _is_cold_start() logger_setup(cold_start=cold_start, **lambda_context.__dict__) @@ -228,7 +230,7 @@ def decorate(event, context): return decorate -def __is_cold_start() -> str: +def _is_cold_start() -> str: """Verifies whether is cold start and return a string used for struct logging Returns @@ -352,3 +354,173 @@ def __build_dimensions(**dimensions) -> str: dimension = ",".join(dimensions_list) return dimension + + +class Logger(logging.Logger): + """Creates and setups a logger to format statements in JSON. + + Includes service name and any additional key=value into logs + It also accepts both service name or level explicitly via env vars + + Environment variables + --------------------- + POWERTOOLS_SERVICE_NAME : str + service name + LOG_LEVEL: str + logging level (e.g. 
INFO, DEBUG) + POWERTOOLS_LOGGER_SAMPLE_RATE: float + samping rate ranging from 0 to 1, 1 being 100% sampling + + Parameters + ---------- + service : str, optional + service name to be appended in logs, by default "service_undefined" + level : str, optional + logging.level, by default "INFO" + sample_rate: float, optional + sample rate for debug calls within execution context defaults to 0.0 + stream: sys.stdout, optional + valid output for a logging stream, by default sys.stdout + + Example + ------- + **Setups structured logging in JSON for Lambda functions with explicit service name** + + >>> from aws_lambda_powertools.logging import Logger + >>> logger = Logger(service="payment") + >>> + >>> def handler(event, context): + logger.info("Hello") + + **Setups structured logging in JSON for Lambda functions using env vars** + + $ export POWERTOOLS_SERVICE_NAME="payment" + $ export POWERTOOLS_LOGGER_SAMPLE_RATE=0.01 # 1% debug sampling + >>> from aws_lambda_powertools.logging import Logger + >>> logger = Logger() + >>> + >>> def handler(event, context): + logger.info("Hello") + + **Append payment_id to previously setup structured log logger** + + >>> from aws_lambda_powertools.logging import Logger + >>> logger = Logger(service="payment") + >>> + >>> def handler(event, context): + logger.structure_logs(append=True, payment_id=event["payment_id"]) + logger.info("Hello") + + Parameters + ---------- + logging : logging.Logger + Inherits Logger + """ + + def __init__( + self, service: str = None, level: str = None, sampling_rate: float = 0.0, stream: sys.stdout = None, **kwargs + ): + self.service = service or os.getenv("POWERTOOLS_SERVICE_NAME") or "service_undefined" + self.sampling_rate = sampling_rate or os.getenv("POWERTOOLS_LOGGER_SAMPLE_RATE") + self.log_level = level or os.getenv("LOG_LEVEL") or logging.INFO + self.handler = logging.StreamHandler(stream) if stream is not None else logging.StreamHandler(sys.stdout) + self._default_log_keys = {"service": self.service, "sampling_rate": self.sampling_rate} + self.log_keys = copy.copy(self._default_log_keys) + + super().__init__(name=self.service, level=self.log_level) + + try: + if self.sampling_rate and random.random() <= float(self.sampling_rate): + logger.debug("Setting log level to Debug due to sampling rate") + log_level = logging.DEBUG + except ValueError: + raise ValueError( + f"Expected a float value ranging 0 to 1, but received {self.sampling_rate} instead. Please review POWERTOOLS_LOGGER_SAMPLE_RATE environment variable." # noqa E501 + ) + + self.setLevel(self.log_level) + self.structure_logs() + self.addHandler(self.handler) + + def inject_lambda_context(self, lambda_handler: Callable[[Dict, Any], Any] = None, log_event: bool = False): + """Decorator to capture Lambda contextual info and inject into struct logging + + Parameters + ---------- + log_event : bool, optional + Instructs logger to log Lambda Event, by default False + + Environment variables + --------------------- + POWERTOOLS_LOGGER_LOG_EVENT : str + instruct logger to log Lambda Event (e.g. 
`"true", "True", "TRUE"`) + + Example + ------- + **Captures Lambda contextual runtime info (e.g memory, arn, req_id)** + + from aws_lambda_powertools.logging import Logger + + logger = Logger(service="payment") + + @logger.inject_lambda_context + def handler(event, context): + logger.info("Hello") + + **Captures Lambda contextual runtime info and logs incoming request** + + from aws_lambda_powertools.logging import Logger + + logger = Logger(service="payment") + + @logger.inject_lambda_context(log_event=True) + def handler(event, context): + logger.info("Hello") + + Returns + ------- + decorate : Callable + Decorated lambda handler + """ + + # If handler is None we've been called with parameters + # Return a partial function with args filled + if lambda_handler is None: + logger.debug("Decorator called with parameters") + return functools.partial(logger_inject_lambda_context, log_event=log_event) + + log_event_env_option = str(os.getenv("POWERTOOLS_LOGGER_LOG_EVENT", "false")) + log_event = strtobool(log_event_env_option) or log_event + + @functools.wraps(lambda_handler) + def decorate(event, context): + if log_event: + logger.debug("Event received") + logger.info(event) + + lambda_context = build_lambda_context_model(context) + cold_start = _is_cold_start() + + self.structure_logs(append=True, cold_start=cold_start, **lambda_context.__dict__) + return lambda_handler(event, context) + + return decorate + + def structure_logs(self, append: bool = False, **kwargs): + """Sets logging formatting to JSON. + + Optionally, it can append keyword arguments + to an existing logger so it is available + across future log statements. + + Parameters + ---------- + append : bool, optional + [description], by default False + """ + if append: + self.handler.setFormatter(JsonFormatter(**self.log_keys, **kwargs)) + else: + self.handler.setFormatter(JsonFormatter(**kwargs)) + + self.log_keys.update(**kwargs) From fd9186b9c3fac64baca78216718fab87a5631d1f Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Wed, 22 Apr 2020 13:48:34 +0100 Subject: [PATCH 10/23] improv: add deprecation warning --- python/aws_lambda_powertools/logging/logger.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/python/aws_lambda_powertools/logging/logger.py b/python/aws_lambda_powertools/logging/logger.py index 25c9541244e..957b3249e41 100644 --- a/python/aws_lambda_powertools/logging/logger.py +++ b/python/aws_lambda_powertools/logging/logger.py @@ -10,8 +10,7 @@ from distutils.util import strtobool from typing import Any, Callable, Dict -from ..helper.models import (MetricUnit, build_lambda_context_model, - build_metric_unit_from_str) +from ..helper.models import MetricUnit, build_lambda_context_model, build_metric_unit_from_str logger = logging.getLogger(__name__) @@ -93,6 +92,8 @@ def format(self, record): # noqa: A003 def logger_setup(service: str = None, level: str = None, sampling_rate: float = 0.0, **kwargs): """Creates and setups a logger to format statements in JSON. 
+ **This will be removed when GA - Use `aws_lambda_powertools.logging.logger.Logger` instead** + Includes service name and any additional key=value into logs It also accepts both service name or level explicitly via env vars @@ -135,6 +136,8 @@ def logger_setup(service: str = None, level: str = None, sampling_rate: float = logger.info("Hello") """ + warnings.warn(message="This method will be removed in GA; use Logger instead", category=DeprecationWarning) + service = service or os.getenv("POWERTOOLS_SERVICE_NAME") or "service_undefined" sampling_rate = sampling_rate or os.getenv("POWERTOOLS_LOGGER_SAMPLE_RATE") log_level = level or os.getenv("LOG_LEVEL") or logging.INFO From a60ad0e65749fe2ffd72b740dd2bc8f0c62e3bc3 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Wed, 22 Apr 2020 15:43:06 +0100 Subject: [PATCH 11/23] BREAKING CHANGE: logger_setup, inject_lambda_ctx --- .../aws_lambda_powertools/logging/logger.py | 143 +++--------------- 1 file changed, 17 insertions(+), 126 deletions(-) diff --git a/python/aws_lambda_powertools/logging/logger.py b/python/aws_lambda_powertools/logging/logger.py index 957b3249e41..76ac8c8c45f 100644 --- a/python/aws_lambda_powertools/logging/logger.py +++ b/python/aws_lambda_powertools/logging/logger.py @@ -89,149 +89,40 @@ def format(self, record): # noqa: A003 return json_record -def logger_setup(service: str = None, level: str = None, sampling_rate: float = 0.0, **kwargs): - """Creates and setups a logger to format statements in JSON. - - **This will be removed when GA - Use `aws_lambda_powertools.logging.logger.Logger` instead** - - Includes service name and any additional key=value into logs - It also accepts both service name or level explicitly via env vars - - Environment variables - --------------------- - POWERTOOLS_SERVICE_NAME : str - service name - LOG_LEVEL: str - logging level (e.g. 
INFO, DEBUG) - POWERTOOLS_LOGGER_SAMPLE_RATE: float - samping rate ranging from 0 to 1, 1 being 100% sampling - - Parameters - ---------- - service : str, optional - service name to be appended in logs, by default "service_undefined" - level : str, optional - logging.level, by default "INFO" - sample_rate: float, optional - sample rate for debug calls within execution context defaults to 0 +def logger_setup(service: str = None, level: str = None, sampling_rate: float = 0.0, legacy: bool = False, **kwargs): + """DEPRECATED + + This will be removed when GA - Use `aws_lambda_powertools.logging.logger.Logger` instead Example ------- - Setups structured logging in JSON for Lambda functions with explicit service name + **Logger class - Same UX** - >>> from aws_lambda_powertools.logging import logger_setup - >>> logger = logger_setup(service="payment") - >>> - >>> def handler(event, context): - logger.info("Hello") - - Setups structured logging in JSON for Lambda functions using env vars - - $ export POWERTOOLS_SERVICE_NAME="payment" - $ export POWERTOOLS_LOGGER_SAMPLE_RATE=0.01 # 1% debug sampling - >>> from aws_lambda_powertools.logging import logger_setup - >>> logger = logger_setup() - >>> - >>> def handler(event, context): - logger.info("Hello") + from aws_lambda_powertools.logging import Logger + logger = Logger(service="payment") # same env var still applies """ warnings.warn(message="This method will be removed in GA; use Logger instead", category=DeprecationWarning) - - service = service or os.getenv("POWERTOOLS_SERVICE_NAME") or "service_undefined" - sampling_rate = sampling_rate or os.getenv("POWERTOOLS_LOGGER_SAMPLE_RATE") - log_level = level or os.getenv("LOG_LEVEL") or logging.INFO - - handler = logging.StreamHandler(sys.stdout) - logger = logging.getLogger(name=service) - handler.setFormatter(JsonFormatter(service=service, sampling_rate=sampling_rate, **kwargs)) - logger.addHandler(handler) - - try: - if sampling_rate and random.random() <= float(sampling_rate): - logging.debug("Setting log level to Debug due to sampling rate") - log_level = logging.DEBUG - except ValueError: - raise ValueError( - f"Expected a float value ranging 0 to 1, but received {sampling_rate} instead. Please review POWERTOOLS_LOGGER_SAMPLE_RATE environment variable." # noqa E501 - ) - - logger.setLevel(log_level) - - return logger + raise DeprecationWarning("Use Logger instead - This method will be removed when GA") def logger_inject_lambda_context(lambda_handler: Callable[[Dict, Any], Any] = None, log_event: bool = False): - """Decorator to capture Lambda contextual info and inject into struct logging + """DEPRECATED - Parameters - ---------- - log_event : bool, optional - Instructs logger to log Lambda Event, by default False - - Environment variables - --------------------- - POWERTOOLS_LOGGER_LOG_EVENT : str - instruct logger to log Lambda Event (e.g. 
`"true", "True", "TRUE"`) + This will be removed when GA - Use `aws_lambda_powertools.logging.logger.Logger` instead Example ------- - **Captures Lambda contextual runtime info (e.g memory, arn, req_id)** - - from aws_lambda_powertools.logging import logger_setup, logger_inject_lambda_context - import logging - - logger = logging.getLogger(__name__) - logging.setLevel(logging.INFO) - logger_setup() - - @logger_inject_lambda_context - def handler(event, context): - logger.info("Hello") + **Logger class - Same UX** - **Captures Lambda contextual runtime info and logs incoming request** - - from aws_lambda_powertools.logging import logger_setup, logger_inject_lambda_context - import logging - - logger = logging.getLogger(__name__) - logging.setLevel(logging.INFO) - logger_setup() - - @logger_inject_lambda_context(log_event=True) - def handler(event, context): - logger.info("Hello") - - Returns - ------- - decorate : Callable - Decorated lambda handler + from aws_lambda_powertools.logging import Logger + logger = Logger(service="payment") # same env var still applies + @logger.inject_lambda_context + def handler(evt, ctx): + pass """ - # If handler is None we've been called with parameters - # Return a partial function with args filled - if lambda_handler is None: - logger.debug("Decorator called with parameters") - return functools.partial(logger_inject_lambda_context, log_event=log_event) - - log_event_env_option = str(os.getenv("POWERTOOLS_LOGGER_LOG_EVENT", "false")) - log_event = strtobool(log_event_env_option) or log_event - - @functools.wraps(lambda_handler) - def decorate(event, context): - if log_event: - logger.debug("Event received") - logger.info(event) - - lambda_context = build_lambda_context_model(context) - cold_start = _is_cold_start() - - logger_setup(cold_start=cold_start, **lambda_context.__dict__) - - return lambda_handler(event, context) - - return decorate - + raise DeprecationWarning("Use Logger instead - This method will be removed when GA") def _is_cold_start() -> str: """Verifies whether is cold start and return a string used for struct logging From 9be37358f1f8a22cac6e1139e6f5cdeaa8e28ca3 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Wed, 22 Apr 2020 19:00:41 +0100 Subject: [PATCH 12/23] improv: update tests --- .../aws_lambda_powertools/logging/logger.py | 18 ++--- .../functional/test_aws_lambda_logging.py | 57 ++++++++------ python/tests/functional/test_logger.py | 78 +++++++++++-------- 3 files changed, 87 insertions(+), 66 deletions(-) diff --git a/python/aws_lambda_powertools/logging/logger.py b/python/aws_lambda_powertools/logging/logger.py index 76ac8c8c45f..87560a8f821 100644 --- a/python/aws_lambda_powertools/logging/logger.py +++ b/python/aws_lambda_powertools/logging/logger.py @@ -312,10 +312,10 @@ class Logger(logging.Logger): """ def __init__( - self, service: str = None, level: str = None, sampling_rate: float = 0.0, stream: sys.stdout = None, **kwargs + self, service: str = None, level: str = None, sampling_rate: float = None, stream: sys.stdout = None, **kwargs ): self.service = service or os.getenv("POWERTOOLS_SERVICE_NAME") or "service_undefined" - self.sampling_rate = sampling_rate or os.getenv("POWERTOOLS_LOGGER_SAMPLE_RATE") + self.sampling_rate = sampling_rate or os.getenv("POWERTOOLS_LOGGER_SAMPLE_RATE") or 0.0 self.log_level = level or os.getenv("LOG_LEVEL") or logging.INFO self.handler = logging.StreamHandler(stream) if stream is not None else logging.StreamHandler(sys.stdout) self._default_log_keys = {"service": self.service, 
"sampling_rate": self.sampling_rate} @@ -326,14 +326,14 @@ def __init__( try: if self.sampling_rate and random.random() <= float(self.sampling_rate): logger.debug("Setting log level to Debug due to sampling rate") - log_level = logging.DEBUG + self.log_level = logging.DEBUG except ValueError: raise ValueError( f"Expected a float value ranging 0 to 1, but received {self.sampling_rate} instead. Please review POWERTOOLS_LOGGER_SAMPLE_RATE environment variable." # noqa E501 ) self.setLevel(self.log_level) - self.structure_logs() + self.structure_logs(**kwargs) self.addHandler(self.handler) def inject_lambda_context(self, lambda_handler: Callable[[Dict, Any], Any] = None, log_event: bool = False): @@ -381,7 +381,7 @@ def handler(event, context): # Return a partial function with args filled if lambda_handler is None: logger.debug("Decorator called with parameters") - return functools.partial(logger_inject_lambda_context, log_event=log_event) + return functools.partial(self.inject_lambda_context, log_event=log_event) log_event_env_option = str(os.getenv("POWERTOOLS_LOGGER_LOG_EVENT", "false")) log_event = strtobool(log_event_env_option) or log_event @@ -390,12 +390,12 @@ def handler(event, context): def decorate(event, context): if log_event: logger.debug("Event received") - logger.info(event) + self.info(event) lambda_context = build_lambda_context_model(context) cold_start = _is_cold_start() - self.structure_logs(append=True, cold_start=cold_start, **lambda_context.__dict__) + self.structure_logs(cold_start=cold_start, **lambda_context.__dict__) return lambda_handler(event, context) return decorate @@ -412,9 +412,9 @@ def structure_logs(self, append: bool = False, **kwargs): append : bool, optional [description], by default False """ + self.handler.setFormatter(JsonFormatter(**self._default_log_keys, **kwargs)) + if append: self.handler.setFormatter(JsonFormatter(**self.log_keys, **kwargs)) - else: - self.handler.setFormatter(JsonFormatter(**kwargs)) self.log_keys.update(**kwargs) diff --git a/python/tests/functional/test_aws_lambda_logging.py b/python/tests/functional/test_aws_lambda_logging.py index 6d5efd492a7..d14b74087ce 100644 --- a/python/tests/functional/test_aws_lambda_logging.py +++ b/python/tests/functional/test_aws_lambda_logging.py @@ -2,10 +2,12 @@ import io import json import logging +import functools from pytest import fixture, mark, yield_fixture +import pytest -from aws_lambda_powertools.logging.aws_lambda_logging import setup +from aws_lambda_powertools.logging.logger import Logger @fixture @@ -31,23 +33,32 @@ def root_logger(handler): @mark.parametrize("level", ["DEBUG", "WARNING", "ERROR", "INFO", "CRITICAL"]) -def test_setup_with_valid_log_levels(root_logger, logger, stdout, level): - setup(level, request_id="request id!", another="value") - - logger.critical("This is a test") - - log_dict = json.loads(stdout.getvalue()) +def test_setup_with_valid_log_levels(root_logger, stdout, level): + logger = Logger(level=level, stream=stdout, request_id="request id!", another="value") + msg = "This is a test" + log_command = { + "INFO": logger.info, + "ERROR": logger.error, + "WARNING": logger.warning, + "DEBUG": logger.debug, + "CRITICAL": logger.critical, + } + + log_message = log_command[level] + log_message(msg) + + log_dict = json.loads(stdout.getvalue().strip()) check_log_dict(log_dict) - assert "CRITICAL" == log_dict["level"] + assert level == log_dict["level"] assert "This is a test" == log_dict["message"] assert "request id!" 
== log_dict["request_id"] assert "exception" not in log_dict -def test_logging_exception_traceback(root_logger, logger, stdout): - setup("DEBUG", request_id="request id!", another="value") +def test_logging_exception_traceback(root_logger, stdout): + logger = Logger(level="DEBUG", stream=stdout, request_id="request id!", another="value") try: raise Exception("Boom") @@ -61,11 +72,9 @@ def test_logging_exception_traceback(root_logger, logger, stdout): def test_setup_with_invalid_log_level(root_logger, logger, stdout): - setup("not a valid log level") # writes a log event - - log_dict = json.loads(stdout.getvalue()) - - check_log_dict(log_dict) + with pytest.raises(ValueError) as e: + Logger(level="not a valid log level") + assert "Unknown level" in e.value.args[0] def check_log_dict(log_dict): @@ -76,11 +85,11 @@ def check_log_dict(log_dict): def test_setup_with_bad_level_does_not_fail(): - setup("DBGG", request_id="request id!", another="value") + Logger("DBGG", request_id="request id!", another="value") -def test_with_dict_message(root_logger, logger, stdout): - setup("DEBUG", another="value") +def test_with_dict_message(root_logger, stdout): + logger = Logger(level="DEBUG", stream=stdout) msg = {"x": "isx"} logger.critical(msg) @@ -90,25 +99,25 @@ def test_with_dict_message(root_logger, logger, stdout): assert msg == log_dict["message"] -def test_with_json_message(root_logger, logger, stdout): - setup("DEBUG", another="value") +def test_with_json_message(root_logger, stdout): + logger = Logger(stream=stdout) msg = {"x": "isx"} - logger.critical(json.dumps(msg)) + logger.info(json.dumps(msg)) log_dict = json.loads(stdout.getvalue()) assert msg == log_dict["message"] -def test_with_unserialisable_value_in_message(root_logger, logger, stdout): - setup("DEBUG", another="value") +def test_with_unserialisable_value_in_message(root_logger, stdout): + logger = Logger(level="DEBUG", stream=stdout) class X: pass msg = {"x": X()} - logger.critical(msg) + logger.debug(msg) log_dict = json.loads(stdout.getvalue()) diff --git a/python/tests/functional/test_logger.py b/python/tests/functional/test_logger.py index db8a09e8d57..be5985bcb87 100644 --- a/python/tests/functional/test_logger.py +++ b/python/tests/functional/test_logger.py @@ -5,7 +5,7 @@ import pytest -from aws_lambda_powertools.logging import MetricUnit, log_metric, logger_inject_lambda_context, logger_setup +from aws_lambda_powertools.logging import MetricUnit, log_metric, logger_inject_lambda_context, logger_setup, Logger @pytest.fixture @@ -37,45 +37,60 @@ def lambda_context(): return namedtuple("LambdaContext", lambda_context.keys())(*lambda_context.values()) +# FIXME - Add test for multiple identical keys or multiple append w/ same keys +# TypeError: type object got multiple values for keyword argument 'cold_start' + +def test_logger_setup_deprecated(): + # Should be removed when GA + with pytest.raises(DeprecationWarning): + logger_setup() + + +def test_logger_inject_lambda_context_deprecated(): + # Should be removed when GA + with pytest.raises(DeprecationWarning): + logger_setup() + + def test_setup_service_name(root_logger, stdout): # GIVEN service is explicitly defined # WHEN logger is setup # THEN service field should be equals service given service_name = "payment" - logger = logger_setup(service=service_name) + logger = Logger(service=service_name, stream=stdout) + logger.info("Hello") log = json.loads(stdout.getvalue()) assert service_name == log["service"] -def test_setup_no_service_name(root_logger, stdout): +def 
test_setup_no_service_name(stdout): # GIVEN no service is explicitly defined # WHEN logger is setup # THEN service field should be "service_undefined" - logger_setup() - logger = logger_setup() + logger = Logger(stream=stdout) logger.info("Hello") log = json.loads(stdout.getvalue()) assert "service_undefined" == log["service"] -def test_setup_service_env_var(monkeypatch, root_logger, stdout): +def test_setup_service_env_var(monkeypatch, stdout): # GIVEN service is explicitly defined via POWERTOOLS_SERVICE_NAME env # WHEN logger is setup # THEN service field should be equals POWERTOOLS_SERVICE_NAME value service_name = "payment" monkeypatch.setenv("POWERTOOLS_SERVICE_NAME", service_name) - logger = logger_setup() + logger = Logger(stream=stdout) logger.info("Hello") log = json.loads(stdout.getvalue()) assert service_name == log["service"] -def test_setup_sampling_rate(monkeypatch, root_logger, stdout): +def test_setup_sampling_rate(monkeypatch, stdout): # GIVEN samping rate is explicitly defined via POWERTOOLS_LOGGER_SAMPLE_RATE env # WHEN logger is setup # THEN sampling rate should be equals POWERTOOLS_LOGGER_SAMPLE_RATE value and should sample debug logs @@ -84,7 +99,7 @@ def test_setup_sampling_rate(monkeypatch, root_logger, stdout): monkeypatch.setenv("POWERTOOLS_LOGGER_SAMPLE_RATE", sampling_rate) monkeypatch.setenv("LOG_LEVEL", "INFO") - logger = logger_setup() + logger = Logger(stream=stdout) logger.debug("I am being sampled") log = json.loads(stdout.getvalue()) @@ -93,7 +108,7 @@ def test_setup_sampling_rate(monkeypatch, root_logger, stdout): assert "I am being sampled" == log["message"] -def test_inject_lambda_context(root_logger, stdout, lambda_context): +def test_inject_lambda_context(lambda_context, stdout): # GIVEN a lambda function is decorated with logger # WHEN logger is setup # THEN lambda contextual info should always be in the logs @@ -104,9 +119,9 @@ def test_inject_lambda_context(root_logger, stdout, lambda_context): "function_request_id", ) - logger = logger_setup() + logger = Logger(stream=stdout) - @logger_inject_lambda_context + @logger.inject_lambda_context def handler(event, context): logger.info("Hello") @@ -118,15 +133,16 @@ def handler(event, context): assert key in log -def test_inject_lambda_context_log_event_request(root_logger, stdout, lambda_context): +def test_inject_lambda_context_log_event_request(lambda_context, stdout): # GIVEN a lambda function is decorated with logger instructed to log event # WHEN logger is setup # THEN logger should log event received from Lambda lambda_event = {"greeting": "hello"} - logger = logger_setup() + logger = Logger(stream=stdout) - @logger_inject_lambda_context(log_event=True) + @logger.inject_lambda_context(log_event=True) + # @logger.inject_lambda_context(log_event=True) def handler(event, context): logger.info("Hello") @@ -134,13 +150,12 @@ def handler(event, context): # Given that our string buffer has many log statements separated by newline \n # We need to clean it before we can assert on - stdout.seek(0) - logs = [json.loads(line.strip()) for line in stdout.readlines()] + logs = [json.loads(line.strip()) for line in stdout.getvalue().split("\n") if line] logged_event, _ = logs assert "greeting" in logged_event["message"] -def test_inject_lambda_context_log_event_request_env_var(monkeypatch, root_logger, stdout, lambda_context): +def test_inject_lambda_context_log_event_request_env_var(monkeypatch, lambda_context, stdout): # GIVEN a lambda function is decorated with logger instructed to log event # via 
POWERTOOLS_LOGGER_LOG_EVENT env # WHEN logger is setup @@ -148,9 +163,9 @@ def test_inject_lambda_context_log_event_request_env_var(monkeypatch, root_logge lambda_event = {"greeting": "hello"} monkeypatch.setenv("POWERTOOLS_LOGGER_LOG_EVENT", "true") - logger = logger_setup() + logger = Logger(stream=stdout) - @logger_inject_lambda_context + @logger.inject_lambda_context def handler(event, context): logger.info("Hello") @@ -158,8 +173,7 @@ def handler(event, context): # Given that our string buffer has many log statements separated by newline \n # We need to clean it before we can assert on - stdout.seek(0) - logs = [json.loads(line.strip()) for line in stdout.readlines()] + logs = [json.loads(line.strip()) for line in stdout.getvalue().split("\n") if line] event = {} for log in logs: @@ -169,15 +183,15 @@ def handler(event, context): assert event == lambda_event -def test_inject_lambda_context_log_no_request_by_default(monkeypatch, root_logger, stdout, lambda_context): +def test_inject_lambda_context_log_no_request_by_default(monkeypatch, lambda_context, stdout): # GIVEN a lambda function is decorated with logger # WHEN logger is setup # THEN logger should not log event received by lambda handler lambda_event = {"greeting": "hello"} - logger = logger_setup() + logger = Logger(stream=stdout) - @logger_inject_lambda_context + @logger.inject_lambda_context def handler(event, context): logger.info("Hello") @@ -185,8 +199,7 @@ def handler(event, context): # Given that our string buffer has many log statements separated by newline \n # We need to clean it before we can assert on - stdout.seek(0) - logs = [json.loads(line.strip()) for line in stdout.readlines()] + logs = [json.loads(line.strip()) for line in stdout.getvalue().split("\n") if line] event = {} for log in logs: @@ -196,7 +209,7 @@ def handler(event, context): assert event != lambda_event -def test_inject_lambda_cold_start(root_logger, stdout, lambda_context): +def test_inject_lambda_cold_start(lambda_context, stdout): # GIVEN a lambda function is decorated with logger, and called twice # WHEN logger is setup # THEN cold_start key should only be true in the first call @@ -208,12 +221,12 @@ def test_inject_lambda_cold_start(root_logger, stdout, lambda_context): # # since Lambda will only import our logger lib once per concurrent execution logger.is_cold_start = True - logger = logger_setup() + logger = Logger(stream=stdout) def custom_method(): logger.info("Hello from method") - @logger_inject_lambda_context + @logger.inject_lambda_context def handler(event, context): custom_method() logger.info("Hello") @@ -223,8 +236,7 @@ def handler(event, context): # Given that our string buffer has many log statements separated by newline \n # We need to clean it before we can assert on - stdout.seek(0) - logs = [json.loads(line.strip()) for line in stdout.readlines()] + logs = [json.loads(line.strip()) for line in stdout.getvalue().split("\n") if line] first_log, second_log, third_log, fourth_log = logs # First execution @@ -303,7 +315,7 @@ def test_log_metric_partially_correct_args(capsys, invalid_input, expected): [({"unit": "Blah"}, ValueError), ({"unit": None}, ValueError), ({}, TypeError)], ids=["invalid metric unit as str", "unit as None", "missing required unit"], ) -def test_log_metric_invalid_unit(invalid_input, expected): +def test_log_metric_invalid_unit(capsys, invalid_input, expected): # GIVEN invalid units are provided # WHEN log_metric is called # THEN ValueError exception should be raised From 
4467cf6c637184be0d5fa755d07f82a42c1b536f Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Thu, 23 Apr 2020 11:06:45 +0100 Subject: [PATCH 13/23] improv: cover duplicated keys edge case --- .../aws_lambda_powertools/logging/logger.py | 8 +++- .../functional/test_aws_lambda_logging.py | 13 +++---- python/tests/functional/test_logger.py | 37 +++++++++++-------- 3 files changed, 33 insertions(+), 25 deletions(-) diff --git a/python/aws_lambda_powertools/logging/logger.py b/python/aws_lambda_powertools/logging/logger.py index 87560a8f821..01a4e7fdb51 100644 --- a/python/aws_lambda_powertools/logging/logger.py +++ b/python/aws_lambda_powertools/logging/logger.py @@ -91,7 +91,7 @@ def format(self, record): # noqa: A003 def logger_setup(service: str = None, level: str = None, sampling_rate: float = 0.0, legacy: bool = False, **kwargs): """DEPRECATED - + This will be removed when GA - Use `aws_lambda_powertools.logging.logger.Logger` instead Example @@ -124,6 +124,7 @@ def handler(evt, ctx): raise DeprecationWarning("Use Logger instead - This method will be removed when GA") + def _is_cold_start() -> str: """Verifies whether is cold start and return a string used for struct logging @@ -407,6 +408,8 @@ def structure_logs(self, append: bool = False, **kwargs): to an existing logger so it is available across future log statements. + Last keyword argument and value wins if duplicated. + Parameters ---------- append : bool, optional @@ -415,6 +418,7 @@ def structure_logs(self, append: bool = False, **kwargs): self.handler.setFormatter(JsonFormatter(**self._default_log_keys, **kwargs)) if append: - self.handler.setFormatter(JsonFormatter(**self.log_keys, **kwargs)) + new_keys = {**self.log_keys, **kwargs} + self.handler.setFormatter(JsonFormatter(**new_keys)) self.log_keys.update(**kwargs) diff --git a/python/tests/functional/test_aws_lambda_logging.py b/python/tests/functional/test_aws_lambda_logging.py index d14b74087ce..4a0035e170d 100644 --- a/python/tests/functional/test_aws_lambda_logging.py +++ b/python/tests/functional/test_aws_lambda_logging.py @@ -1,38 +1,37 @@ """aws_lambda_logging tests.""" +import functools import io import json import logging -import functools -from pytest import fixture, mark, yield_fixture import pytest from aws_lambda_powertools.logging.logger import Logger -@fixture +@pytest.fixture def stdout(): return io.StringIO() -@fixture +@pytest.fixture def handler(stdout): return logging.StreamHandler(stdout) -@fixture +@pytest.fixture def logger(): return logging.getLogger(__name__) -@yield_fixture +@pytest.fixture def root_logger(handler): logging.root.addHandler(handler) yield logging.root logging.root.removeHandler(handler) -@mark.parametrize("level", ["DEBUG", "WARNING", "ERROR", "INFO", "CRITICAL"]) +@pytest.mark.parametrize("level", ["DEBUG", "WARNING", "ERROR", "INFO", "CRITICAL"]) def test_setup_with_valid_log_levels(root_logger, stdout, level): logger = Logger(level=level, stream=stdout, request_id="request id!", another="value") msg = "This is a test" diff --git a/python/tests/functional/test_logger.py b/python/tests/functional/test_logger.py index be5985bcb87..4e1de9d27dc 100644 --- a/python/tests/functional/test_logger.py +++ b/python/tests/functional/test_logger.py @@ -5,7 +5,7 @@ import pytest -from aws_lambda_powertools.logging import MetricUnit, log_metric, logger_inject_lambda_context, logger_setup, Logger +from aws_lambda_powertools.logging import Logger, MetricUnit, log_metric, logger_inject_lambda_context, logger_setup @pytest.fixture @@ -37,21 +37,6 @@ def 
lambda_context(): return namedtuple("LambdaContext", lambda_context.keys())(*lambda_context.values()) -# FIXME - Add test for multiple identical keys or multiple append w/ same keys -# TypeError: type object got multiple values for keyword argument 'cold_start' - -def test_logger_setup_deprecated(): - # Should be removed when GA - with pytest.raises(DeprecationWarning): - logger_setup() - - -def test_logger_inject_lambda_context_deprecated(): - # Should be removed when GA - with pytest.raises(DeprecationWarning): - logger_setup() - - def test_setup_service_name(root_logger, stdout): # GIVEN service is explicitly defined # WHEN logger is setup @@ -322,3 +307,23 @@ def test_log_metric_invalid_unit(capsys, invalid_input, expected): with pytest.raises(expected): log_metric(name="test_metric", namespace="DemoApp", **invalid_input) + + +def test_logger_setup_deprecated(): + # Should be removed when GA + with pytest.raises(DeprecationWarning): + logger_setup() + + +def test_logger_inject_lambda_context_deprecated(): + # Should be removed when GA + with pytest.raises(DeprecationWarning): + logger_setup() + + +def test_logger_append_duplicated(stdout): + logger = Logger(stream=stdout, request_id="value") + logger.structure_logs(append=True, request_id="new_value") + logger.info("log") + log = json.loads(stdout.getvalue()) + assert "new_value" == log["request_id"] From 220916bec8a94d7c7a604ee7a9b246a67e5514c6 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Thu, 23 Apr 2020 15:10:07 +0100 Subject: [PATCH 14/23] improv: cover debug package logging --- python/aws_lambda_powertools/__init__.py | 6 --- .../aws_lambda_powertools/logging/logger.py | 50 +++++++++++++++++-- python/tests/functional/test_init.py | 10 ++++ 3 files changed, 57 insertions(+), 9 deletions(-) create mode 100644 python/tests/functional/test_init.py diff --git a/python/aws_lambda_powertools/__init__.py b/python/aws_lambda_powertools/__init__.py index cb337ab23ca..b7407d46831 100644 --- a/python/aws_lambda_powertools/__init__.py +++ b/python/aws_lambda_powertools/__init__.py @@ -2,14 +2,8 @@ """Top-level package for Lambda Python Powertools.""" import logging -import os __author__ = """Amazon Web Services""" - logger = logging.getLogger("aws_lambda_powertools") -log_level = os.getenv("POWERTOOLS_LOG_LEVEL") logger.addHandler(logging.NullHandler()) - -if log_level: - logger.setLevel(log_level) diff --git a/python/aws_lambda_powertools/logging/logger.py b/python/aws_lambda_powertools/logging/logger.py index 01a4e7fdb51..e00771ee9fd 100644 --- a/python/aws_lambda_powertools/logging/logger.py +++ b/python/aws_lambda_powertools/logging/logger.py @@ -8,7 +8,7 @@ import sys import warnings from distutils.util import strtobool -from typing import Any, Callable, Dict +from typing import Any, Callable, Dict, Union from ..helper.models import MetricUnit, build_lambda_context_model, build_metric_unit_from_str @@ -261,7 +261,7 @@ class Logger(logging.Logger): --------------------- POWERTOOLS_SERVICE_NAME : str service name - LOG_LEVEL: str + LOG_LEVEL: str, int logging level (e.g. 
INFO, DEBUG) POWERTOOLS_LOGGER_SAMPLE_RATE: float samping rate ranging from 0 to 1, 1 being 100% sampling @@ -310,10 +310,18 @@ class Logger(logging.Logger): ---------- logging : logging.Logger Inherits Logger + service: str + name of the service to create the logger for, "service_undefined" by default + level: str, int + log level, INFO by default + sampling_rate: float + debug log sampling rate, 0.0 by default + stream: sys.stdout + log stream, stdout by default """ def __init__( - self, service: str = None, level: str = None, sampling_rate: float = None, stream: sys.stdout = None, **kwargs + self, service: str = None, level: Union[str, int] = None, sampling_rate: float = None, stream: sys.stdout = None, **kwargs ): self.service = service or os.getenv("POWERTOOLS_SERVICE_NAME") or "service_undefined" self.sampling_rate = sampling_rate or os.getenv("POWERTOOLS_LOGGER_SAMPLE_RATE") or 0.0 @@ -422,3 +430,39 @@ def structure_logs(self, append: bool = False, **kwargs): self.handler.setFormatter(JsonFormatter(**new_keys)) self.log_keys.update(**kwargs) + +def set_package_logger(level: Union[str, int] = logging.DEBUG, stream: sys.stdout = None, formatter: logging.Formatter = None +): + """Set an additional stream handler, formatter, and log level for aws_lambda_powertools package logger. + + **Package log by default is supressed (NullHandler), this should only used for debugging. + This is separate from application Logger class utility** + + Example + ------- + **Enables debug logging for AWS Lambda Powertools package** + + >>> from aws_lambda_powertools.logging.logger import set_package_logger + >>> set_package_logger() + + Parameters + ---------- + level: str, int + log level, DEBUG by default + stream: sys.stdout + log stream, stdout by default + formatter: logging.Formatter + log formatter, "%(asctime)s %(name)s [%(levelname)s] %(message)s" by default + """ + if formatter is None: + formatter = "%(asctime)s %(name)s [%(levelname)s] %(message)s" + + if stream is None: + stream = sys.stdout + + logger = logging.getLogger("aws_lambda_powertools") + logger.setLevel(level) + handler = logging.StreamHandler(stream) + formatter = logging.Formatter(formatter) + handler.setFormatter(formatter) + logger.addHandler(handler) diff --git a/python/tests/functional/test_init.py b/python/tests/functional/test_init.py new file mode 100644 index 00000000000..5d26583667e --- /dev/null +++ b/python/tests/functional/test_init.py @@ -0,0 +1,10 @@ +from aws_lambda_powertools.tracing import Tracer +from aws_lambda_powertools.logging.logger import set_package_logger + +def test_package_logger(capsys): + import aws_lambda_powertools + set_package_logger() + tracer = Tracer(disabled=True) + output = capsys.readouterr() + + assert "Tracing has been disabled" in output.out From 3cc398e5d54fa24d60467068145194ce9d0741e3 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Thu, 23 Apr 2020 16:02:06 +0100 Subject: [PATCH 15/23] improv: more coverage, linting --- .../aws_lambda_powertools/logging/logger.py | 14 +++++++--- .../aws_lambda_powertools/metrics/metric.py | 5 ++-- .../aws_lambda_powertools/metrics/metrics.py | 5 ++-- .../middleware_factory/factory.py | 2 +- .../functional/test_aws_lambda_logging.py | 1 - python/tests/functional/test_init.py | 10 ------- python/tests/functional/test_logger.py | 26 ++++++++++++++++++- 7 files changed, 40 insertions(+), 23 deletions(-) delete mode 100644 python/tests/functional/test_init.py diff --git a/python/aws_lambda_powertools/logging/logger.py 
b/python/aws_lambda_powertools/logging/logger.py index e00771ee9fd..7e08224da86 100644 --- a/python/aws_lambda_powertools/logging/logger.py +++ b/python/aws_lambda_powertools/logging/logger.py @@ -321,7 +321,12 @@ class Logger(logging.Logger): """ def __init__( - self, service: str = None, level: Union[str, int] = None, sampling_rate: float = None, stream: sys.stdout = None, **kwargs + self, + service: str = None, + level: Union[str, int] = None, + sampling_rate: float = None, + stream: sys.stdout = None, + **kwargs, ): self.service = service or os.getenv("POWERTOOLS_SERVICE_NAME") or "service_undefined" self.sampling_rate = sampling_rate or os.getenv("POWERTOOLS_LOGGER_SAMPLE_RATE") or 0.0 @@ -431,7 +436,9 @@ def structure_logs(self, append: bool = False, **kwargs): self.log_keys.update(**kwargs) -def set_package_logger(level: Union[str, int] = logging.DEBUG, stream: sys.stdout = None, formatter: logging.Formatter = None + +def set_package_logger( + level: Union[str, int] = logging.DEBUG, stream: sys.stdout = None, formatter: logging.Formatter = None ): """Set an additional stream handler, formatter, and log level for aws_lambda_powertools package logger. @@ -455,7 +462,7 @@ def set_package_logger(level: Union[str, int] = logging.DEBUG, stream: sys.stdou log formatter, "%(asctime)s %(name)s [%(levelname)s] %(message)s" by default """ if formatter is None: - formatter = "%(asctime)s %(name)s [%(levelname)s] %(message)s" + formatter = logging.Formatter("%(asctime)s %(name)s [%(levelname)s] %(message)s") if stream is None: stream = sys.stdout @@ -463,6 +470,5 @@ def set_package_logger(level: Union[str, int] = logging.DEBUG, stream: sys.stdou logger = logging.getLogger("aws_lambda_powertools") logger.setLevel(level) handler = logging.StreamHandler(stream) - formatter = logging.Formatter(formatter) handler.setFormatter(formatter) logger.addHandler(handler) diff --git a/python/aws_lambda_powertools/metrics/metric.py b/python/aws_lambda_powertools/metrics/metric.py index 7c536b2d18e..b83004a6005 100644 --- a/python/aws_lambda_powertools/metrics/metric.py +++ b/python/aws_lambda_powertools/metrics/metric.py @@ -1,6 +1,5 @@ import json import logging -import os from contextlib import contextmanager from typing import Dict @@ -112,8 +111,8 @@ def single_metric(name: str, unit: MetricUnit, value: float): yield metric logger.debug("Serializing single metric") metric_set: Dict = metric.serialize_metric_set() - except Exception as e: - raise e + except Exception: + raise finally: logger.debug("Publishing single metric", {"metric": metric}) print(json.dumps(metric_set)) diff --git a/python/aws_lambda_powertools/metrics/metrics.py b/python/aws_lambda_powertools/metrics/metrics.py index 66f56beefc2..e706ddfae0a 100644 --- a/python/aws_lambda_powertools/metrics/metrics.py +++ b/python/aws_lambda_powertools/metrics/metrics.py @@ -1,7 +1,6 @@ import functools import json import logging -import os from typing import Any, Callable from aws_lambda_powertools.metrics.base import MetricManager @@ -100,8 +99,8 @@ def handler(event, context) def decorate(*args, **kwargs): try: response = lambda_handler(*args, **kwargs) - except Exception as e: - raise e + except Exception: + raise finally: metrics = self.serialize_metric_set() logger.debug("Publishing metrics", {"metrics": metrics}) diff --git a/python/aws_lambda_powertools/middleware_factory/factory.py b/python/aws_lambda_powertools/middleware_factory/factory.py index 31a4c5dc58b..cf70fad22d9 100644 --- a/python/aws_lambda_powertools/middleware_factory/factory.py 
+++ b/python/aws_lambda_powertools/middleware_factory/factory.py @@ -128,7 +128,7 @@ def wrapper(event, context): else: response = middleware() return response - except Exception as err: + except Exception: logger.exception(f"Caught exception in {decorator.__qualname__}", exc_info=True) raise diff --git a/python/tests/functional/test_aws_lambda_logging.py b/python/tests/functional/test_aws_lambda_logging.py index 4a0035e170d..4a4513134dc 100644 --- a/python/tests/functional/test_aws_lambda_logging.py +++ b/python/tests/functional/test_aws_lambda_logging.py @@ -1,5 +1,4 @@ """aws_lambda_logging tests.""" -import functools import io import json import logging diff --git a/python/tests/functional/test_init.py b/python/tests/functional/test_init.py deleted file mode 100644 index 5d26583667e..00000000000 --- a/python/tests/functional/test_init.py +++ /dev/null @@ -1,10 +0,0 @@ -from aws_lambda_powertools.tracing import Tracer -from aws_lambda_powertools.logging.logger import set_package_logger - -def test_package_logger(capsys): - import aws_lambda_powertools - set_package_logger() - tracer = Tracer(disabled=True) - output = capsys.readouterr() - - assert "Tracing has been disabled" in output.out diff --git a/python/tests/functional/test_logger.py b/python/tests/functional/test_logger.py index 4e1de9d27dc..7031cfd826c 100644 --- a/python/tests/functional/test_logger.py +++ b/python/tests/functional/test_logger.py @@ -6,6 +6,8 @@ import pytest from aws_lambda_powertools.logging import Logger, MetricUnit, log_metric, logger_inject_lambda_context, logger_setup +from aws_lambda_powertools.logging.logger import JsonFormatter, set_package_logger +from aws_lambda_powertools.tracing import Tracer @pytest.fixture @@ -295,6 +297,23 @@ def test_log_metric_partially_correct_args(capsys, invalid_input, expected): assert captured.out == expected +def test_package_logger(capsys): + + set_package_logger() + Tracer(disabled=True) + output = capsys.readouterr() + + assert "Tracing has been disabled" in output.out + + +def test_package_logger_format(stdout, capsys): + set_package_logger(stream=stdout, formatter=JsonFormatter(formatter="test")) + Tracer(disabled=True) + output = json.loads(stdout.getvalue().split("\n")[0]) + + assert "test" in output["formatter"] + + @pytest.mark.parametrize( "invalid_input,expected", [({"unit": "Blah"}, ValueError), ({"unit": None}, ValueError), ({}, TypeError)], @@ -318,7 +337,7 @@ def test_logger_setup_deprecated(): def test_logger_inject_lambda_context_deprecated(): # Should be removed when GA with pytest.raises(DeprecationWarning): - logger_setup() + logger_inject_lambda_context() def test_logger_append_duplicated(stdout): @@ -327,3 +346,8 @@ def test_logger_append_duplicated(stdout): logger.info("log") log = json.loads(stdout.getvalue()) assert "new_value" == log["request_id"] + + +def test_logger_invalid_sampling_rate(): + with pytest.raises(ValueError): + Logger(sampling_rate="TEST") From 447dc2baa62aaec8f2e5a899c6b04dfe7fdc3c74 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Thu, 23 Apr 2020 16:47:59 +0100 Subject: [PATCH 16/23] improv: complete coverage, fix dead code --- python/aws_lambda_powertools/metrics/base.py | 11 +++--- .../aws_lambda_powertools/tracing/tracer.py | 16 +++----- python/tests/functional/test_metrics.py | 38 +++++++++++++++++++ python/tests/unit/test_tracing.py | 28 ++++++++++++++ 4 files changed, 76 insertions(+), 17 deletions(-) diff --git a/python/aws_lambda_powertools/metrics/base.py b/python/aws_lambda_powertools/metrics/base.py index 
05c3139fc58..9f6ae20bc9e 100644 --- a/python/aws_lambda_powertools/metrics/base.py +++ b/python/aws_lambda_powertools/metrics/base.py @@ -148,10 +148,10 @@ def serialize_metric_set(self, metrics: Dict = None, dimensions: Dict = None) -> SchemaValidationError Raised when serialization fail schema validation """ - if metrics is None: + if metrics is None: # pragma: no cover metrics = self.metric_set - if dimensions is None: + if dimensions is None: # pragma: no cover dimensions = self.dimension_set logger.debug("Serializing...", {"metrics": metrics, "dimensions": dimensions}) @@ -163,11 +163,10 @@ def serialize_metric_set(self, metrics: Dict = None, dimensions: Dict = None) -> for metric_name in metrics: metric: str = metrics[metric_name] metric_value: int = metric.get("Value", 0) - metric_unit: str = metric.get("Unit") + metric_unit: str = metric.get("Unit", "") - if metric_value > 0 and metric_unit is not None: - metric_names_unit.append({"Name": metric_name, "Unit": metric["Unit"]}) - metric_set.update({metric_name: metric["Value"]}) + metric_names_unit.append({"Name": metric_name, "Unit": metric_unit}) + metric_set.update({metric_name: metric_value}) metrics_definition = { "CloudWatchMetrics": [ diff --git a/python/aws_lambda_powertools/tracing/tracer.py b/python/aws_lambda_powertools/tracing/tracer.py index a9c5b92ef29..b347a67b3c9 100644 --- a/python/aws_lambda_powertools/tracing/tracer.py +++ b/python/aws_lambda_powertools/tracing/tracer.py @@ -343,17 +343,10 @@ def patch(self): """Patch modules for instrumentation""" logger.debug("Patching modules...") - is_lambda_emulator = os.getenv("AWS_SAM_LOCAL", False) - is_lambda_env = os.getenv("LAMBDA_TASK_ROOT", False) - if self.disabled: logger.debug("Tracing has been disabled, aborting patch") return - if is_lambda_emulator or is_lambda_env: - logger.debug("Running under SAM CLI env or not in Lambda; aborting patch") - return - patch_all() # pragma: no cover def __disable_tracing_provider(self): @@ -368,7 +361,7 @@ def __is_trace_disabled(self) -> bool: Tracing is automatically disabled in the following conditions: 1. Explicitly disabled via `TRACE_DISABLED` environment variable - 2. Running in Lambda Emulators where X-Ray Daemon will not be listening + 2. Running in Lambda Emulators, or locally where X-Ray Daemon will not be listening 3. 
Explicitly disabled via constructor e.g `Tracer(disabled=True)` Returns @@ -377,6 +370,7 @@ def __is_trace_disabled(self) -> bool: """ logger.debug("Verifying whether Tracing has been disabled") is_lambda_emulator = os.getenv("AWS_SAM_LOCAL") + is_lambda_env = os.getenv("LAMBDA_TASK_ROOT") env_option = str(os.getenv("POWERTOOLS_TRACE_DISABLED", "false")) disabled_env = strtobool(env_option) @@ -384,9 +378,9 @@ def __is_trace_disabled(self) -> bool: logger.debug("Tracing has been disabled via env var POWERTOOLS_TRACE_DISABLED") return disabled_env - if is_lambda_emulator: - logger.debug("Running under SAM CLI env; Tracing has been disabled") - return is_lambda_emulator + if is_lambda_emulator or is_lambda_env: + logger.debug("Running under SAM CLI env or not in Lambda env; disabling Tracing") + return True return False diff --git a/python/tests/functional/test_metrics.py b/python/tests/functional/test_metrics.py index 0feaf3303ff..65183f47b05 100644 --- a/python/tests/functional/test_metrics.py +++ b/python/tests/functional/test_metrics.py @@ -264,3 +264,41 @@ def test_exceed_number_of_dimensions(metric, namespace): my_metric.add_namespace(**namespace) for dimension in dimensions: my_metric.add_dimension(**dimension) + + +def test_log_metrics_error_propagation(capsys, metric, dimension, namespace): + # GIVEN Metrics are serialized after handler execution + # WHEN If an error occurs and metrics have been added + # THEN we should log metrics and propagate exception up + my_metrics = Metrics() + + my_metrics.add_metric(**metric) + my_metrics.add_dimension(**dimension) + my_metrics.add_namespace(**namespace) + + @my_metrics.log_metrics + def lambda_handler(evt, context): + raise ValueError("Bubble up") + + with pytest.raises(ValueError): + lambda_handler({}, {}) + + output = json.loads(capsys.readouterr().out.strip()) + expected = serialize_single_metric(metric=metric, dimension=dimension, namespace=namespace) + + remove_timestamp(metrics=[output, expected]) # Timestamp will always be different + assert expected["_aws"] == output["_aws"] + + +def test_log_no_metrics_error_propagation(capsys, metric, dimension, namespace): + # GIVEN Metrics are serialized after handler execution + # WHEN If an error occurs and no metrics have been added + # THEN we should propagate exception up and raise SchemaValidationError + my_metrics = Metrics() + + @my_metrics.log_metrics + def lambda_handler(evt, context): + raise ValueError("Bubble up") + + with pytest.raises(SchemaValidationError): + lambda_handler({}, {}) diff --git a/python/tests/unit/test_tracing.py b/python/tests/unit/test_tracing.py index a7b98389e33..f8f43de0bf4 100644 --- a/python/tests/unit/test_tracing.py +++ b/python/tests/unit/test_tracing.py @@ -153,3 +153,31 @@ def test_tracer_no_autopatch(patch_mock): # THEN tracer should not patch any module Tracer(disabled=True, auto_patch=False) assert patch_mock.call_count == 0 + + +def test_tracer_lambda_handler_empty_response_metadata(mocker, xray_stub): + put_metadata_mock = mocker.MagicMock() + xray_provider = xray_stub(put_metadata_mock=put_metadata_mock) + tracer = Tracer(provider=xray_provider) + + @tracer.capture_lambda_handler + def handler(event, context): + return + + handler({}, mocker.MagicMock()) + + assert put_metadata_mock.call_count == 0 + + +def test_tracer_method_empty_response_metadata(mocker, xray_stub): + put_metadata_mock = mocker.MagicMock() + xray_provider = xray_stub(put_metadata_mock=put_metadata_mock) + tracer = Tracer(provider=xray_provider) + + @tracer.capture_method + def 
greeting(name, message): + return + + greeting(name="Foo", message="Bar") + + assert put_metadata_mock.call_count == 0 From 9dc2c00461564099d82f1e2a34d0cbad7b4f371b Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Thu, 23 Apr 2020 17:20:29 +0100 Subject: [PATCH 17/23] docs: update readme to reflect changes --- README.md | 2 +- python/README.md | 68 ++++++++++++++++++++++++++++++++++++++++-------- 2 files changed, 58 insertions(+), 12 deletions(-) diff --git a/README.md b/README.md index 175e8b98470..9a1245bfa80 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # Lambda Powertools -![PackageStatus](https://img.shields.io/static/v1?label=status&message=beta&color=blueviolet?style=flat-square) ![PythonSupport](https://img.shields.io/static/v1?label=python&message=3.6%20|%203.7|%203.8&color=blue?style=flat-square&logo=python) +![Python Build](https://github.com/awslabs/aws-lambda-powertools/workflows/Powertools%20Python/badge.svg?branch=master) A suite of utilities for AWS Lambda Functions that makes tracing with AWS X-Ray, structured logging and creating custom metrics asynchronously easier. diff --git a/python/README.md b/python/README.md index 5a96dc80b63..d0e6481291d 100644 --- a/python/README.md +++ b/python/README.md @@ -24,6 +24,7 @@ A suite of utilities for AWS Lambda Functions that makes tracing with AWS X-Ray, * Logs canonical custom metric line to logs that can be consumed asynchronously * Log sampling enables DEBUG log level for a percentage of requests (disabled by default) - Enable via `POWERTOOLS_LOGGER_SAMPLE_RATE=0.1`, ranges from 0 to 1, where 0.1 is 10% and 1 is 100% +* Append additional keys to structured log at any point in time so they're available across log statements **Metrics** @@ -113,6 +114,8 @@ tracer = Tracer(auto_patch=False) # new instance using existing configuration wi ### Logging +> **NOTE** `logger_setup` and `logger_inject_lambda_context` are deprecated and will be completely removed once it's GA. + **Example SAM template using supported environment variables** ```yaml @@ -128,13 +131,13 @@ Globals: **Pseudo Python Lambda code** ```python -from aws_lambda_powertools.logging import logger_setup, logger_inject_lambda_context +from aws_lambda_powertools.logging import Logger -logger = logger_setup() -# logger_setup(service="payment") # also accept explicit service name -# logger_setup(level="INFO") # also accept explicit log level +logger = Logger() +# Logger(service="payment") # also accepts explicit service name +# Logger(level="INFO") # also accepts explicit log level -@logger_inject_lambda_context +@logger.inject_lambda_context def handler(event, context) logger.info("Collecting payment") ... 
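
A minimal sketch of the appended-keys behaviour documented above, mirroring `test_logger_append_duplicated` from PATCH 13: when `structure_logs(append=True, ...)` receives a key that is already set, the last keyword argument wins. This is illustrative only and sits outside the patches; the stream and key values are arbitrary.

```python
import io
import json

from aws_lambda_powertools.logging import Logger

# In-memory stream so the emitted JSON line can be inspected in-process
stdout = io.StringIO()
logger = Logger(stream=stdout, request_id="initial_value")

# Appending an already-present key replaces it: last keyword argument wins
logger.structure_logs(append=True, request_id="new_value")
logger.info("Collecting payment")

log = json.loads(stdout.getvalue())
assert log["request_id"] == "new_value"
```
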
@@ -159,6 +162,7 @@ def handler(event, context) "lambda_function_arn":"arn:aws:lambda:eu-west-1:12345678910:function:test", "lambda_request_id":"52fdfc07-2182-154f-163f-5f0f9a621d72", "cold_start": "true", + "sampling_rate": 0.1, "message": "Collecting payment" } @@ -172,6 +176,7 @@ def handler(event, context) "lambda_function_arn":"arn:aws:lambda:eu-west-1:12345678910:function:test", "lambda_request_id":"52fdfc07-2182-154f-163f-5f0f9a621d72", "cold_start": "true", + "sampling_rate": 0.1, "message":{ "operation":"collect_payment", "charge_id": "ch_AZFlk2345C0" @@ -179,6 +184,40 @@ def handler(event, context) } ``` +**Append additional keys to structured log** + +```python +from aws_lambda_powertools.logging import Logger + +logger = Logger() + +@logger.inject_lambda_context +def handler(event, context) + if "order_id" in event: + logger.structure_logs(append=True, order_id=event["order_id"]) + logger.info("Collecting payment") + ... +``` + +**Exerpt output in CloudWatch Logs** + +```json +{ + "timestamp":"2019-08-22 18:17:33,774", + "level":"INFO", + "location":"collect.handler:1", + "service":"payment", + "lambda_function_name":"test", + "lambda_function_memory_size":"128", + "lambda_function_arn":"arn:aws:lambda:eu-west-1:12345678910:function:test", + "lambda_request_id":"52fdfc07-2182-154f-163f-5f0f9a621d72", + "cold_start": "true", + "sampling_rate": 0.1, + "order_id": "order_id_value", + "message": "Collecting payment" +} +``` + ### Custom Metrics async > **NOTE** `log_metric` will be removed once it's GA. @@ -320,12 +359,19 @@ def lambda_handler(event, context): return True ``` -## Beta -> **[Progress towards GA](https://github.com/awslabs/aws-lambda-powertools/projects/1)** +### Debug mode + +By default, all debug log statements from AWS Lambda Powertools package are suppressed. If you'd like to enable them, use `set_package_logger` utility: + +```python +import aws_lambda_powertools +aws_lambda_powertools.logging.logger.set_package_logger() +... +``` + +## Beta -This library may change its API/methods or environment variables as it receives feedback from customers. 
Currently looking for ideas in the following areas before making it stable: +This library may change its API/methods or environment variables as it receives feedback from customers -* **Should Tracer patch all possible imported libraries by default or only AWS SDKs?** - - Patching all libraries may have a small performance penalty (~50ms) at cold start - - Alternatively, we could patch only AWS SDK if available and to provide a param to patch multiple `Tracer(modules=("boto3", "requests"))` +**[Progress towards GA](https://github.com/awslabs/aws-lambda-powertools/projects/1)** From 0e8dd848f07ecb912864fadf066617cdb09bf751 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 24 Apr 2020 11:48:26 +0100 Subject: [PATCH 18/23] fix: address jacob's code review feedback --- python/aws_lambda_powertools/__init__.py | 1 + .../logging/exceptions.py | 2 + .../aws_lambda_powertools/logging/logger.py | 47 +++++++++++++++---- .../aws_lambda_powertools/metrics/metric.py | 2 - .../aws_lambda_powertools/metrics/metrics.py | 2 - .../middleware_factory/exceptions.py | 2 + .../middleware_factory/factory.py | 8 ++-- python/tests/functional/test_logger.py | 3 +- .../functional/test_middleware_factory.py | 22 ++++++++- 9 files changed, 71 insertions(+), 18 deletions(-) create mode 100644 python/aws_lambda_powertools/logging/exceptions.py create mode 100644 python/aws_lambda_powertools/middleware_factory/exceptions.py diff --git a/python/aws_lambda_powertools/__init__.py b/python/aws_lambda_powertools/__init__.py index b7407d46831..2be705fb386 100644 --- a/python/aws_lambda_powertools/__init__.py +++ b/python/aws_lambda_powertools/__init__.py @@ -7,3 +7,4 @@ logger = logging.getLogger("aws_lambda_powertools") logger.addHandler(logging.NullHandler()) +logger.propagate = False diff --git a/python/aws_lambda_powertools/logging/exceptions.py b/python/aws_lambda_powertools/logging/exceptions.py new file mode 100644 index 00000000000..65b30906edf --- /dev/null +++ b/python/aws_lambda_powertools/logging/exceptions.py @@ -0,0 +1,2 @@ +class InvalidLoggerSamplingRateError(Exception): + pass diff --git a/python/aws_lambda_powertools/logging/logger.py b/python/aws_lambda_powertools/logging/logger.py index 7e08224da86..8b164829437 100644 --- a/python/aws_lambda_powertools/logging/logger.py +++ b/python/aws_lambda_powertools/logging/logger.py @@ -11,15 +11,32 @@ from typing import Any, Callable, Dict, Union from ..helper.models import MetricUnit, build_lambda_context_model, build_metric_unit_from_str +from .exceptions import InvalidLoggerSamplingRateError logger = logging.getLogger(__name__) is_cold_start = True -def json_formatter(obj): - """Formatter for unserialisable values.""" - return str(obj) +def json_formatter(unserialized_value: Any): + """JSON custom serializer to cast unserialisable values to strings. + + Example + ------- + + **Serialize unserialisable value to string** + + class X: pass + value = {"x": X()} + + json.dumps(value, default=json_formatter) + + Parameters + ---------- + unserialized_value: Any + Python object unserializable by JSON + """ + return str(unserialized_value) class JsonFormatter(logging.Formatter): @@ -57,7 +74,12 @@ def format(self, record): # noqa: A003 record_dict = record.__dict__.copy() record_dict["asctime"] = self.formatTime(record, self.datefmt) - log_dict = {k: v % record_dict for k, v in self.format_dict.items() if v} + log_dict = {} + for key, value in self.format_dict.items(): + if value: + # converts default logging expr to its record value + # e.g. 
'%(asctime)s' to '2020-04-24 09:35:40,698' + log_dict[key] = value % record_dict if isinstance(record_dict["msg"], dict): log_dict["message"] = record_dict["msg"] @@ -89,7 +111,9 @@ def format(self, record): # noqa: A003 return json_record -def logger_setup(service: str = None, level: str = None, sampling_rate: float = 0.0, legacy: bool = False, **kwargs): +def logger_setup( + service: str = None, level: str = None, sampling_rate: float = 0.0, legacy: bool = False, **kwargs +) -> DeprecationWarning: """DEPRECATED This will be removed when GA - Use `aws_lambda_powertools.logging.logger.Logger` instead @@ -102,11 +126,12 @@ def logger_setup(service: str = None, level: str = None, sampling_rate: float = logger = Logger(service="payment") # same env var still applies """ - warnings.warn(message="This method will be removed in GA; use Logger instead", category=DeprecationWarning) raise DeprecationWarning("Use Logger instead - This method will be removed when GA") -def logger_inject_lambda_context(lambda_handler: Callable[[Dict, Any], Any] = None, log_event: bool = False): +def logger_inject_lambda_context( + lambda_handler: Callable[[Dict, Any], Any] = None, log_event: bool = False +) -> DeprecationWarning: """DEPRECATED This will be removed when GA - Use `aws_lambda_powertools.logging.logger.Logger` instead @@ -121,7 +146,6 @@ def logger_inject_lambda_context(lambda_handler: Callable[[Dict, Any], Any] = No def handler(evt, ctx): pass """ - raise DeprecationWarning("Use Logger instead - This method will be removed when GA") @@ -318,6 +342,11 @@ class Logger(logging.Logger): debug log sampling rate, 0.0 by default stream: sys.stdout log stream, stdout by default + + Raises + ------ + InvalidLoggerSamplingRateError + When sampling rate provided is not a float """ def __init__( @@ -342,7 +371,7 @@ def __init__( logger.debug("Setting log level to Debug due to sampling rate") self.log_level = logging.DEBUG except ValueError: - raise ValueError( + raise InvalidLoggerSamplingRateError( f"Expected a float value ranging 0 to 1, but received {self.sampling_rate} instead. Please review POWERTOOLS_LOGGER_SAMPLE_RATE environment variable." 
# noqa E501 ) diff --git a/python/aws_lambda_powertools/metrics/metric.py b/python/aws_lambda_powertools/metrics/metric.py index b83004a6005..05a8d4ce76b 100644 --- a/python/aws_lambda_powertools/metrics/metric.py +++ b/python/aws_lambda_powertools/metrics/metric.py @@ -111,8 +111,6 @@ def single_metric(name: str, unit: MetricUnit, value: float): yield metric logger.debug("Serializing single metric") metric_set: Dict = metric.serialize_metric_set() - except Exception: - raise finally: logger.debug("Publishing single metric", {"metric": metric}) print(json.dumps(metric_set)) diff --git a/python/aws_lambda_powertools/metrics/metrics.py b/python/aws_lambda_powertools/metrics/metrics.py index e706ddfae0a..390356b1461 100644 --- a/python/aws_lambda_powertools/metrics/metrics.py +++ b/python/aws_lambda_powertools/metrics/metrics.py @@ -99,8 +99,6 @@ def handler(event, context) def decorate(*args, **kwargs): try: response = lambda_handler(*args, **kwargs) - except Exception: - raise finally: metrics = self.serialize_metric_set() logger.debug("Publishing metrics", {"metrics": metrics}) diff --git a/python/aws_lambda_powertools/middleware_factory/exceptions.py b/python/aws_lambda_powertools/middleware_factory/exceptions.py new file mode 100644 index 00000000000..55d5b2342bb --- /dev/null +++ b/python/aws_lambda_powertools/middleware_factory/exceptions.py @@ -0,0 +1,2 @@ +class MiddlewareInvalidArgumentError(Exception): + pass diff --git a/python/aws_lambda_powertools/middleware_factory/factory.py b/python/aws_lambda_powertools/middleware_factory/factory.py index cf70fad22d9..43c8e5ad9fa 100644 --- a/python/aws_lambda_powertools/middleware_factory/factory.py +++ b/python/aws_lambda_powertools/middleware_factory/factory.py @@ -6,6 +6,7 @@ from typing import Callable from ..tracing import Tracer +from .exceptions import MiddlewareInvalidArgumentError logger = logging.getLogger(__name__) @@ -96,7 +97,7 @@ def lambda_handler(event, context): Raises ------ - TypeError + MiddlewareInvalidArgumentError When middleware receives non keyword=arguments """ @@ -112,7 +113,8 @@ def final_decorator(func: Callable = None, **kwargs): return functools.partial(final_decorator, **kwargs) if not inspect.isfunction(func): - raise TypeError( + # @custom_middleware(True) vs @custom_middleware(log_event=True) + raise MiddlewareInvalidArgumentError( f"Only keyword arguments is supported for middlewares: {decorator.__qualname__} received {func}" ) @@ -129,7 +131,7 @@ def wrapper(event, context): response = middleware() return response except Exception: - logger.exception(f"Caught exception in {decorator.__qualname__}", exc_info=True) + logger.exception(f"Caught exception in {decorator.__qualname__}") raise return wrapper diff --git a/python/tests/functional/test_logger.py b/python/tests/functional/test_logger.py index 7031cfd826c..dbac1f0a642 100644 --- a/python/tests/functional/test_logger.py +++ b/python/tests/functional/test_logger.py @@ -8,6 +8,7 @@ from aws_lambda_powertools.logging import Logger, MetricUnit, log_metric, logger_inject_lambda_context, logger_setup from aws_lambda_powertools.logging.logger import JsonFormatter, set_package_logger from aws_lambda_powertools.tracing import Tracer +from aws_lambda_powertools.logging.exceptions import InvalidLoggerSamplingRateError @pytest.fixture @@ -349,5 +350,5 @@ def test_logger_append_duplicated(stdout): def test_logger_invalid_sampling_rate(): - with pytest.raises(ValueError): + with pytest.raises(InvalidLoggerSamplingRateError): Logger(sampling_rate="TEST") diff --git 
a/python/tests/functional/test_middleware_factory.py b/python/tests/functional/test_middleware_factory.py index 141acf9d96f..e7571aab880 100644 --- a/python/tests/functional/test_middleware_factory.py +++ b/python/tests/functional/test_middleware_factory.py @@ -1,9 +1,11 @@ import json +import logging from typing import Callable import pytest from aws_lambda_powertools.middleware_factory import lambda_handler_decorator +from aws_lambda_powertools.middleware_factory.exceptions import MiddlewareInvalidArgumentError @pytest.fixture @@ -115,8 +117,26 @@ def log_event(handler, event, context, log_event=False): print(json.dumps(event)) return handler(event, context) - with pytest.raises(TypeError): + with pytest.raises(MiddlewareInvalidArgumentError): @log_event(True) def lambda_handler(evt, ctx): return True + + +def test_factory_middleware_exception_propagation(say_bye_middleware, say_hi_middleware): + class CustomMiddlewareException(Exception): + pass + + @lambda_handler_decorator + def raise_middleware(handler, evt, ctx): + raise CustomMiddlewareException("Raise middleware exception") + + @say_bye_middleware + @raise_middleware + @say_hi_middleware + def lambda_handler(evt, ctx): + return "hello world" + + with pytest.raises(CustomMiddlewareException): + lambda_handler({}, {}) From 802f1815d141f53cd682d3f202ef05f7050a20ee Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 24 Apr 2020 13:53:50 +0100 Subject: [PATCH 19/23] chore: linting --- python/aws_lambda_powertools/logging/logger.py | 4 ++-- python/tests/functional/test_logger.py | 2 +- python/tests/functional/test_middleware_factory.py | 1 - 3 files changed, 3 insertions(+), 4 deletions(-) diff --git a/python/aws_lambda_powertools/logging/logger.py b/python/aws_lambda_powertools/logging/logger.py index 8b164829437..02c0e912b12 100644 --- a/python/aws_lambda_powertools/logging/logger.py +++ b/python/aws_lambda_powertools/logging/logger.py @@ -342,7 +342,7 @@ class Logger(logging.Logger): debug log sampling rate, 0.0 by default stream: sys.stdout log stream, stdout by default - + Raises ------ InvalidLoggerSamplingRateError @@ -471,7 +471,7 @@ def set_package_logger( ): """Set an additional stream handler, formatter, and log level for aws_lambda_powertools package logger. - **Package log by default is supressed (NullHandler), this should only used for debugging. + **Package log by default is supressed (NullHandler), this should only used for debugging. 
This is separate from application Logger class utility** Example diff --git a/python/tests/functional/test_logger.py b/python/tests/functional/test_logger.py index dbac1f0a642..c1a882f3320 100644 --- a/python/tests/functional/test_logger.py +++ b/python/tests/functional/test_logger.py @@ -6,9 +6,9 @@ import pytest from aws_lambda_powertools.logging import Logger, MetricUnit, log_metric, logger_inject_lambda_context, logger_setup +from aws_lambda_powertools.logging.exceptions import InvalidLoggerSamplingRateError from aws_lambda_powertools.logging.logger import JsonFormatter, set_package_logger from aws_lambda_powertools.tracing import Tracer -from aws_lambda_powertools.logging.exceptions import InvalidLoggerSamplingRateError @pytest.fixture diff --git a/python/tests/functional/test_middleware_factory.py b/python/tests/functional/test_middleware_factory.py index e7571aab880..ee8078f801a 100644 --- a/python/tests/functional/test_middleware_factory.py +++ b/python/tests/functional/test_middleware_factory.py @@ -1,5 +1,4 @@ import json -import logging from typing import Callable import pytest From 9392afbe085cc67fed5ee1333ed7a77d710cfd41 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 24 Apr 2020 16:54:01 +0100 Subject: [PATCH 20/23] fix: metric spillover at 100, not 101 --- python/aws_lambda_powertools/metrics/base.py | 13 ++++++------ python/tests/functional/test_metrics.py | 21 ++++++++++---------- 2 files changed, 18 insertions(+), 16 deletions(-) diff --git a/python/aws_lambda_powertools/metrics/base.py b/python/aws_lambda_powertools/metrics/base.py index 9f6ae20bc9e..249bee4aefc 100644 --- a/python/aws_lambda_powertools/metrics/base.py +++ b/python/aws_lambda_powertools/metrics/base.py @@ -18,6 +18,7 @@ with _schema_path.open() as f: CLOUDWATCH_EMF_SCHEMA = json.load(f) +MAX_METRICS = 100 class MetricManager: """Base class for metric functionality (namespace, metric, dimension, serialization) @@ -100,12 +101,6 @@ def add_metric(self, name: str, unit: MetricUnit, value: Union[float, int]): MetricUnitError When metric unit is not supported by CloudWatch """ - if len(self.metric_set) == 100: - logger.debug("Exceeded maximum of 100 metrics - Publishing existing metric set") - metrics = self.serialize_metric_set() - print(json.dumps(metrics)) - self.metric_set = {} - if not isinstance(value, numbers.Number): raise MetricValueError(f"{value} is not a valid number") @@ -120,6 +115,12 @@ def add_metric(self, name: str, unit: MetricUnit, value: Union[float, int]): logger.debug(f"Adding metric: {name} with {metric}") self.metric_set[name] = metric + if len(self.metric_set) == MAX_METRICS: + logger.debug(f"Exceeded maximum of {MAX_METRICS} metrics - Publishing existing metric set") + metrics = self.serialize_metric_set() + print(json.dumps(metrics)) + self.metric_set = {} + def serialize_metric_set(self, metrics: Dict = None, dimensions: Dict = None) -> Dict: """Serializes metric and dimensions set diff --git a/python/tests/functional/test_metrics.py b/python/tests/functional/test_metrics.py index 65183f47b05..3b20f992f90 100644 --- a/python/tests/functional/test_metrics.py +++ b/python/tests/functional/test_metrics.py @@ -57,15 +57,16 @@ def a_hundred_metrics() -> List[Dict[str, str]]: def serialize_metrics(metrics: List[Dict], dimensions: List[Dict], namespace: Dict) -> Dict: """ Helper function to build EMF object from a list of metrics, dimensions """ - my_metrics = MetricManager() - for metric in metrics: - my_metrics.add_metric(**metric) - + my_metrics = Metrics() for dimension in 
dimensions: my_metrics.add_dimension(**dimension) my_metrics.add_namespace(**namespace) - return my_metrics.serialize_metric_set() + for metric in metrics: + my_metrics.add_metric(**metric) + + if len(metrics) != 100: + return my_metrics.serialize_metric_set() def serialize_single_metric(metric: Dict, dimension: Dict, namespace: Dict) -> Dict: @@ -173,10 +174,10 @@ def test_namespace_env_var(monkeypatch, capsys, metric, dimension, namespace): assert expected["_aws"] == output["_aws"] -def test_metrics_spillover(capsys, metric, dimension, namespace, a_hundred_metrics): +def test_metrics_spillover(monkeypatch, capsys, metric, dimension, namespace, a_hundred_metrics): my_metrics = Metrics() - my_metrics.add_namespace(**namespace) my_metrics.add_dimension(**dimension) + my_metrics.add_namespace(**namespace) for _metric in a_hundred_metrics: my_metrics.add_metric(**_metric) @@ -194,9 +195,9 @@ def lambda_handler(evt, handler): single_metric = json.loads(single_metric) expected_single_metric = serialize_single_metric(metric=metric, dimension=dimension, namespace=namespace) - expected_spillover_metrics = serialize_metrics( - metrics=a_hundred_metrics, dimensions=[dimension], namespace=namespace - ) + + serialize_metrics(metrics=a_hundred_metrics, dimensions=[dimension], namespace=namespace) + expected_spillover_metrics = json.loads(capsys.readouterr().out.strip()) remove_timestamp(metrics=[spillover_metrics, expected_spillover_metrics, single_metric, expected_single_metric]) From fd780be95b6d0837c40f60edf4f26d97192a1cf4 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 24 Apr 2020 17:05:47 +0100 Subject: [PATCH 21/23] fix: trace auto-disable, doc edge cases --- python/README.md | 6 +++--- python/aws_lambda_powertools/tracing/tracer.py | 5 ++--- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/python/README.md b/python/README.md index d0e6481291d..ff22f799077 100644 --- a/python/README.md +++ b/python/README.md @@ -13,7 +13,7 @@ A suite of utilities for AWS Lambda Functions that makes tracing with AWS X-Ray, > It currently uses AWS X-Ray * Decorators that capture cold start as annotation, and response and exceptions as metadata -* Run functions locally without code change to disable tracing +* Run functions locally with SAM CLI without code change to disable tracing * Explicitly disable tracing via env var `POWERTOOLS_TRACE_DISABLED="true"` **Logging** @@ -134,8 +134,7 @@ Globals: from aws_lambda_powertools.logging import Logger logger = Logger() -# Logger(service="payment") # also accepts explicit service name -# Logger(level="INFO") # also accepts explicit log level +# Logger(service="payment", level="INFO") # also accepts explicit service name, log level @logger.inject_lambda_context def handler(event, context) @@ -268,6 +267,7 @@ with single_metric(name="ColdStart", unit=MetricUnit.Count, value=1) as metric: metric.add_dimension(name="function_context", value="$LATEST") ``` +> **NOTE**: If you want to instantiate Metrics() in multiple places in your code, make sure to use `POWERTOOLS_METRICS_NAMESPACE` env var as we don't keep a copy of that across instances. 
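
A short sketch of the note above: setting the namespace once via `POWERTOOLS_METRICS_NAMESPACE` keeps separately instantiated `Metrics()` objects consistent. The top-level import path for `Metrics`/`MetricUnit` is assumed from the package layout shown in these patches; `add_dimension`, `add_metric` and `log_metrics` follow the signatures used in the functional tests.

```python
import os

from aws_lambda_powertools.metrics import Metrics, MetricUnit  # import path assumed

# Assumed: namespace shared across Metrics() instances via the environment variable
os.environ["POWERTOOLS_METRICS_NAMESPACE"] = "DemoApp"

metrics = Metrics()
metrics.add_dimension(name="service", value="payment")
metrics.add_metric(name="SuccessfulPayment", unit=MetricUnit.Count, value=1)

@metrics.log_metrics
def lambda_handler(event, context):
    # metrics are serialized and printed as an EMF blob after the handler returns
    return True
```
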
### Utilities diff --git a/python/aws_lambda_powertools/tracing/tracer.py b/python/aws_lambda_powertools/tracing/tracer.py index b347a67b3c9..19cc319492f 100644 --- a/python/aws_lambda_powertools/tracing/tracer.py +++ b/python/aws_lambda_powertools/tracing/tracer.py @@ -369,8 +369,7 @@ def __is_trace_disabled(self) -> bool: bool """ logger.debug("Verifying whether Tracing has been disabled") - is_lambda_emulator = os.getenv("AWS_SAM_LOCAL") - is_lambda_env = os.getenv("LAMBDA_TASK_ROOT") + is_lambda_sam_cli = os.getenv("AWS_SAM_LOCAL") env_option = str(os.getenv("POWERTOOLS_TRACE_DISABLED", "false")) disabled_env = strtobool(env_option) @@ -378,7 +377,7 @@ def __is_trace_disabled(self) -> bool: logger.debug("Tracing has been disabled via env var POWERTOOLS_TRACE_DISABLED") return disabled_env - if is_lambda_emulator or is_lambda_env: + if is_lambda_sam_cli: logger.debug("Running under SAM CLI env or not in Lambda env; disabling Tracing") return True From bb1faf6b168b7f2608cc1724f08232b9145990d5 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 24 Apr 2020 17:06:58 +0100 Subject: [PATCH 22/23] chore: linting --- python/aws_lambda_powertools/metrics/base.py | 1 + python/tests/functional/test_metrics.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/python/aws_lambda_powertools/metrics/base.py b/python/aws_lambda_powertools/metrics/base.py index 249bee4aefc..8d561abf71d 100644 --- a/python/aws_lambda_powertools/metrics/base.py +++ b/python/aws_lambda_powertools/metrics/base.py @@ -20,6 +20,7 @@ MAX_METRICS = 100 + class MetricManager: """Base class for metric functionality (namespace, metric, dimension, serialization) diff --git a/python/tests/functional/test_metrics.py b/python/tests/functional/test_metrics.py index 3b20f992f90..703c8788ac1 100644 --- a/python/tests/functional/test_metrics.py +++ b/python/tests/functional/test_metrics.py @@ -195,7 +195,7 @@ def lambda_handler(evt, handler): single_metric = json.loads(single_metric) expected_single_metric = serialize_single_metric(metric=metric, dimension=dimension, namespace=namespace) - + serialize_metrics(metrics=a_hundred_metrics, dimensions=[dimension], namespace=namespace) expected_spillover_metrics = json.loads(capsys.readouterr().out.strip()) From 83e58fa9531d2d47fd9b89a1e8331690b26807b6 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 24 Apr 2020 17:26:32 +0100 Subject: [PATCH 23/23] chore: 0.8.0 version bump --- python/HISTORY.md | 6 ++++++ python/pyproject.toml | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/python/HISTORY.md b/python/HISTORY.md index 9d9d296c1c2..8001c9bba2c 100644 --- a/python/HISTORY.md +++ b/python/HISTORY.md @@ -1,5 +1,11 @@ # HISTORY +## April 24th + +* Introduces `Logger` for stuctured logging as a replacement for `logger_setup` +* Introduces `Logger.inject_lambda_context` decorator as a replacement for `logger_inject_lambda_context` +* Raise `DeprecationWarning` exception for both `logger_setup`, `logger_inject_lambda_context` + ## April 20th, 2020 **0.7.0** diff --git a/python/pyproject.toml b/python/pyproject.toml index 3360e7fd7ed..8b21c9dce86 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "aws_lambda_powertools" -version = "0.7.0" +version = "0.8.0" description = "Python utilities for AWS Lambda functions including but not limited to tracing, logging and custom metric" authors = ["Amazon Web Services"] classifiers=[