From 138bfc90864f6cf18a4ac975a8cd1ad6a9ccfcea Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Thu, 18 Aug 2022 16:51:08 +0200 Subject: [PATCH 01/12] chore: cleanup get_metrics output --- tests/e2e/metrics/test_metrics.py | 12 +++++------- tests/e2e/utils/data_fetcher/metrics.py | 12 ++++++------ 2 files changed, 11 insertions(+), 13 deletions(-) diff --git a/tests/e2e/metrics/test_metrics.py b/tests/e2e/metrics/test_metrics.py index 01d1ba2fbf1..516f93ac1f0 100644 --- a/tests/e2e/metrics/test_metrics.py +++ b/tests/e2e/metrics/test_metrics.py @@ -39,13 +39,12 @@ def test_basic_lambda_metric_is_visible(basic_handler_fn: str, basic_handler_fn_ event = json.dumps({"metrics": metrics, "service": service, "namespace": METRIC_NAMESPACE}) _, execution_time = data_fetcher.get_lambda_response(lambda_arn=basic_handler_fn_arn, payload=event) - my_metrics = data_fetcher.get_metrics( + metric_values = data_fetcher.get_metrics( namespace=METRIC_NAMESPACE, start_date=execution_time, metric_name=metric_name, dimensions=dimensions ) # THEN - metric_data = my_metrics.get("Values", []) - assert metric_data and metric_data[0] == 3.0 + assert metric_values == [3.0] def test_cold_start_metric(cold_start_fn_arn: str, cold_start_fn: str): @@ -58,12 +57,11 @@ def test_cold_start_metric(cold_start_fn_arn: str, cold_start_fn: str): event = json.dumps({"service": service, "namespace": METRIC_NAMESPACE}) _, execution_time = data_fetcher.get_lambda_response(lambda_arn=cold_start_fn_arn, payload=event) - _, _ = data_fetcher.get_lambda_response(lambda_arn=cold_start_fn_arn, payload=event) + data_fetcher.get_lambda_response(lambda_arn=cold_start_fn_arn, payload=event) - my_metrics = data_fetcher.get_metrics( + metric_values = data_fetcher.get_metrics( namespace=METRIC_NAMESPACE, start_date=execution_time, metric_name=metric_name, dimensions=dimensions ) # THEN - metric_data = my_metrics.get("Values", []) - assert metric_data and metric_data[0] == 1.0 + assert metric_values == [1.0] diff --git 
a/tests/e2e/utils/data_fetcher/metrics.py b/tests/e2e/utils/data_fetcher/metrics.py index 5a017f0a845..18023b18336 100644 --- a/tests/e2e/utils/data_fetcher/metrics.py +++ b/tests/e2e/utils/data_fetcher/metrics.py @@ -3,7 +3,7 @@ import boto3 from mypy_boto3_cloudwatch import CloudWatchClient -from mypy_boto3_cloudwatch.type_defs import DimensionTypeDef, MetricDataResultTypeDef +from mypy_boto3_cloudwatch.type_defs import DimensionTypeDef from retry import retry from tests.e2e.utils.data_builder import build_metric_query_data @@ -19,7 +19,7 @@ def get_metrics( end_date: Optional[datetime] = None, period: int = 60, stat: str = "Sum", -) -> MetricDataResultTypeDef: +) -> List[float]: """Fetch CloudWatch Metrics It takes into account eventual consistency with up to 10 retries and 1.5s jitter. @@ -45,8 +45,8 @@ def get_metrics( Returns ------- - MetricDataResultTypeDef - Dict with metric values found + List[float] + List with metric values found Raises ------ @@ -65,7 +65,7 @@ def get_metrics( StartTime=start_date, EndTime=end_date or datetime.utcnow(), ) - result = response["MetricDataResults"][0] - if not result["Values"]: + result = response["MetricDataResults"][0]["Values"] + if not result: raise ValueError("Empty response from Cloudwatch. 
Repeating...") return result From 338698b64b8ddde393a8a2f719d998a79e6b768b Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Thu, 18 Aug 2022 17:31:28 +0200 Subject: [PATCH 02/12] feat: move logger to new infrastructure --- tests/e2e/logger/conftest.py | 25 ++++ .../e2e/logger/handlers/no_context_handler.py | 14 -- tests/e2e/logger/infrastructure.py | 19 +++ tests/e2e/logger/test_logger.py | 129 ++---------------- 4 files changed, 55 insertions(+), 132 deletions(-) create mode 100644 tests/e2e/logger/conftest.py delete mode 100644 tests/e2e/logger/handlers/no_context_handler.py create mode 100644 tests/e2e/logger/infrastructure.py diff --git a/tests/e2e/logger/conftest.py b/tests/e2e/logger/conftest.py new file mode 100644 index 00000000000..201a5f7dca1 --- /dev/null +++ b/tests/e2e/logger/conftest.py @@ -0,0 +1,25 @@ +import pytest + +from tests.e2e.logger.infrastructure import LoggerStack +from tests.e2e.utils.infrastructure import deploy_once + + +@pytest.fixture(autouse=True, scope="module") +def infrastructure(request: pytest.FixtureRequest, tmp_path_factory: pytest.TempPathFactory, worker_id: str): + """Setup and teardown logic for E2E test infrastructure + + Parameters + ---------- + request : pytest.FixtureRequest + pytest request fixture to introspect absolute path to test being executed + tmp_path_factory : pytest.TempPathFactory + pytest temporary path factory to discover shared tmp when multiple CPU processes are spun up + worker_id : str + pytest-xdist worker identification to detect whether parallelization is enabled + + Yields + ------ + Dict[str, str] + CloudFormation Outputs from deployed infrastructure + """ + yield from deploy_once(stack=LoggerStack, request=request, tmp_path_factory=tmp_path_factory, worker_id=worker_id) diff --git a/tests/e2e/logger/handlers/no_context_handler.py b/tests/e2e/logger/handlers/no_context_handler.py deleted file mode 100644 index 1347ba98d81..00000000000 --- a/tests/e2e/logger/handlers/no_context_handler.py +++ 
/dev/null @@ -1,14 +0,0 @@ -import os - -from aws_lambda_powertools import Logger - -logger = Logger() - -MESSAGE = os.environ["MESSAGE"] -ADDITIONAL_KEY = os.environ["ADDITIONAL_KEY"] - - -def lambda_handler(event, context): - logger.info(MESSAGE) - logger.append_keys(**{ADDITIONAL_KEY: "test"}) - return "success" diff --git a/tests/e2e/logger/infrastructure.py b/tests/e2e/logger/infrastructure.py new file mode 100644 index 00000000000..11c55d0e8fc --- /dev/null +++ b/tests/e2e/logger/infrastructure.py @@ -0,0 +1,19 @@ +from pathlib import Path + +from tests.e2e.utils.infrastructure import BaseInfrastructureV2 + + +class LoggerStack(BaseInfrastructureV2): + LOG_MESSAGE: str = "logger message test" + LOG_LEVEL: str = "INFO" + + def __init__(self, handlers_dir: Path, feature_name: str = "logger") -> None: + super().__init__(feature_name, handlers_dir) + + def create_resources(self): + env_vars = { + "MESSAGE": self.LOG_MESSAGE, + "LOG_LEVEL": self.LOG_LEVEL, + "ADDITIONAL_KEY": "extra_info", + } + self.create_lambda_functions(function_props={"environment": env_vars}) diff --git a/tests/e2e/logger/test_logger.py b/tests/e2e/logger/test_logger.py index 992cf779275..0a452210cb4 100644 --- a/tests/e2e/logger/test_logger.py +++ b/tests/e2e/logger/test_logger.py @@ -1,58 +1,22 @@ import boto3 import pytest -from e2e import conftest from tests.e2e.utils import data_fetcher -@pytest.fixture(scope="module") -def config() -> conftest.LambdaConfig: - return { - "parameters": {}, - "environment_variables": { - "MESSAGE": "logger message test", - "LOG_LEVEL": "INFO", - "ADDITIONAL_KEY": "extra_info", - }, - } +@pytest.fixture +def basic_handler_fn(infrastructure: dict) -> str: + return infrastructure.get("BasicHandler", "") -def test_basic_lambda_logs_visible(execute_lambda: conftest.InfrastructureOutput, config: conftest.LambdaConfig): - # GIVEN - lambda_name = execute_lambda.get_lambda_function_name(cf_output_name="basichandlerarn") - timestamp = 
execute_lambda.get_lambda_execution_time_timestamp() - cw_client = boto3.client("logs") - - # WHEN - filtered_logs = data_fetcher.get_logs(lambda_function_name=lambda_name, start_time=timestamp, log_client=cw_client) +@pytest.fixture +def basic_handler_fn_arn(infrastructure: dict) -> str: + return infrastructure.get("BasicHandlerArn", "") - # THEN - assert any( - log.message == config["environment_variables"]["MESSAGE"] - and log.level == config["environment_variables"]["LOG_LEVEL"] - for log in filtered_logs - ) - -def test_basic_lambda_no_debug_logs_visible( - execute_lambda: conftest.InfrastructureOutput, config: conftest.LambdaConfig -): +def test_basic_lambda_logs_visible(basic_handler_fn, basic_handler_fn_arn): # GIVEN - lambda_name = execute_lambda.get_lambda_function_name(cf_output_name="basichandlerarn") - timestamp = execute_lambda.get_lambda_execution_time_timestamp() cw_client = boto3.client("logs") - - # WHEN - filtered_logs = data_fetcher.get_logs(lambda_function_name=lambda_name, start_time=timestamp, log_client=cw_client) - - # THEN - assert not any( - log.message == config["environment_variables"]["MESSAGE"] and log.level == "DEBUG" for log in filtered_logs - ) - - -def test_basic_lambda_contextual_data_logged(execute_lambda: conftest.InfrastructureOutput): - # GIVEN required_keys = ( "xray_trace_id", "function_request_id", @@ -62,82 +26,11 @@ def test_basic_lambda_contextual_data_logged(execute_lambda: conftest.Infrastruc "cold_start", ) - lambda_name = execute_lambda.get_lambda_function_name(cf_output_name="basichandlerarn") - timestamp = execute_lambda.get_lambda_execution_time_timestamp() - cw_client = boto3.client("logs") - # WHEN - filtered_logs = data_fetcher.get_logs(lambda_function_name=lambda_name, start_time=timestamp, log_client=cw_client) - - # THEN - assert all(keys in logs.dict(exclude_unset=True) for logs in filtered_logs for keys in required_keys) - - -def test_basic_lambda_additional_key_persistence_basic_lambda( - execute_lambda: 
conftest.InfrastructureOutput, config: conftest.LambdaConfig -): - # GIVEN - lambda_name = execute_lambda.get_lambda_function_name(cf_output_name="basichandlerarn") - timestamp = execute_lambda.get_lambda_execution_time_timestamp() - cw_client = boto3.client("logs") - - # WHEN - filtered_logs = data_fetcher.get_logs(lambda_function_name=lambda_name, start_time=timestamp, log_client=cw_client) - - # THEN - assert any( - log.extra_info - and log.message == config["environment_variables"]["MESSAGE"] - and log.level == config["environment_variables"]["LOG_LEVEL"] - for log in filtered_logs + _, execution_time = data_fetcher.get_lambda_response(lambda_arn=basic_handler_fn_arn) + filtered_logs = data_fetcher.get_logs( + lambda_function_name=basic_handler_fn, start_time=int(execution_time.timestamp()), log_client=cw_client ) - -def test_basic_lambda_empty_event_logged(execute_lambda: conftest.InfrastructureOutput): - - # GIVEN - lambda_name = execute_lambda.get_lambda_function_name(cf_output_name="basichandlerarn") - timestamp = execute_lambda.get_lambda_execution_time_timestamp() - cw_client = boto3.client("logs") - - # WHEN - filtered_logs = data_fetcher.get_logs(lambda_function_name=lambda_name, start_time=timestamp, log_client=cw_client) - - # THEN - assert any(log.message == {} for log in filtered_logs) - - -def test_no_context_lambda_contextual_data_not_logged(execute_lambda: conftest.InfrastructureOutput): - - # GIVEN - required_missing_keys = ( - "function_request_id", - "function_arn", - "function_memory_size", - "function_name", - "cold_start", - ) - - lambda_name = execute_lambda.get_lambda_function_name(cf_output_name="nocontexthandlerarn") - timestamp = execute_lambda.get_lambda_execution_time_timestamp() - cw_client = boto3.client("logs") - - # WHEN - filtered_logs = data_fetcher.get_logs(lambda_function_name=lambda_name, start_time=timestamp, log_client=cw_client) - # THEN - assert not any(keys in logs.dict(exclude_unset=True) for logs in filtered_logs for 
keys in required_missing_keys) - - -def test_no_context_lambda_event_not_logged(execute_lambda: conftest.InfrastructureOutput): - - # GIVEN - lambda_name = execute_lambda.get_lambda_function_name(cf_output_name="nocontexthandlerarn") - timestamp = execute_lambda.get_lambda_execution_time_timestamp() - cw_client = boto3.client("logs") - - # WHEN - filtered_logs = data_fetcher.get_logs(lambda_function_name=lambda_name, start_time=timestamp, log_client=cw_client) - - # THEN - assert not any(log.message == {} for log in filtered_logs) + assert all(keys in logs.dict(exclude_unset=True) for logs in filtered_logs for keys in required_keys) From 231f6f3f0d9b0b14b5cfdd083f9ebe6af93f2ba8 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 19 Aug 2022 13:00:15 +0200 Subject: [PATCH 03/12] feat: get_logs to accept datetime for consistency --- tests/e2e/logger/test_logger.py | 2 +- tests/e2e/utils/data_fetcher/logs.py | 8 ++++++-- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/tests/e2e/logger/test_logger.py b/tests/e2e/logger/test_logger.py index 0a452210cb4..108c7caff94 100644 --- a/tests/e2e/logger/test_logger.py +++ b/tests/e2e/logger/test_logger.py @@ -29,7 +29,7 @@ def test_basic_lambda_logs_visible(basic_handler_fn, basic_handler_fn_arn): # WHEN _, execution_time = data_fetcher.get_lambda_response(lambda_arn=basic_handler_fn_arn) filtered_logs = data_fetcher.get_logs( - lambda_function_name=basic_handler_fn, start_time=int(execution_time.timestamp()), log_client=cw_client + lambda_function_name=basic_handler_fn, start_time=execution_time, log_client=cw_client ) # THEN diff --git a/tests/e2e/utils/data_fetcher/logs.py b/tests/e2e/utils/data_fetcher/logs.py index e8211eeea30..29d130bd7c4 100644 --- a/tests/e2e/utils/data_fetcher/logs.py +++ b/tests/e2e/utils/data_fetcher/logs.py @@ -1,4 +1,5 @@ import json +from datetime import datetime from functools import lru_cache from typing import List, Optional, Union @@ -24,10 +25,13 @@ class Log(BaseModel): 
@lru_cache(maxsize=10, typed=False) @retry(ValueError, delay=1, jitter=1, tries=20) -def get_logs(lambda_function_name: str, log_client: CloudWatchClient, start_time: int, **kwargs: dict) -> List[Log]: - response = log_client.filter_log_events(logGroupName=f"/aws/lambda/{lambda_function_name}", startTime=start_time) +def get_logs(lambda_function_name: str, log_client: CloudWatchClient, start_time: datetime) -> List[Log]: + response = log_client.filter_log_events( + logGroupName=f"/aws/lambda/{lambda_function_name}", startTime=int(start_time.timestamp()) + ) if not response["events"]: raise ValueError("Empty response from Cloudwatch Logs. Repeating...") + filtered_logs = [] for event in response["events"]: try: From 528f5a85b14bed5b4e8760a75b36c6fa0c1ed711 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 19 Aug 2022 13:08:25 +0200 Subject: [PATCH 04/12] feat: make log_client optional and fix client type --- poetry.lock | 44 ++++++++++++++++++++-------- pyproject.toml | 1 + tests/e2e/logger/test_logger.py | 6 +--- tests/e2e/utils/data_fetcher/logs.py | 10 +++++-- 4 files changed, 41 insertions(+), 20 deletions(-) diff --git a/poetry.lock b/poetry.lock index c178b0c694c..905c852476c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -678,7 +678,7 @@ mkdocs = ">=0.17" [[package]] name = "mkdocs-material" -version = "8.3.9" +version = "8.4.0" description = "Documentation that simply works" category = "dev" optional = false @@ -774,6 +774,17 @@ python-versions = ">=3.6" [package.dependencies] typing-extensions = ">=4.1.0" +[[package]] +name = "mypy-boto3-logs" +version = "1.24.36.post1" +description = "Type annotations for boto3.CloudWatchLogs 1.24.36 service generated with mypy-boto3-builder 7.10.0" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +typing-extensions = ">=4.1.0" + [[package]] name = "mypy-boto3-s3" version = "1.24.36.post1" @@ -950,12 +961,15 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 
[[package]] name = "pygments" -version = "2.12.0" +version = "2.13.0" description = "Pygments is a syntax highlighting package written in Python." category = "dev" optional = false python-versions = ">=3.6" +[package.extras] +plugins = ["importlib-metadata"] + [[package]] name = "pymdown-extensions" version = "9.5" @@ -1254,7 +1268,7 @@ python-versions = ">=3.6" [[package]] name = "types-requests" -version = "2.28.8" +version = "2.28.9" description = "Typing stubs for requests" category = "dev" optional = false @@ -1265,7 +1279,7 @@ types-urllib3 = "<1.27" [[package]] name = "types-urllib3" -version = "1.26.22" +version = "1.26.23" description = "Typing stubs for urllib3" category = "dev" optional = false @@ -1342,7 +1356,7 @@ pydantic = ["pydantic", "email-validator"] [metadata] lock-version = "1.1" python-versions = "^3.6.2" -content-hash = "f9e26c18e24673e05314f2664f1442157e34b70ba4bdb9f912d149df96003eb9" +content-hash = "77b3593db443d2972a854cf7eaf6643e33315d5da218933f360b33a2e3bb945d" [metadata.files] atomicwrites = [ @@ -1633,8 +1647,8 @@ mkdocs-git-revision-date-plugin = [ {file = "mkdocs_git_revision_date_plugin-0.3.2-py3-none-any.whl", hash = "sha256:2e67956cb01823dd2418e2833f3623dee8604cdf223bddd005fe36226a56f6ef"}, ] mkdocs-material = [ - {file = "mkdocs-material-8.3.9.tar.gz", hash = "sha256:dc82b667d2a83f0de581b46a6d0949732ab77e7638b87ea35b770b33bc02e75a"}, - {file = "mkdocs_material-8.3.9-py2.py3-none-any.whl", hash = "sha256:263f2721f3abe533b61f7c8bed435a0462620912742c919821ac2d698b4bfe67"}, + {file = "mkdocs-material-8.4.0.tar.gz", hash = "sha256:6c0a6e6cda8b43956e0c562374588160af8110584a1444f422b1cfd91930f9c7"}, + {file = "mkdocs_material-8.4.0-py2.py3-none-any.whl", hash = "sha256:ef6641e1910d4f217873ac376b4594f3157dca3949901b88b4991ba8e5477577"}, ] mkdocs-material-extensions = [ {file = "mkdocs-material-extensions-1.0.3.tar.gz", hash = "sha256:bfd24dfdef7b41c312ede42648f9eb83476ea168ec163b613f9abd12bbfddba2"}, @@ -1685,6 +1699,10 @@ 
mypy-boto3-lambda = [ {file = "mypy-boto3-lambda-1.24.0.tar.gz", hash = "sha256:ab425f941d0d50a2b8a20cc13cebe03c3097b122259bf00e7b295d284814bd6f"}, {file = "mypy_boto3_lambda-1.24.0-py3-none-any.whl", hash = "sha256:a286a464513adf50847bda8573f2dc7adc348234827d1ac0200e610ee9a09b80"}, ] +mypy-boto3-logs = [ + {file = "mypy-boto3-logs-1.24.36.post1.tar.gz", hash = "sha256:8b00c2d5328e72023b1d1acd65e7cea7854f07827d23ce21c78391ca74271290"}, + {file = "mypy_boto3_logs-1.24.36.post1-py3-none-any.whl", hash = "sha256:f96257ec06099bfda1ce5f35b410e7fb93fb601bc312e8d7a09b13adaefd23f0"}, +] mypy-boto3-s3 = [ {file = "mypy-boto3-s3-1.24.36.post1.tar.gz", hash = "sha256:3bd7e06f9ade5059eae2181d7a9f1a41e7fa807ad3e94c01c9901838e87e0abe"}, {file = "mypy_boto3_s3-1.24.36.post1-py3-none-any.whl", hash = "sha256:30ae59b33c55f8b7b693170f9519ea5b91a2fbf31a73de79cdef57a27d784e5a"}, @@ -1785,8 +1803,8 @@ pyflakes = [ {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, ] pygments = [ - {file = "Pygments-2.12.0-py3-none-any.whl", hash = "sha256:dc9c10fb40944260f6ed4c688ece0cd2048414940f1cea51b8b226318411c519"}, - {file = "Pygments-2.12.0.tar.gz", hash = "sha256:5eb116118f9612ff1ee89ac96437bb6b49e8f04d8a13b514ba26f620208e26eb"}, + {file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"}, + {file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"}, ] pymdown-extensions = [ {file = "pymdown_extensions-9.5-py3-none-any.whl", hash = "sha256:ec141c0f4983755349f0c8710416348d1a13753976c028186ed14f190c8061c4"}, @@ -2007,12 +2025,12 @@ typed-ast = [ {file = "typed_ast-1.5.4.tar.gz", hash = "sha256:39e21ceb7388e4bb37f4c679d72707ed46c2fbf2a5609b8b8ebc4b067d977df2"}, ] types-requests = [ - {file = "types-requests-2.28.8.tar.gz", hash = "sha256:7a9f7b152d594a1c18dd4932cdd2596b8efbeedfd73caa4e4abb3755805b4685"}, - 
{file = "types_requests-2.28.8-py3-none-any.whl", hash = "sha256:b0421f9f2d0dd0f8df2c75f974686517ca67473f05b466232d4c6384d765ad7a"}, + {file = "types-requests-2.28.9.tar.gz", hash = "sha256:feaf581bd580497a47fe845d506fa3b91b484cf706ff27774e87659837de9962"}, + {file = "types_requests-2.28.9-py3-none-any.whl", hash = "sha256:86cb66d3de2f53eac5c09adc42cf6547eefbd0c7e1210beca1ee751c35d96083"}, ] types-urllib3 = [ - {file = "types-urllib3-1.26.22.tar.gz", hash = "sha256:b05af90e73889e688094008a97ca95788db8bf3736e2776fd43fb6b171485d94"}, - {file = "types_urllib3-1.26.22-py3-none-any.whl", hash = "sha256:09a8783e1002472e8d1e1f3792d4c5cca1fffebb9b48ee1512aae6d16fe186bc"}, + {file = "types-urllib3-1.26.23.tar.gz", hash = "sha256:b78e819f0e350221d0689a5666162e467ba3910737bafda14b5c2c85e9bb1e56"}, + {file = "types_urllib3-1.26.23-py3-none-any.whl", hash = "sha256:333e675b188a1c1fd980b4b352f9e40572413a4c1ac689c23cd546e96310070a"}, ] typing-extensions = [ {file = "typing_extensions-4.3.0-py3-none-any.whl", hash = "sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02"}, diff --git a/pyproject.toml b/pyproject.toml index 481652d9c30..ae6e1a5d56a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -65,6 +65,7 @@ mypy-boto3-lambda = "^1.24.0" mypy-boto3-xray = "^1.24.0" mypy-boto3-s3 = { version = "^1.24.0", python = ">=3.7" } mypy-boto3-cloudformation = { version = "^1.24.0", python = ">=3.7" } +mypy-boto3-logs = { version = "^1.24.0", python = ">=3.7" } types-requests = "^2.28.8" typing-extensions = { version = "^4.3.0", python = ">=3.7" } python-snappy = "^0.6.1" diff --git a/tests/e2e/logger/test_logger.py b/tests/e2e/logger/test_logger.py index 108c7caff94..97a0d472881 100644 --- a/tests/e2e/logger/test_logger.py +++ b/tests/e2e/logger/test_logger.py @@ -1,4 +1,3 @@ -import boto3 import pytest from tests.e2e.utils import data_fetcher @@ -16,7 +15,6 @@ def basic_handler_fn_arn(infrastructure: dict) -> str: def test_basic_lambda_logs_visible(basic_handler_fn, 
basic_handler_fn_arn): # GIVEN - cw_client = boto3.client("logs") required_keys = ( "xray_trace_id", "function_request_id", @@ -28,9 +26,7 @@ def test_basic_lambda_logs_visible(basic_handler_fn, basic_handler_fn_arn): # WHEN _, execution_time = data_fetcher.get_lambda_response(lambda_arn=basic_handler_fn_arn) - filtered_logs = data_fetcher.get_logs( - lambda_function_name=basic_handler_fn, start_time=execution_time, log_client=cw_client - ) + filtered_logs = data_fetcher.get_logs(lambda_function_name=basic_handler_fn, start_time=execution_time) # THEN assert all(keys in logs.dict(exclude_unset=True) for logs in filtered_logs for keys in required_keys) diff --git a/tests/e2e/utils/data_fetcher/logs.py b/tests/e2e/utils/data_fetcher/logs.py index 29d130bd7c4..e79293802f0 100644 --- a/tests/e2e/utils/data_fetcher/logs.py +++ b/tests/e2e/utils/data_fetcher/logs.py @@ -3,7 +3,8 @@ from functools import lru_cache from typing import List, Optional, Union -from mypy_boto3_cloudwatch import CloudWatchClient +import boto3 +from mypy_boto3_logs import CloudWatchLogsClient from pydantic import BaseModel from retry import retry @@ -25,10 +26,15 @@ class Log(BaseModel): @lru_cache(maxsize=10, typed=False) @retry(ValueError, delay=1, jitter=1, tries=20) -def get_logs(lambda_function_name: str, log_client: CloudWatchClient, start_time: datetime) -> List[Log]: +def get_logs( + lambda_function_name: str, start_time: datetime, log_client: Optional[CloudWatchLogsClient] = None +) -> List[Log]: + log_client = log_client or boto3.client("logs") + response = log_client.filter_log_events( logGroupName=f"/aws/lambda/{lambda_function_name}", startTime=int(start_time.timestamp()) ) + if not response["events"]: raise ValueError("Empty response from Cloudwatch Logs. 
Repeating...") From 724289b8f7681b76b952291e691b05e0d63417c3 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 19 Aug 2022 13:12:16 +0200 Subject: [PATCH 05/12] chore: remove lru cache --- tests/e2e/utils/data_fetcher/logs.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/tests/e2e/utils/data_fetcher/logs.py b/tests/e2e/utils/data_fetcher/logs.py index e79293802f0..6fd52f54066 100644 --- a/tests/e2e/utils/data_fetcher/logs.py +++ b/tests/e2e/utils/data_fetcher/logs.py @@ -1,6 +1,5 @@ import json from datetime import datetime -from functools import lru_cache from typing import List, Optional, Union import boto3 @@ -24,8 +23,7 @@ class Log(BaseModel): extra_info: Optional[str] -@lru_cache(maxsize=10, typed=False) -@retry(ValueError, delay=1, jitter=1, tries=20) +@retry(ValueError, delay=2, jitter=1.5, tries=10) def get_logs( lambda_function_name: str, start_time: datetime, log_client: Optional[CloudWatchLogsClient] = None ) -> List[Log]: From d429be61d757efc54960713280b6a58264ad3f1f Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 19 Aug 2022 13:35:57 +0200 Subject: [PATCH 06/12] feat: accept filter_expression to reduce wanted logs --- tests/e2e/utils/data_fetcher/logs.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/tests/e2e/utils/data_fetcher/logs.py b/tests/e2e/utils/data_fetcher/logs.py index 6fd52f54066..efc65c60fd8 100644 --- a/tests/e2e/utils/data_fetcher/logs.py +++ b/tests/e2e/utils/data_fetcher/logs.py @@ -25,12 +25,18 @@ class Log(BaseModel): @retry(ValueError, delay=2, jitter=1.5, tries=10) def get_logs( - lambda_function_name: str, start_time: datetime, log_client: Optional[CloudWatchLogsClient] = None + lambda_function_name: str, + start_time: datetime, + log_client: Optional[CloudWatchLogsClient] = None, + filter_expression: Optional[str] = None, ) -> List[Log]: log_client = log_client or boto3.client("logs") + filter_expression = filter_expression or "message" # Logger message key response 
= log_client.filter_log_events( - logGroupName=f"/aws/lambda/{lambda_function_name}", startTime=int(start_time.timestamp()) + logGroupName=f"/aws/lambda/{lambda_function_name}", + startTime=int(start_time.timestamp()), + filterPattern=filter_expression, ) if not response["events"]: From efaef586c5138b4c350c3536194d91c134d66be7 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 19 Aug 2022 13:38:26 +0200 Subject: [PATCH 07/12] refactor: use function_name for consistency --- tests/e2e/logger/test_logger.py | 2 +- tests/e2e/utils/data_fetcher/logs.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/e2e/logger/test_logger.py b/tests/e2e/logger/test_logger.py index 97a0d472881..3aa7d7a671b 100644 --- a/tests/e2e/logger/test_logger.py +++ b/tests/e2e/logger/test_logger.py @@ -26,7 +26,7 @@ def test_basic_lambda_logs_visible(basic_handler_fn, basic_handler_fn_arn): # WHEN _, execution_time = data_fetcher.get_lambda_response(lambda_arn=basic_handler_fn_arn) - filtered_logs = data_fetcher.get_logs(lambda_function_name=basic_handler_fn, start_time=execution_time) + filtered_logs = data_fetcher.get_logs(function_name=basic_handler_fn, start_time=execution_time) # THEN assert all(keys in logs.dict(exclude_unset=True) for logs in filtered_logs for keys in required_keys) diff --git a/tests/e2e/utils/data_fetcher/logs.py b/tests/e2e/utils/data_fetcher/logs.py index efc65c60fd8..382a7b4941a 100644 --- a/tests/e2e/utils/data_fetcher/logs.py +++ b/tests/e2e/utils/data_fetcher/logs.py @@ -25,7 +25,7 @@ class Log(BaseModel): @retry(ValueError, delay=2, jitter=1.5, tries=10) def get_logs( - lambda_function_name: str, + function_name: str, start_time: datetime, log_client: Optional[CloudWatchLogsClient] = None, filter_expression: Optional[str] = None, @@ -34,7 +34,7 @@ def get_logs( filter_expression = filter_expression or "message" # Logger message key response = log_client.filter_log_events( - logGroupName=f"/aws/lambda/{lambda_function_name}", + 
logGroupName=f"/aws/lambda/{function_name}", startTime=int(start_time.timestamp()), filterPattern=filter_expression, ) From 3cce3a0533ef512ce394419bf8a0ad26e5f4b480 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 19 Aug 2022 14:03:47 +0200 Subject: [PATCH 08/12] fix: accept extra logging keys instead of dropping --- tests/e2e/utils/data_fetcher/logs.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/tests/e2e/utils/data_fetcher/logs.py b/tests/e2e/utils/data_fetcher/logs.py index 382a7b4941a..e372149b34b 100644 --- a/tests/e2e/utils/data_fetcher/logs.py +++ b/tests/e2e/utils/data_fetcher/logs.py @@ -4,11 +4,11 @@ import boto3 from mypy_boto3_logs import CloudWatchLogsClient -from pydantic import BaseModel +from pydantic import BaseModel, Extra from retry import retry -class Log(BaseModel): +class Log(BaseModel, extra=Extra.allow): level: str location: str message: Union[dict, str] @@ -20,7 +20,6 @@ class Log(BaseModel): function_arn: Optional[str] function_request_id: Optional[str] xray_trace_id: Optional[str] - extra_info: Optional[str] @retry(ValueError, delay=2, jitter=1.5, tries=10) From fc5e8a64418801f93aa74b67f8b8a1a6dc826453 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 19 Aug 2022 14:05:25 +0200 Subject: [PATCH 09/12] refactor: make logger e2e dynamic --- tests/e2e/logger/handlers/basic_handler.py | 14 ++++---------- tests/e2e/logger/infrastructure.py | 10 +--------- tests/e2e/logger/test_logger.py | 9 ++++++++- 3 files changed, 13 insertions(+), 20 deletions(-) diff --git a/tests/e2e/logger/handlers/basic_handler.py b/tests/e2e/logger/handlers/basic_handler.py index 34d7fb4678a..0f0dd46b4aa 100644 --- a/tests/e2e/logger/handlers/basic_handler.py +++ b/tests/e2e/logger/handlers/basic_handler.py @@ -1,17 +1,11 @@ -import os - from aws_lambda_powertools import Logger logger = Logger() -MESSAGE = os.environ["MESSAGE"] -ADDITIONAL_KEY = os.environ["ADDITIONAL_KEY"] - -@logger.inject_lambda_context(log_event=True) 
+@logger.inject_lambda_context def lambda_handler(event, context): - logger.debug(MESSAGE) - logger.info(MESSAGE) - logger.append_keys(**{ADDITIONAL_KEY: "test"}) - logger.info(MESSAGE) + message, append_keys = event.get("message", ""), event.get("append_keys", {}) + logger.append_keys(**append_keys) + logger.info(message) return "success" diff --git a/tests/e2e/logger/infrastructure.py b/tests/e2e/logger/infrastructure.py index 11c55d0e8fc..76595908206 100644 --- a/tests/e2e/logger/infrastructure.py +++ b/tests/e2e/logger/infrastructure.py @@ -4,16 +4,8 @@ class LoggerStack(BaseInfrastructureV2): - LOG_MESSAGE: str = "logger message test" - LOG_LEVEL: str = "INFO" - def __init__(self, handlers_dir: Path, feature_name: str = "logger") -> None: super().__init__(feature_name, handlers_dir) def create_resources(self): - env_vars = { - "MESSAGE": self.LOG_MESSAGE, - "LOG_LEVEL": self.LOG_LEVEL, - "ADDITIONAL_KEY": "extra_info", - } - self.create_lambda_functions(function_props={"environment": env_vars}) + self.create_lambda_functions() diff --git a/tests/e2e/logger/test_logger.py b/tests/e2e/logger/test_logger.py index 3aa7d7a671b..b071e4c0441 100644 --- a/tests/e2e/logger/test_logger.py +++ b/tests/e2e/logger/test_logger.py @@ -1,3 +1,6 @@ +import json +from uuid import uuid4 + import pytest from tests.e2e.utils import data_fetcher @@ -23,10 +26,14 @@ def test_basic_lambda_logs_visible(basic_handler_fn, basic_handler_fn_arn): "function_name", "cold_start", ) + message = "logs should be visible with default settings" + additional_keys = {"order_id": f"{uuid4()}"} + payload = json.dumps({"message": message, "append_keys": additional_keys}) # WHEN - _, execution_time = data_fetcher.get_lambda_response(lambda_arn=basic_handler_fn_arn) + _, execution_time = data_fetcher.get_lambda_response(lambda_arn=basic_handler_fn_arn, payload=payload) filtered_logs = data_fetcher.get_logs(function_name=basic_handler_fn, start_time=execution_time) # THEN assert all(keys in 
logs.dict(exclude_unset=True) for logs in filtered_logs for keys in required_keys) + assert any(getattr(logs, "order_id", False) for logs in filtered_logs) From 6981a78316042aed0cc137cccf0763958953b13f Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 19 Aug 2022 14:55:00 +0200 Subject: [PATCH 10/12] refactor: create LogFetcher to simply assertion --- aws_lambda_powertools/shared/constants.py | 9 +++ tests/e2e/logger/test_logger.py | 19 ++---- tests/e2e/utils/data_fetcher/logs.py | 83 +++++++++++++++++------ 3 files changed, 77 insertions(+), 34 deletions(-) diff --git a/aws_lambda_powertools/shared/constants.py b/aws_lambda_powertools/shared/constants.py index 45b46d236f9..48d94d88f1d 100644 --- a/aws_lambda_powertools/shared/constants.py +++ b/aws_lambda_powertools/shared/constants.py @@ -23,3 +23,12 @@ XRAY_SDK_CORE_MODULE: str = "aws_xray_sdk.core" IDEMPOTENCY_DISABLED_ENV: str = "POWERTOOLS_IDEMPOTENCY_DISABLED" + +LOGGER_LAMBDA_CONTEXT_KEYS = [ + "function_arn", + "function_memory_size", + "function_name", + "function_request_id", + "cold_start", + "xray_trace_id", +] diff --git a/tests/e2e/logger/test_logger.py b/tests/e2e/logger/test_logger.py index b071e4c0441..222cfd1077c 100644 --- a/tests/e2e/logger/test_logger.py +++ b/tests/e2e/logger/test_logger.py @@ -18,22 +18,17 @@ def basic_handler_fn_arn(infrastructure: dict) -> str: def test_basic_lambda_logs_visible(basic_handler_fn, basic_handler_fn_arn): # GIVEN - required_keys = ( - "xray_trace_id", - "function_request_id", - "function_arn", - "function_memory_size", - "function_name", - "cold_start", - ) message = "logs should be visible with default settings" - additional_keys = {"order_id": f"{uuid4()}"} + custom_key = "order_id" + additional_keys = {custom_key: f"{uuid4()}"} payload = json.dumps({"message": message, "append_keys": additional_keys}) # WHEN _, execution_time = data_fetcher.get_lambda_response(lambda_arn=basic_handler_fn_arn, payload=payload) - filtered_logs = 
data_fetcher.get_logs(function_name=basic_handler_fn, start_time=execution_time) + logs = data_fetcher.get_logs(function_name=basic_handler_fn, start_time=execution_time) # THEN - assert all(keys in logs.dict(exclude_unset=True) for logs in filtered_logs for keys in required_keys) - assert any(getattr(logs, "order_id", False) for logs in filtered_logs) + assert len(logs) == 1 + assert len(logs.get_cold_start_log()) == 1 + assert len(logs.get_log(key=custom_key)) == 1 + assert logs.have_logger_context_keys() is True diff --git a/tests/e2e/utils/data_fetcher/logs.py b/tests/e2e/utils/data_fetcher/logs.py index e372149b34b..b29a26be994 100644 --- a/tests/e2e/utils/data_fetcher/logs.py +++ b/tests/e2e/utils/data_fetcher/logs.py @@ -7,6 +7,8 @@ from pydantic import BaseModel, Extra from retry import retry +from aws_lambda_powertools.shared.constants import LOGGER_LAMBDA_CONTEXT_KEYS + class Log(BaseModel, extra=Extra.allow): level: str @@ -22,31 +24,68 @@ class Log(BaseModel, extra=Extra.allow): xray_trace_id: Optional[str] +class LogFetcher: + def __init__( + self, + function_name: str, + start_time: datetime, + log_client: Optional[CloudWatchLogsClient] = None, + filter_expression: Optional[str] = None, + ) -> None: + self.function_name = function_name + self.start_time = int(start_time.timestamp()) + self.log_client = log_client or boto3.client("logs") + self.filter_expression = filter_expression or "message" # Logger message key + self.log_group = f"/aws/lambda/{self.function_name}" + self.logs: List[Log] = self.get_logs() + + def get_logs(self): + ret = self.log_client.filter_log_events( + logGroupName=self.log_group, + startTime=self.start_time, + filterPattern=self.filter_expression, + ) + + if not ret["events"]: + raise ValueError("Empty response from Cloudwatch Logs. 
Repeating...") + + filtered_logs = [] + for event in ret["events"]: + try: + message = Log(**json.loads(event["message"])) + except json.decoder.JSONDecodeError: + continue + filtered_logs.append(message) + + return filtered_logs + + def get_log(self, key: str, value: Optional[any] = None) -> List[Log]: + logs = [] + for log in self.logs: + log_value = getattr(log, key, None) + if value is not None and log_value == value: + logs.append(log) + if value is None and getattr(log, key, False): + logs.append(log) + return logs + + def get_cold_start_log(self) -> List[Log]: + return [log for log in self.logs if log.cold_start] + + def have_logger_context_keys(self) -> bool: + return all(getattr(log, key, False) for log in self.logs for key in LOGGER_LAMBDA_CONTEXT_KEYS) + + def __len__(self) -> int: + return len(self.logs) + + @retry(ValueError, delay=2, jitter=1.5, tries=10) def get_logs( function_name: str, start_time: datetime, - log_client: Optional[CloudWatchLogsClient] = None, filter_expression: Optional[str] = None, -) -> List[Log]: - log_client = log_client or boto3.client("logs") - filter_expression = filter_expression or "message" # Logger message key - - response = log_client.filter_log_events( - logGroupName=f"/aws/lambda/{function_name}", - startTime=int(start_time.timestamp()), - filterPattern=filter_expression, + log_client: Optional[CloudWatchLogsClient] = None, +) -> LogFetcher: + return LogFetcher( + function_name=function_name, start_time=start_time, filter_expression=filter_expression, log_client=log_client ) - - if not response["events"]: - raise ValueError("Empty response from Cloudwatch Logs. 
Repeating...") - - filtered_logs = [] - for event in response["events"]: - try: - message = Log(**json.loads(event["message"])) - except json.decoder.JSONDecodeError: - continue - filtered_logs.append(message) - - return filtered_logs From d17d6eb7d9e3903059e622f20f1a3b2dd7000251 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 19 Aug 2022 15:23:27 +0200 Subject: [PATCH 11/12] feat: test cold start --- tests/e2e/logger/test_logger.py | 11 +++++++---- tests/e2e/utils/data_fetcher/logs.py | 8 +++----- 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/tests/e2e/logger/test_logger.py b/tests/e2e/logger/test_logger.py index 222cfd1077c..e5c27dd0a8f 100644 --- a/tests/e2e/logger/test_logger.py +++ b/tests/e2e/logger/test_logger.py @@ -3,6 +3,7 @@ import pytest +from aws_lambda_powertools.shared.constants import LOGGER_LAMBDA_CONTEXT_KEYS from tests.e2e.utils import data_fetcher @@ -25,10 +26,12 @@ def test_basic_lambda_logs_visible(basic_handler_fn, basic_handler_fn_arn): # WHEN _, execution_time = data_fetcher.get_lambda_response(lambda_arn=basic_handler_fn_arn, payload=payload) - logs = data_fetcher.get_logs(function_name=basic_handler_fn, start_time=execution_time) + data_fetcher.get_lambda_response(lambda_arn=basic_handler_fn_arn, payload=payload) # THEN - assert len(logs) == 1 + logs = data_fetcher.get_logs(function_name=basic_handler_fn, start_time=execution_time) + + assert len(logs) == 2 assert len(logs.get_cold_start_log()) == 1 - assert len(logs.get_log(key=custom_key)) == 1 - assert logs.have_logger_context_keys() is True + assert len(logs.get_log(key=custom_key)) == 2 + assert logs.have_keys(*LOGGER_LAMBDA_CONTEXT_KEYS) is True diff --git a/tests/e2e/utils/data_fetcher/logs.py b/tests/e2e/utils/data_fetcher/logs.py index b29a26be994..d75fc7c525b 100644 --- a/tests/e2e/utils/data_fetcher/logs.py +++ b/tests/e2e/utils/data_fetcher/logs.py @@ -7,8 +7,6 @@ from pydantic import BaseModel, Extra from retry import retry -from 
aws_lambda_powertools.shared.constants import LOGGER_LAMBDA_CONTEXT_KEYS - class Log(BaseModel, extra=Extra.allow): level: str @@ -65,15 +63,15 @@ def get_log(self, key: str, value: Optional[any] = None) -> List[Log]: log_value = getattr(log, key, None) if value is not None and log_value == value: logs.append(log) - if value is None and getattr(log, key, False): + elif value is None and hasattr(log, key): logs.append(log) return logs def get_cold_start_log(self) -> List[Log]: return [log for log in self.logs if log.cold_start] - def have_logger_context_keys(self) -> bool: - return all(getattr(log, key, False) for log in self.logs for key in LOGGER_LAMBDA_CONTEXT_KEYS) + def have_keys(self, *keys) -> bool: + return all(hasattr(log, key) for log in self.logs for key in keys) def __len__(self) -> int: return len(self.logs) From 4eabeebf41be39427bf0988358d0093293843279 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 19 Aug 2022 15:34:19 +0200 Subject: [PATCH 12/12] chore: document LogFetcher --- tests/e2e/utils/data_fetcher/logs.py | 97 ++++++++++++++++++++++------ 1 file changed, 78 insertions(+), 19 deletions(-) diff --git a/tests/e2e/utils/data_fetcher/logs.py b/tests/e2e/utils/data_fetcher/logs.py index d75fc7c525b..a005009f5f5 100644 --- a/tests/e2e/utils/data_fetcher/logs.py +++ b/tests/e2e/utils/data_fetcher/logs.py @@ -29,15 +29,72 @@ def __init__( start_time: datetime, log_client: Optional[CloudWatchLogsClient] = None, filter_expression: Optional[str] = None, - ) -> None: + ): + """Fetch and expose Powertools Logger logs from CloudWatch Logs + + Parameters + ---------- + function_name : str + Name of Lambda function to fetch logs for + start_time : datetime + Start date range to filter traces + log_client : Optional[CloudWatchLogsClient], optional + Amazon CloudWatch Logs Client, by default boto3.client('logs) + filter_expression : Optional[str], optional + CloudWatch Logs Filter Pattern expression, by default "message" + """ self.function_name = 
function_name self.start_time = int(start_time.timestamp()) self.log_client = log_client or boto3.client("logs") self.filter_expression = filter_expression or "message" # Logger message key self.log_group = f"/aws/lambda/{self.function_name}" - self.logs: List[Log] = self.get_logs() + self.logs: List[Log] = self._get_logs() + + def get_log(self, key: str, value: Optional[any] = None) -> List[Log]: + """Get logs based on key or key and value + + Parameters + ---------- + key : str + Log key name + value : Optional[any], optional + Log value, by default None + + Returns + ------- + List[Log] + List of Log instances + """ + logs = [] + for log in self.logs: + log_value = getattr(log, key, None) + if value is not None and log_value == value: + logs.append(log) + elif value is None and hasattr(log, key): + logs.append(log) + return logs + + def get_cold_start_log(self) -> List[Log]: + """Get logs where cold start was true - def get_logs(self): + Returns + ------- + List[Log] + List of Log instances + """ + return [log for log in self.logs if log.cold_start] + + def have_keys(self, *keys) -> bool: + """Whether an arbitrary number of key names exist in each log event + + Returns + ------- + bool + Whether keys are present + """ + return all(hasattr(log, key) for log in self.logs for key in keys) + + def _get_logs(self) -> List[Log]: ret = self.log_client.filter_log_events( logGroupName=self.log_group, startTime=self.start_time, @@ -57,22 +114,6 @@ def get_logs(self): return filtered_logs - def get_log(self, key: str, value: Optional[any] = None) -> List[Log]: - logs = [] - for log in self.logs: - log_value = getattr(log, key, None) - if value is not None and log_value == value: - logs.append(log) - elif value is None and hasattr(log, key): - logs.append(log) - return logs - - def get_cold_start_log(self) -> List[Log]: - return [log for log in self.logs if log.cold_start] - - def have_keys(self, *keys) -> bool: - return all(hasattr(log, key) for log in self.logs for key in 
keys) -

    def __len__(self) -> int:
        return len(self.logs)

@@ -84,6 +125,24 @@ def get_logs(
    function_name: str,
    start_time: datetime,
    filter_expression: Optional[str] = None,
    log_client: Optional[CloudWatchLogsClient] = None,
) -> LogFetcher:
+    """Fetch logs for a Lambda function and expose them via a LogFetcher instance
+
+    Parameters
+    ----------
+    function_name : str
+        Name of Lambda function to fetch logs for
+    start_time : datetime
+        Start date range to filter logs
+    log_client : Optional[CloudWatchLogsClient], optional
+        Amazon CloudWatch Logs Client, by default boto3.client('logs')
+    filter_expression : Optional[str], optional
+        CloudWatch Logs Filter Pattern expression, by default "message"
+
+    Returns
+    -------
+    LogFetcher
+        LogFetcher instance with logs available as properties and methods
+    """
    return LogFetcher(
        function_name=function_name, start_time=start_time, filter_expression=filter_expression, log_client=log_client
    )