diff --git a/aws_lambda_powertools/utilities/data_classes/code_pipeline_job_event.py b/aws_lambda_powertools/utilities/data_classes/code_pipeline_job_event.py
index 434df509deb..96c209e0eca 100644
--- a/aws_lambda_powertools/utilities/data_classes/code_pipeline_job_event.py
+++ b/aws_lambda_powertools/utilities/data_classes/code_pipeline_job_event.py
@@ -4,9 +4,6 @@
 from typing import Any, Dict, List, Optional
 from urllib.parse import unquote_plus
 
-import boto3
-
-from aws_lambda_powertools.shared import user_agent
 from aws_lambda_powertools.utilities.data_classes.common import DictWrapper
 
 
@@ -204,6 +201,12 @@ def setup_s3_client(self):
         BaseClient
             An S3 client with the appropriate credentials
         """
+        # IMPORTING boto3 within the FUNCTION and not at the top level to get
+        # it only when we explicitly want it for better performance.
+        import boto3
+
+        from aws_lambda_powertools.shared import user_agent
+
         s3 = boto3.client(
             "s3",
             aws_access_key_id=self.data.artifact_credentials.access_key_id,
diff --git a/docs/core/logger.md b/docs/core/logger.md
index 064328dabb0..305a7cef0f3 100644
--- a/docs/core/logger.md
+++ b/docs/core/logger.md
@@ -741,6 +741,7 @@ Here's an example where we persist `payment_id` not `request_id`. Note that `pay
    ---8<-- "examples/logger/src/append_keys_vs_extra_output.json"
    ```
 
+
 ### How do I aggregate and search Powertools for AWS Lambda (Python) logs across accounts?
 
 As of now, ElasticSearch (ELK) or 3rd party solutions are best suited to this task. Please refer to this [discussion for more details](https://github.com/awslabs/aws-lambda-powertools-python/issues/460)
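
For reference, here is a minimal sketch of the deferred-import pattern the first file's change applies: `boto3` is imported inside the method body rather than at module level, so importing the event data class stays cheap and boto3 is only loaded when `setup_s3_client()` is actually called. This sketch is illustrative only; the `ArtifactCredentials` stand-in and `CodePipelineJobEventSketch` class are hypothetical simplifications of the real class (which reads credentials from `self.data.artifact_credentials` and also registers a Powertools user-agent feature on the client).

```python
from dataclasses import dataclass


@dataclass
class ArtifactCredentials:
    # Hypothetical stand-in for the credentials the real event class exposes
    access_key_id: str
    secret_access_key: str
    session_token: str


class CodePipelineJobEventSketch:
    """Simplified sketch of the lazy-import approach used in the diff."""

    def __init__(self, credentials: ArtifactCredentials):
        self._credentials = credentials

    def setup_s3_client(self):
        # Deferred import: boto3 is only loaded when an S3 client is requested,
        # keeping module import (and Lambda cold start) cheaper for users who
        # never call this method.
        import boto3

        return boto3.client(
            "s3",
            aws_access_key_id=self._credentials.access_key_id,
            aws_secret_access_key=self._credentials.secret_access_key,
            aws_session_token=self._credentials.session_token,
        )
```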