@@ -133,17 +133,15 @@ When debugging in non-production environments, you can instruct Logger to log th
133
133
???+ warning
134
134
This is disabled by default to prevent sensitive info being logged
135
135
136
- === "log_handler_event.py"
136
+ ``` python hl_lines="5" title="Logging incoming event"
137
+ from aws_lambda_powertools import Logger
137
138
138
- ```python hl_lines="5"
139
- from aws_lambda_powertools import Logger
139
+ logger = Logger(service="payment")
140
140
141
- logger = Logger(service="payment")
142
-
143
- @logger.inject_lambda_context(log_event=True)
144
- def handler(event, context):
145
- ...
146
- ```
141
+ @logger.inject_lambda_context(log_event=True)
142
+ def handler(event, context):
143
+ ...
144
+ ```
147
145
148
146
#### Setting a Correlation ID
149
147
@@ -249,7 +247,8 @@ You can append additional keys using either mechanism:
249
247
250
248
#### append_keys method
251
249
252
- > NOTE: ` append_keys ` replaces ` structure_logs(append=True, **kwargs) ` method. Both will continue to work until the next major version.
250
+ ???+ note
251
+ ` append_keys ` replaces ` structure_logs(append=True, **kwargs) ` method. structure_logs will be removed in v2.
253
252
254
253
You can append your own keys to your existing Logger via ` append_keys(**additional_key_values) ` method.
255
254
@@ -661,15 +660,13 @@ Parameter | Description | Default
661
660
** ` log_record_order ` ** | set order of log keys when logging | ` ["level", "location", "message", "timestamp"] `
662
661
** ` kwargs ` ** | key-value to be included in log messages | ` None `
663
662
664
- === "LambdaPowertoolsFormatter.py"
665
-
666
- ```python hl_lines="2 4-5"
667
- from aws_lambda_powertools import Logger
668
- from aws_lambda_powertools.logging.formatter import LambdaPowertoolsFormatter
663
+ ``` python hl_lines="2 4-5" title="Pre-configuring Lambda Powertools Formatter"
664
+ from aws_lambda_powertools import Logger
665
+ from aws_lambda_powertools.logging.formatter import LambdaPowertoolsFormatter
669
666
670
- formatter = LambdaPowertoolsFormatter(utc=True, log_record_order=["message"])
671
- logger = Logger(service="example", logger_formatter=formatter)
672
- ```
667
+ formatter = LambdaPowertoolsFormatter(utc=True, log_record_order=["message"])
668
+ logger = Logger(service="example", logger_formatter=formatter)
669
+ ```
673
670
674
671
### Migrating from other Loggers
675
672
@@ -797,17 +794,15 @@ You can change the order of [standard Logger keys](#standard-structured-keys) or
797
794
798
795
By default, this Logger and standard logging library emits records using local time timestamp. You can override this behaviour via ` utc ` parameter:
799
796
800
- === "app.py"
797
+ ``` python hl_lines="6" title="Setting UTC timestamp by default"
798
+ from aws_lambda_powertools import Logger
801
799
802
- ```python hl_lines="6"
803
- from aws_lambda_powertools import Logger
800
+ logger = Logger(service="payment")
801
+ logger.info("Local time")
804
802
805
- logger = Logger(service="payment")
806
- logger.info("Local time")
807
-
808
- logger_in_utc = Logger(service="payment", utc=True)
809
- logger_in_utc.info("GMT time zone")
810
- ```
803
+ logger_in_utc = Logger(service="payment", utc=True)
804
+ logger_in_utc.info("GMT time zone")
805
+ ```
811
806
812
807
#### Custom function for unserializable values
813
808
@@ -845,20 +840,18 @@ By default, Logger uses `str` to handle values non-serializable by JSON. You can
845
840
846
841
By default, Logger uses StreamHandler and logs to standard output. You can override this behaviour via ` logger_handler ` parameter:
847
842
848
- === "collect.py"
849
-
850
- ```python hl_lines="3-4 9 12"
851
- import logging
852
- from pathlib import Path
843
+ ``` python hl_lines="3-4 9 12" title="Configure Logger to output to a file"
844
+ import logging
845
+ from pathlib import Path
853
846
854
- from aws_lambda_powertools import Logger
847
+ from aws_lambda_powertools import Logger
855
848
856
- log_file = Path("/tmp/log.json")
857
- log_file_handler = logging.FileHandler(filename=log_file)
858
- logger = Logger(service="payment", logger_handler=log_file_handler)
849
+ log_file = Path("/tmp/log.json")
850
+ log_file_handler = logging.FileHandler(filename=log_file)
851
+ logger = Logger(service="payment", logger_handler=log_file_handler)
859
852
860
- logger.info("Collecting payment")
861
- ```
853
+ logger.info("Collecting payment")
854
+ ```
862
855
863
856
#### Bring your own formatter
864
857
@@ -868,7 +861,7 @@ For **minor changes like remapping keys** after all log record processing has co
868
861
869
862
=== "custom_formatter.py"
870
863
871
- ```python
864
+ ```python hl_lines="6-7 12"
872
865
from aws_lambda_powertools import Logger
873
866
from aws_lambda_powertools.logging.formatter import LambdaPowertoolsFormatter
874
867
@@ -880,11 +873,20 @@ For **minor changes like remapping keys** after all log record processing has co
880
873
log["event"] = log.pop("message") # rename message key to event
881
874
return self.json_serializer(log) # use configured json serializer
882
875
883
- my_formatter = CustomFormatter()
884
- logger = Logger(service="example", logger_formatter=my_formatter)
876
+ logger = Logger(service="example", logger_formatter=CustomFormatter())
885
877
logger.info("hello")
886
878
```
887
879
880
+ === "Example CloudWatch Logs excerpt"
881
+ ```json hl_lines="5"
882
+ {
883
+ "level": "INFO",
884
+ "location": "<module>:16",
885
+ "timestamp": "2021-12-30 13:41:53,413+0100",
886
+ "event": "hello"
887
+ }
888
+ ```
889
+
888
890
For ** replacing the formatter entirely** , you can subclass ` BasePowertoolsFormatter ` , implement ` append_keys ` method, and override ` format ` standard logging method. This ensures the current feature set of Logger like [ injecting Lambda context] ( #capturing-lambda-context-info ) and [ sampling] ( #sampling-debug-logs ) will continue to work.
889
891
890
892
???+ info
@@ -946,24 +948,22 @@ By default, Logger uses `json.dumps` and `json.loads` as serializer and deserial
946
948
947
949
As parameters don't always translate well between them, you can pass any callable that receives a ` Dict ` and return a ` str ` :
948
950
949
- === "collect.py"
951
+ ``` python hl_lines="1 5-6 9-10" title="Using Rust orjson library as serializer"
952
+ import orjson
950
953
951
- ```python hl_lines="1 5-6 9-10"
952
- import orjson
954
+ from aws_lambda_powertools import Logger
953
955
954
- from aws_lambda_powertools import Logger
956
+ custom_serializer = orjson.dumps
957
+ custom_deserializer = orjson.loads
955
958
956
- custom_serializer = orjson.dumps
957
- custom_deserializer = orjson.loads
959
+ logger = Logger(service="payment",
960
+ json_serializer=custom_serializer,
961
+ json_deserializer=custom_deserializer
962
+ )
958
963
959
- logger = Logger(service="payment",
960
- json_serializer=custom_serializer,
961
- json_deserializer=custom_deserializer
962
- )
963
-
964
- # when using parameters, you can pass a partial
965
- # custom_serializer=functools.partial(orjson.dumps, option=orjson.OPT_SERIALIZE_NUMPY)
966
- ```
964
+ # when using parameters, you can pass a partial
965
+ # custom_serializer=functools.partial(orjson.dumps, option=orjson.OPT_SERIALIZE_NUMPY)
966
+ ```
967
967
968
968
## Testing your code
969
969
@@ -1028,11 +1028,9 @@ This is a Pytest sample that provides the minimum information necessary for Logg
1028
1028
1029
1029
Pytest Live Log feature duplicates emitted log messages in order to style log statements according to their levels, for this to work use ` POWERTOOLS_LOG_DEDUPLICATION_DISABLED ` env var.
1030
1030
1031
- === "shell"
1032
-
1033
- ```bash
1034
- POWERTOOLS_LOG_DEDUPLICATION_DISABLED="1" pytest -o log_cli=1
1035
- ```
1031
+ ``` bash title="Disabling log deduplication to use Pytest live log"
1032
+ POWERTOOLS_LOG_DEDUPLICATION_DISABLED="1" pytest -o log_cli=1
1033
+ ```
1036
1034
1037
1035
???+ warning
1038
1036
This feature should be used with care, as it explicitly disables our ability to filter propagated messages to the root logger (if configured).
@@ -1044,26 +1042,24 @@ Pytest Live Log feature duplicates emitted log messages in order to style log st
1044
1042
You can enable the ` botocore ` and ` boto3 ` logs by using the ` set_stream_logger ` method, this method will add a stream handler
1045
1043
for the given name and level to the logging module. By default, this logs all boto3 messages to stdout.
1046
1044
1047
- === "log_botocore_and_boto3.py"
1048
-
1049
- ```python hl_lines="6-7"
1050
- from typing import Dict, List
1051
- from aws_lambda_powertools.utilities.typing import LambdaContext
1052
- from aws_lambda_powertools import Logger
1045
+ ``` python hl_lines="6-7" title="Enabling AWS SDK logging"
1046
+ from typing import Dict, List
1047
+ from aws_lambda_powertools.utilities.typing import LambdaContext
1048
+ from aws_lambda_powertools import Logger
1053
1049
1054
- import boto3
1055
- boto3.set_stream_logger()
1056
- boto3.set_stream_logger('botocore')
1050
+ import boto3
1051
+ boto3.set_stream_logger()
1052
+ boto3.set_stream_logger('botocore')
1057
1053
1058
- logger = Logger()
1059
- client = boto3.client('s3')
1054
+ logger = Logger()
1055
+ client = boto3.client('s3')
1060
1056
1061
1057
1062
- def handler(event: Dict, context: LambdaContext) -> List:
1063
- response = client.list_buckets()
1058
+ def handler(event: Dict, context: LambdaContext) -> List:
1059
+ response = client.list_buckets()
1064
1060
1065
- return response.get("Buckets", [])
1066
- ```
1061
+ return response.get("Buckets", [])
1062
+ ```
1067
1063
1068
1064
** What's the difference between ` append_keys ` and ` extra ` ?**
1069
1065
0 commit comments