From fef70b0f98951b91969c510853df24351c9a0f0b Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Fri, 8 Jul 2022 16:00:48 +0200 Subject: [PATCH 01/49] docs(core): match code snippet name with filename (#1286) --- docs/core/event_handler/api_gateway.md | 62 +++++----- docs/core/logger.md | 111 +++++++++--------- docs/core/metrics.md | 34 +++--- docs/core/tracer.md | 16 +-- ...son => append_and_remove_keys_output.json} | 0 ....json => set_correlation_id_jmespath.json} | 0 ...nt.json => set_correlation_id_method.json} | 0 examples/tracer/src/tracer_reuse.py | 2 +- ...euse_payment.py => tracer_reuse_module.py} | 0 9 files changed, 115 insertions(+), 110 deletions(-) rename examples/logger/src/{append_and_remove_keys.json => append_and_remove_keys_output.json} (100%) rename examples/logger/src/{set_correlation_id_jmespath_event.json => set_correlation_id_jmespath.json} (100%) rename examples/logger/src/{set_correlation_id_method_event.json => set_correlation_id_method.json} (100%) rename examples/tracer/src/{tracer_reuse_payment.py => tracer_reuse_module.py} (100%) diff --git a/docs/core/event_handler/api_gateway.md b/docs/core/event_handler/api_gateway.md index 9db219e994e..6d8f441d661 100644 --- a/docs/core/event_handler/api_gateway.md +++ b/docs/core/event_handler/api_gateway.md @@ -48,13 +48,13 @@ Here's an example on how we can handle the `/todos` path. ???+ info We automatically serialize `Dict` responses as JSON, trim whitespace for compact responses, and set content-type to `application/json`. -=== "app.py" +=== "getting_started_rest_api_resolver.py" ```python hl_lines="5 11 14 28" --8<-- "examples/event_handler_rest/src/getting_started_rest_api_resolver.py" ``` -=== "Request" +=== "getting_started_rest_api_resolver.json" This utility uses `path` and `httpMethod` to route to the right function. This helps make unit tests and local invocation easier too. @@ -62,7 +62,7 @@ Here's an example on how we can handle the `/todos` path. --8<-- "examples/event_handler_rest/src/getting_started_rest_api_resolver.json" ``` -=== "Response" +=== "getting_started_rest_api_resolver_output.json" ```json --8<-- "examples/event_handler_rest/src/getting_started_rest_api_resolver_output.json" @@ -96,13 +96,13 @@ Each dynamic route you set must be part of your function signature. This allows ???+ note For brevity, we will only include the necessary keys for each sample request for the example to work. -=== "app.py" +=== "dynamic_routes.py" ```python hl_lines="14 16" --8<-- "examples/event_handler_rest/src/dynamic_routes.py" ``` -=== "Request" +=== "dynamic_routes.json" ```json --8<-- "examples/event_handler_rest/src/dynamic_routes.json" @@ -123,13 +123,13 @@ You can also combine nested paths with greedy regex to catch in between routes. ???+ warning We choose the most explicit registered route that matches an incoming event. -=== "app.py" +=== "dynamic_routes_catch_all.py" ```python hl_lines="11" --8<-- "examples/event_handler_rest/src/dynamic_routes_catch_all.py" ``` -=== "Request" +=== "dynamic_routes_catch_all.json" ```json --8<-- "examples/event_handler_rest/src/dynamic_routes_catch_all.json" @@ -139,13 +139,13 @@ You can also combine nested paths with greedy regex to catch in between routes. You can use named decorators to specify the HTTP method that should be handled in your functions. That is, `app.`, where the HTTP method could be `get`, `post`, `put`, `patch`, `delete`, and `options`. 
-=== "app.py" +=== "http_methods.py" ```python hl_lines="14 17" --8<-- "examples/event_handler_rest/src/http_methods.py" ``` -=== "Request" +=== "http_methods.json" ```json --8<-- "examples/event_handler_rest/src/http_methods.json" @@ -225,13 +225,13 @@ When using [Custom Domain API Mappings feature](https://docs.aws.amazon.com/apig To address this API Gateway behavior, we use `strip_prefixes` parameter to account for these prefixes that are now injected into the path regardless of which type of API Gateway you're using. -=== "app.py" +=== "custom_api_mapping.py" ```python hl_lines="8" --8<-- "examples/event_handler_rest/src/custom_api_mapping.py" ``` -=== "Request" +=== "custom_api_mapping.json" ```json --8<-- "examples/event_handler_rest/src/custom_api_mapping.json" @@ -253,13 +253,13 @@ This will ensure that CORS headers are always returned as part of the response w ???+ tip Optionally disable CORS on a per path basis with `cors=False` parameter. -=== "app.py" +=== "setting_cors.py" ```python hl_lines="5 11-12 34" --8<-- "examples/event_handler_rest/src/setting_cors.py" ``` -=== "Response" +=== "setting_cors_output.json" ```json --8<-- "examples/event_handler_rest/src/setting_cors_output.json" @@ -290,13 +290,13 @@ For convenience, these are the default values when using `CORSConfig` to enable You can use the `Response` class to have full control over the response, for example you might want to add additional headers or set a custom Content-type. -=== "app.py" +=== "fine_grained_responses.py" ```python hl_lines="7 24-28" --8<-- "examples/event_handler_rest/src/fine_grained_responses.py" ``` -=== "Response" +=== "fine_grained_responses_output.json" ```json --8<-- "examples/event_handler_rest/src/fine_grained_responses_output.json" @@ -309,19 +309,19 @@ You can compress with gzip and base64 encode your responses via `compress` param ???+ warning The client must send the `Accept-Encoding` header, otherwise a normal response will be sent. -=== "app.py" +=== "compressing_responses.py" ```python hl_lines="14" --8<-- "examples/event_handler_rest/src/compressing_responses.py" ``` -=== "Request" +=== "compressing_responses.json" ```json --8<-- "examples/event_handler_rest/src/compressing_responses.json" ``` -=== "Response" +=== "compressing_responses_output.json" ```json --8<-- "examples/event_handler_rest/src/compressing_responses_output.json" @@ -336,25 +336,25 @@ Like `compress` feature, the client must send the `Accept` header with the corre ???+ warning This feature requires API Gateway to configure binary media types, see [our sample infrastructure](#required-resources) for reference. -=== "app.py" +=== "binary_responses.py" ```python hl_lines="14 20" --8<-- "examples/event_handler_rest/src/binary_responses.py" ``` -=== "logo.svg" +=== "binary_responses_logo.svg" ```xml --8<-- "examples/event_handler_rest/src/binary_responses_logo.svg" ``` -=== "Request" +=== "binary_responses.json" ```json --8<-- "examples/event_handler_rest/src/binary_responses.json" ``` -=== "Response" +=== "binary_responses_output.json" ```json --8<-- "examples/event_handler_rest/src/binary_responses_output.json" @@ -387,9 +387,9 @@ You can instruct API Gateway handler to use a custom serializer to best suit you As you grow the number of routes a given Lambda function should handle, it is natural to split routes into separate files to ease maintenance - That's where the `Router` feature is useful. 
-Let's assume you have `app.py` as your Lambda function entrypoint and routes in `todos.py`, this is how you'd use the `Router` feature. +Let's assume you have `app.py` as your Lambda function entrypoint and routes in `split_route_module.py`, this is how you'd use the `Router` feature. -=== "todos.py" +=== "split_route_module.py" We import **Router** instead of **APIGatewayRestResolver**; syntax wise is exactly the same. @@ -397,7 +397,7 @@ Let's assume you have `app.py` as your Lambda function entrypoint and routes in --8<-- "examples/event_handler_rest/src/split_route_module.py" ``` -=== "app.py" +=== "split_route.py" We use `include_router` method and include all user routers registered in the `router` global object. @@ -407,17 +407,17 @@ Let's assume you have `app.py` as your Lambda function entrypoint and routes in #### Route prefix -In the previous example, `todos.py` routes had a `/todos` prefix. This might grow over time and become repetitive. +In the previous example, `split_route_module.py` routes had a `/todos` prefix. This might grow over time and become repetitive. -When necessary, you can set a prefix when including a router object. This means you could remove `/todos` prefix in `todos.py` altogether. +When necessary, you can set a prefix when including a router object. This means you could remove `/todos` prefix altogether. -=== "app.py" +=== "split_route_prefix.py" ```python hl_lines="12" --8<-- "examples/event_handler_rest/src/split_route_prefix.py" ``` -=== "todos.py" +=== "split_route_prefix_module.py" ```python hl_lines="13 25" --8<-- "examples/event_handler_rest/src/split_route_prefix_module.py" @@ -509,13 +509,13 @@ your development, building, deployment tooling need to accommodate the distinct You can test your routes by passing a proxy event request where `path` and `httpMethod`. -=== "test_app.py" +=== "assert_http_response.py" ```python hl_lines="21-24" --8<-- "examples/event_handler_rest/src/assert_http_response.py" ``` -=== "app.py" +=== "assert_http_response_module.py" ```python --8<-- "examples/event_handler_rest/src/assert_http_response_module.py" diff --git a/docs/core/logger.md b/docs/core/logger.md index b09cc6c85d3..c699568b349 100644 --- a/docs/core/logger.md +++ b/docs/core/logger.md @@ -48,13 +48,13 @@ Your Logger will include the following keys to your structured logging: You can enrich your structured logs with key Lambda context information via `inject_lambda_context`. -=== "collect.py" +=== "inject_lambda_context.py" ```python hl_lines="7" --8<-- "examples/logger/src/inject_lambda_context.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "inject_lambda_context_output.json" ```json hl_lines="8-12 17-20" --8<-- "examples/logger/src/inject_lambda_context_output.json" @@ -88,19 +88,19 @@ You can set a Correlation ID using `correlation_id_path` param by passing a [JME ???+ tip You can retrieve correlation IDs via `get_correlation_id` method -=== "collect.py" +=== "set_correlation_id.py" ```python hl_lines="7" --8<-- "examples/logger/src/set_correlation_id.py" ``` -=== "Example Event" +=== "set_correlation_id_event.json" ```json hl_lines="3" --8<-- "examples/logger/src/set_correlation_id_event.json" ``` -=== "Example CloudWatch Logs excerpt" +=== "set_correlation_id_output.json" ```json hl_lines="12" --8<-- "examples/logger/src/set_correlation_id_output.json" @@ -110,18 +110,19 @@ You can set a Correlation ID using `correlation_id_path` param by passing a [JME You can also use `set_correlation_id` method to inject it anywhere else in your code. 
Example below uses [Event Source Data Classes utility](../utilities/data_classes.md) to easily access events properties. -=== "collect.py" +=== "set_correlation_id_method.py" ```python hl_lines="11" --8<-- "examples/logger/src/set_correlation_id_method.py" ``` -=== "Example Event" + +=== "set_correlation_id_method.json" ```json hl_lines="3" - --8<-- "examples/logger/src/set_correlation_id_method_event.json" + --8<-- "examples/logger/src/set_correlation_id_method.json" ``` -=== "Example CloudWatch Logs excerpt" +=== "set_correlation_id_method_output.json" ```json hl_lines="7" --8<-- "examples/logger/src/set_correlation_id_method_output.json" @@ -131,19 +132,19 @@ You can also use `set_correlation_id` method to inject it anywhere else in your To ease routine tasks like extracting correlation ID from popular event sources, we provide [built-in JMESPath expressions](#built-in-correlation-id-expressions). -=== "collect.py" +=== "set_correlation_id_jmespath.py" ```python hl_lines="2 8" --8<-- "examples/logger/src/set_correlation_id_jmespath.py" ``` -=== "Example Event" +=== "set_correlation_id_jmespath.json" ```json hl_lines="3" - --8<-- "examples/logger/src/set_correlation_id_jmespath_event.json" + --8<-- "examples/logger/src/set_correlation_id_jmespath.json" ``` -=== "Example CloudWatch Logs excerpt" +=== "set_correlation_id_jmespath_output.json" ```json hl_lines="12" --8<-- "examples/logger/src/set_correlation_id_jmespath_output.json" @@ -166,12 +167,13 @@ You can append additional keys using either mechanism: You can append your own keys to your existing Logger via `append_keys(**additional_key_values)` method. -=== "collect.py" +=== "append_keys.py" ```python hl_lines="12" --8<-- "examples/logger/src/append_keys.py" ``` -=== "Example CloudWatch Logs excerpt" + +=== "append_keys_output.json" ```json hl_lines="7" --8<-- "examples/logger/src/append_keys_output.json" @@ -191,12 +193,13 @@ It accepts any dictionary, and all keyword arguments will be added as part of th ???+ info Any keyword argument added using `extra` will not be persisted for subsequent messages. -=== "extra_parameter.py" +=== "append_keys_extra.py" ```python hl_lines="9" --8<-- "examples/logger/src/append_keys_extra.py" ``` -=== "Example CloudWatch Logs excerpt" + +=== "append_keys_extra_output.json" ```json hl_lines="7" --8<-- "examples/logger/src/append_keys_extra_output.json" @@ -206,13 +209,13 @@ It accepts any dictionary, and all keyword arguments will be added as part of th You can remove any additional key from Logger state using `remove_keys`. -=== "collect.py" +=== "remove_keys.py" ```python hl_lines="11" --8<-- "examples/logger/src/remove_keys.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "remove_keys_output.json" ```json hl_lines="7" --8<-- "examples/logger/src/remove_keys_output.json" @@ -232,19 +235,19 @@ Logger is commonly initialized in the global scope. Due to [Lambda Execution Con You can either avoid running any code as part of Lambda Layers global scope, or override keys with their latest value as part of handler's execution. -=== "collect.py" +=== "clear_state.py" ```python hl_lines="7 10" --8<-- "examples/logger/src/clear_state.py" ``` -=== "#1 request" +=== "clear_state_event_one.json" ```json hl_lines="7" --8<-- "examples/logger/src/clear_state_event_one.json" ``` -=== "#2 request" +=== "clear_state_event_two.json" ```json hl_lines="7" --8<-- "examples/logger/src/clear_state_event_two.json" @@ -257,13 +260,13 @@ Use `logger.exception` method to log contextual information about exceptions. 
Lo ???+ tip You can use your preferred Log Analytics tool to enumerate and visualize exceptions across all your services using `exception_name` key. -=== "collect.py" +=== "logging_exceptions.py" ```python hl_lines="15" --8<-- "examples/logger/src/logging_exceptions.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "logging_exceptions_output.json" ```json hl_lines="7-8" --8<-- "examples/logger/src/logging_exceptions_output.json" @@ -292,19 +295,19 @@ Similar to [Tracer](./tracer.md#reusing-tracer-across-your-code), a new instance Notice in the CloudWatch Logs output how `payment_id` appeared as expected when logging in `collect.py`. -=== "collect.py" +=== "logger_reuse.py" ```python hl_lines="1 9 11 12" --8<-- "examples/logger/src/logger_reuse.py" ``` -=== "payment.py" +=== "logger_reuse_payment.py" ```python hl_lines="3 7" --8<-- "examples/logger/src/logger_reuse_payment.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "logger_reuse_output.json" ```json hl_lines="12" --8<-- "examples/logger/src/logger_reuse_output.json" @@ -313,7 +316,7 @@ Notice in the CloudWatch Logs output how `payment_id` appeared as expected when ???+ note "Note: About Child Loggers" Coming from standard library, you might be used to use `logging.getLogger(__name__)`. This will create a new instance of a Logger with a different name. - In Powertools, you can have the same effect by using `child=True` parameter: `Logger(child=True)`. This creates a new Logger instance named after `service.`. All state changes will be propagated bi-directonally between Child and Parent. + In Powertools, you can have the same effect by using `child=True` parameter: `Logger(child=True)`. This creates a new Logger instance named after `service.`. All state changes will be propagated bi-directionally between Child and Parent. For that reason, there could be side effects depending on the order the Child Logger is instantiated, because Child Loggers don't have a handler. @@ -337,15 +340,15 @@ Sampling decision happens at the Logger initialization. 
This means sampling may ???+ note Open a [feature request](https://github.com/awslabs/aws-lambda-powertools-python/issues/new?assignees=&labels=feature-request%2C+triage&template=feature_request.md&title=) if you want Logger to calculate sampling for every invocation -=== "collect.py" +=== "sampling_debug_logs.py" ```python hl_lines="6 10" - --8<-- "examples/logger/src/logger_reuse.py" + --8<-- "examples/logger/src/sampling_debug_logs.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "sampling_debug_logs_output.json" - ```json hl_lines="3 5 13 16 25" + ```json hl_lines="3 5 13 16 26" --8<-- "examples/logger/src/sampling_debug_logs_output.json" ``` @@ -393,13 +396,13 @@ For child Loggers, we introspect the name of your module where `Logger(child=Tru ???+ danger A common issue when migrating from other Loggers is that `service` might be defined in the parent Logger (no child param), and not defined in the child Logger: -=== "incorrect_logger_inheritance.py" +=== "logging_inheritance_bad.py" ```python hl_lines="1 9" --8<-- "examples/logger/src/logging_inheritance_bad.py" ``` -=== "my_other_module.py" +=== "logging_inheritance_module.py" ```python hl_lines="1 9" --8<-- "examples/logger/src/logging_inheritance_module.py" @@ -412,13 +415,13 @@ In this case, Logger will register a Logger named `payment`, and a Logger named Do this instead: -=== "correct_logger_inheritance.py" +=== "logging_inheritance_good.py" ```python hl_lines="1 9" --8<-- "examples/logger/src/logging_inheritance_good.py" ``` -=== "my_other_module.py" +=== "logging_inheritance_module.py" ```python hl_lines="1 9" --8<-- "examples/logger/src/logging_inheritance_module.py" @@ -435,13 +438,13 @@ You might want to continue to use the same date formatting style, or override `l Logger allows you to either change the format or suppress the following keys altogether at the initialization: `location`, `timestamp`, `level`, `xray_trace_id`. -=== "lambda_handler.py" +=== "overriding_log_records.py" ```python hl_lines="7 10" --8<-- "examples/logger/src/overriding_log_records.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "overriding_log_records_output.json" ```json hl_lines="3 5" --8<-- "examples/logger/src/overriding_log_records_output.json" @@ -451,12 +454,13 @@ Logger allows you to either change the format or suppress the following keys alt You can change the order of [standard Logger keys](#standard-structured-keys) or any keys that will be appended later at runtime via the `log_record_order` parameter. -=== "app.py" +=== "reordering_log_keys.py" ```python hl_lines="5 8" --8<-- "examples/logger/src/reordering_log_keys.py" ``` -=== "Example CloudWatch Logs excerpt" + +=== "reordering_log_keys_output.json" ```json hl_lines="3 10" --8<-- "examples/logger/src/reordering_log_keys_output.json" @@ -466,13 +470,13 @@ You can change the order of [standard Logger keys](#standard-structured-keys) or By default, this Logger and standard logging library emits records using local time timestamp. You can override this behavior via `utc` parameter: -=== "app.py" +=== "setting_utc_timestamp.py" ```python hl_lines="6" --8<-- "examples/logger/src/setting_utc_timestamp.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "setting_utc_timestamp_output.json" ```json hl_lines="6 13" --8<-- "examples/logger/src/setting_utc_timestamp_output.json" @@ -482,13 +486,13 @@ By default, this Logger and standard logging library emits records using local t By default, Logger uses `str` to handle values non-serializable by JSON. 
You can override this behavior via `json_default` parameter by passing a Callable: -=== "app.py" +=== "unserializable_values.py" ```python hl_lines="6 17" --8<-- "examples/logger/src/unserializable_values.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "unserializable_values_output.json" ```json hl_lines="4-6" --8<-- "examples/logger/src/unserializable_values_output.json" @@ -511,13 +515,13 @@ By default, Logger uses [LambdaPowertoolsFormatter](#lambdapowertoolsformatter) For these, you can override the `serialize` method from [LambdaPowertoolsFormatter](#lambdapowertoolsformatter). -=== "custom_formatter.py" +=== "bring_your_own_formatter.py" ```python hl_lines="2 5-6 12" --8<-- "examples/logger/src/bring_your_own_formatter.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "bring_your_own_formatter_output.json" ```json hl_lines="6" --8<-- "examples/logger/src/bring_your_own_formatter_output.json" ``` @@ -529,13 +533,13 @@ For exceptional cases where you want to completely replace our formatter logic, ???+ warning You will need to implement `append_keys`, `clear_state`, override `format`, and optionally `remove_keys` to keep the same feature set Powertools Logger provides. This also means keeping state of logging keys added. -=== "collect.py" +=== "bring_your_own_formatter_from_scratch.py" ```python hl_lines="6 9 11-12 15 19 23 26 38" --8<-- "examples/logger/src/bring_your_own_formatter_from_scratch.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "bring_your_own_formatter_from_scratch_output.json" ```json hl_lines="2-4" --8<-- "examples/logger/src/bring_your_own_formatter_from_scratch_output.json" @@ -615,15 +619,16 @@ You can include any of these logging attributes as key value arguments (`kwargs` You can also add them later anywhere in your code with `append_keys`, or remove them with `remove_keys` methods. -=== "collect.py" +=== "append_and_remove_keys.py" ```python hl_lines="3 8 10" ---8<-- "examples/logger/src/append_and_remove_keys.py" ``` -=== "Example CloudWatch Logs excerpt" + +=== "append_and_remove_keys_output.json" ```json hl_lines="6 15-16" - ---8<-- "examples/logger/src/append_and_remove_keys.json" + ---8<-- "examples/logger/src/append_and_remove_keys_output.json" ``` For log records originating from Powertools Logger, the `name` attribute will be the same as `service`, for log records coming from standard library logger, it will be the name of the logger (i.e. what was used as name argument to `logging.getLogger`). @@ -634,13 +639,13 @@ Keys added with `append_keys` will persist across multiple log messages while ke Here's an example where we persist `payment_id` not `request_id`. Note that `payment_id` remains in both log messages while `booking_id` is only available in the first message. -=== "collect.py" +=== "append_keys_vs_extra.py" ```python hl_lines="16 23" ---8<-- "examples/logger/src/append_keys_vs_extra.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "append_keys_vs_extra_output.json" ```json hl_lines="9-10 19" ---8<-- "examples/logger/src/append_keys_vs_extra_output.json" diff --git a/docs/core/metrics.md b/docs/core/metrics.md index 713a53b193c..843e35b7eb8 100644 --- a/docs/core/metrics.md +++ b/docs/core/metrics.md @@ -57,13 +57,13 @@ You can create metrics using `add_metric`, and you can create dimensions for all ???+ tip You can initialize Metrics in any other module too. It'll keep track of your aggregate metrics in memory to optimize costs (one blob instead of multiples). 
-=== "Metrics" +=== "add_metrics.py" ```python hl_lines="10" --8<-- "examples/metrics/src/add_metrics.py" ``` -=== "Metrics with custom dimensions" +=== "add_dimension.py" ```python hl_lines="13" --8<-- "examples/metrics/src/add_dimension.py" @@ -82,13 +82,13 @@ You can create metrics using `add_metric`, and you can create dimensions for all You can call `add_metric()` with the same metric name multiple times. The values will be grouped together in a list. -=== "Metrics" +=== "add_multi_value_metrics.py" ```python hl_lines="14-15" --8<-- "examples/metrics/src/add_multi_value_metrics.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "add_multi_value_metrics_output.json" ```python hl_lines="15 24-26" --8<-- "examples/metrics/src/add_multi_value_metrics_output.json" @@ -100,13 +100,13 @@ You can use `set_default_dimensions` method, or `default_dimensions` parameter i If you'd like to remove them at some point, you can use `clear_default_dimensions` method. -=== "set_default_dimensions method" +=== "set_default_dimensions.py" ```python hl_lines="9" --8<-- "examples/metrics/src/set_default_dimensions.py" ``` -=== "with log_metrics decorator" +=== "set_default_dimensions_log_metrics.py" ```python hl_lines="9 13" --8<-- "examples/metrics/src/set_default_dimensions_log_metrics.py" @@ -118,13 +118,13 @@ As you finish adding all your metrics, you need to serialize and flush them to s This decorator also **validates**, **serializes**, and **flushes** all your metrics. During metrics validation, if no metrics are provided then a warning will be logged, but no exception will be raised. -=== "app.py" +=== "add_metrics.py" ```python hl_lines="8" --8<-- "examples/metrics/src/add_metrics.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "log_metrics_output.json" ```json hl_lines="6 9 14 21-23" --8<-- "examples/metrics/src/log_metrics_output.json" @@ -152,13 +152,13 @@ If you want to ensure at least one metric is always emitted, you can pass `raise You can optionally capture cold start metrics with `log_metrics` decorator via `capture_cold_start_metric` param. -=== "app.py" +=== "capture_cold_start_metric.py" ```python hl_lines="7" --8<-- "examples/metrics/src/capture_cold_start_metric.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "capture_cold_start_metric_output.json" ```json hl_lines="9 15 22 24-25" --8<-- "examples/metrics/src/capture_cold_start_metric_output.json" @@ -183,13 +183,13 @@ You can add high-cardinality data as part of your Metrics log with `add_metadata ???+ info **This will not be available during metrics visualization** - Use **dimensions** for this purpose -=== "app.py" +=== "add_metadata.py" ```python hl_lines="14" --8<-- "examples/metrics/src/add_metadata.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "add_metadata_output.json" ```json hl_lines="22" --8<-- "examples/metrics/src/add_metadata_output.json" @@ -204,13 +204,13 @@ CloudWatch EMF uses the same dimensions across all your metrics. Use `single_met **unique metric = (metric_name + dimension_name + dimension_value)** -=== "app.py" +=== "single_metric.py" ```python hl_lines="11" --8<-- "examples/metrics/src/single_metric.py" ``` -=== "Example CloudWatch Logs excerpt" +=== "single_metric_output.json" ```json hl_lines="15" --8<-- "examples/metrics/src/single_metric_output.json" @@ -257,7 +257,7 @@ Make sure to set `POWERTOOLS_METRICS_NAMESPACE` and `POWERTOOLS_SERVICE_NAME` be You can read standard output and assert whether metrics have been flushed. 
Here's an example using `pytest` with `capsys` built-in fixture: -=== "Asserting single EMF blob" +=== "assert_single_emf_blob.py" ```python hl_lines="6 9-10 23-34" --8<-- "examples/metrics/src/assert_single_emf_blob.py" @@ -269,7 +269,7 @@ You can read standard output and assert whether metrics have been flushed. Here' --8<-- "examples/metrics/src/add_metrics.py" ``` -=== "Asserting multiple EMF blobs" +=== "assert_multiple_emf_blobs.py" This will be needed when using `capture_cold_start_metric=True`, or when both `Metrics` and `single_metric` are used. @@ -277,7 +277,7 @@ You can read standard output and assert whether metrics have been flushed. Here' --8<-- "examples/metrics/src/assert_multiple_emf_blobs.py" ``` -=== "my_other_module.py" +=== "assert_multiple_emf_blobs_module.py" ```python --8<-- "examples/metrics/src/assert_multiple_emf_blobs_module.py" diff --git a/docs/core/tracer.md b/docs/core/tracer.md index 7664231cc31..8fbfc0e29f7 100644 --- a/docs/core/tracer.md +++ b/docs/core/tracer.md @@ -77,19 +77,19 @@ You can trace synchronous functions using the `capture_method` decorator. You can trace asynchronous functions and generator functions (including context managers) using `capture_method`. -=== "Async" +=== "capture_method_async.py" ```python hl_lines="9" --8<-- "examples/tracer/src/capture_method_async.py" ``` -=== "Context manager" +=== "capture_method_context_manager.py" ```python hl_lines="12-13" --8<-- "examples/tracer/src/capture_method_context_manager.py" ``` -=== "Generators" +=== "capture_method_generators.py" ```python hl_lines="9" --8<-- "examples/tracer/src/capture_method_generators.py" @@ -116,13 +116,13 @@ Use **`capture_response=False`** parameter in both `capture_lambda_handler` and 2. You might manipulate **streaming objects that can be read only once**; this prevents subsequent calls from being empty 3. You might return **more than 64K** of data _e.g., `message too long` error_ -=== "sensitive_data_scenario.py" +=== "disable_capture_response.py" ```python hl_lines="8 15" --8<-- "examples/tracer/src/disable_capture_response.py" ``` -=== "streaming_object_scenario.py" +=== "disable_capture_response_streaming_body.py" ```python hl_lines="19" --8<-- "examples/tracer/src/disable_capture_response_streaming_body.py" @@ -192,17 +192,17 @@ Tracer keeps a copy of its configuration after the first initialization. This is Tracer will automatically ignore imported modules that have been patched. -=== "handler.py" +=== "tracer_reuse.py" ```python hl_lines="1 6" --8<-- "examples/tracer/src/tracer_reuse.py" ``` -=== "tracer_reuse_payment.py" +=== "tracer_reuse_module.py" A new instance of Tracer will be created but will reuse the previous Tracer instance configuration, similar to a Singleton. 
```python hl_lines="3" - --8<-- "examples/tracer/src/tracer_reuse_payment.py" + --8<-- "examples/tracer/src/tracer_reuse_module.py" ``` ## Testing your code diff --git a/examples/logger/src/append_and_remove_keys.json b/examples/logger/src/append_and_remove_keys_output.json similarity index 100% rename from examples/logger/src/append_and_remove_keys.json rename to examples/logger/src/append_and_remove_keys_output.json diff --git a/examples/logger/src/set_correlation_id_jmespath_event.json b/examples/logger/src/set_correlation_id_jmespath.json similarity index 100% rename from examples/logger/src/set_correlation_id_jmespath_event.json rename to examples/logger/src/set_correlation_id_jmespath.json diff --git a/examples/logger/src/set_correlation_id_method_event.json b/examples/logger/src/set_correlation_id_method.json similarity index 100% rename from examples/logger/src/set_correlation_id_method_event.json rename to examples/logger/src/set_correlation_id_method.json diff --git a/examples/tracer/src/tracer_reuse.py b/examples/tracer/src/tracer_reuse.py index 5f12f82b714..bdfe7bc9d91 100644 --- a/examples/tracer/src/tracer_reuse.py +++ b/examples/tracer/src/tracer_reuse.py @@ -1,4 +1,4 @@ -from tracer_reuse_payment import collect_payment +from tracer_reuse_module import collect_payment from aws_lambda_powertools import Tracer from aws_lambda_powertools.utilities.typing import LambdaContext diff --git a/examples/tracer/src/tracer_reuse_payment.py b/examples/tracer/src/tracer_reuse_module.py similarity index 100% rename from examples/tracer/src/tracer_reuse_payment.py rename to examples/tracer/src/tracer_reuse_module.py From d91838743cb2d7e3ae8b161ae8521194487585bf Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 8 Jul 2022 16:03:31 +0200 Subject: [PATCH 02/49] fix(ci): accept core arg in label related issue workflow --- .github/scripts/label_related_issue.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/scripts/label_related_issue.js b/.github/scripts/label_related_issue.js index fb47a7bc4b0..a66a63fd005 100644 --- a/.github/scripts/label_related_issue.js +++ b/.github/scripts/label_related_issue.js @@ -1,4 +1,4 @@ -module.exports = async ({github, context}) => { +module.exports = async ({github, context, core}) => { const prBody = context.payload.body; const prNumber = context.payload.number; const releaseLabel = process.env.RELEASE_LABEL; From 13558d9d001a76d91e3be0400dae184b12ef2ea0 Mon Sep 17 00:00:00 2001 From: Release bot Date: Tue, 9 Aug 2022 14:08:00 +0000 Subject: [PATCH 03/49] chore(ci): update changelog with latest changes --- CHANGELOG.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6b85db5304d..c6f5fe65e71 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,7 @@ ## Bug Fixes +* **ci:** move from pip-tools to poetry on layers to fix conflicts * **ci:** typo and bust gh actions cache * **ci:** use poetry to resolve layer deps; pip for CDK * **ci:** disable poetry venv for layer workflow as cdk ignores venv @@ -17,10 +18,14 @@ ## Documentation +* **jmespath_util:** snippets split, improved, and lint ([#1419](https://github.com/awslabs/aws-lambda-powertools-python/issues/1419)) +* **layer:** upgrade to 1.27.0 * **layer:** upgrade to 1.27.0 ## Maintenance +* **ci:** reduce payload and only send prod notification +* **ci:** update changelog with latest changes * **ci:** update changelog with latest changes * **ci:** update changelog with latest changes From 40ce5091a8fb0ee49a3c441d97333ebeb080e3fd Mon Sep 
17 00:00:00 2001 From: Peter Schutt Date: Wed, 10 Aug 2022 00:09:15 +1000 Subject: [PATCH 04/49] docs(apigateway): removes duplicate admonition (#1426) --- docs/core/event_handler/api_gateway.md | 3 --- 1 file changed, 3 deletions(-) diff --git a/docs/core/event_handler/api_gateway.md b/docs/core/event_handler/api_gateway.md index 1358f545eb8..f4f45a051f8 100644 --- a/docs/core/event_handler/api_gateway.md +++ b/docs/core/event_handler/api_gateway.md @@ -135,9 +135,6 @@ Each dynamic route you set must be part of your function signature. This allows ???+ tip You can also nest dynamic paths, for example `/todos//`. -???+ tip - You can also nest dynamic paths, for example `/todos//`. - #### Catch-all routes ???+ note From 5951ab0210a8f33ed30bbffd0587f3353ea8c06f Mon Sep 17 00:00:00 2001 From: Release bot Date: Tue, 9 Aug 2022 14:09:54 +0000 Subject: [PATCH 05/49] chore(ci): update changelog with latest changes --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index c6f5fe65e71..9fc78d3dae1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -18,12 +18,14 @@ ## Documentation +* **apigateway:** removes duplicate admonition ([#1426](https://github.com/awslabs/aws-lambda-powertools-python/issues/1426)) * **jmespath_util:** snippets split, improved, and lint ([#1419](https://github.com/awslabs/aws-lambda-powertools-python/issues/1419)) * **layer:** upgrade to 1.27.0 * **layer:** upgrade to 1.27.0 ## Maintenance +* **ci:** update changelog with latest changes * **ci:** reduce payload and only send prod notification * **ci:** update changelog with latest changes * **ci:** update changelog with latest changes From 6b2390356d56428b2a35ef0810bb2b15e579bec8 Mon Sep 17 00:00:00 2001 From: Peter Schutt Date: Wed, 10 Aug 2022 00:11:08 +1000 Subject: [PATCH 06/49] docs(parser): minor grammar fix (#1427) --- docs/utilities/parser.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/utilities/parser.md b/docs/utilities/parser.md index 8756725d1e0..cb69cf9699b 100644 --- a/docs/utilities/parser.md +++ b/docs/utilities/parser.md @@ -524,7 +524,7 @@ Parser is best suited for those looking for a trade-off between defining their m We export most common classes, exceptions, and utilities from Pydantic as part of parser e.g. `from aws_lambda_powertools.utilities.parser import BaseModel`. 
-If what's your trying to use isn't available as part of the high level import system, use the following escape hatch mechanism: +If what you're trying to use isn't available as part of the high level import system, use the following escape hatch mechanism: ```python title="Pydantic import escape hatch" from aws_lambda_powertools.utilities.parser.pydantic import From 6380b6365176ed3140febdefff7e739a81811a8a Mon Sep 17 00:00:00 2001 From: Release bot Date: Tue, 9 Aug 2022 14:11:42 +0000 Subject: [PATCH 07/49] chore(ci): update changelog with latest changes --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9fc78d3dae1..9a738104460 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -22,9 +22,11 @@ * **jmespath_util:** snippets split, improved, and lint ([#1419](https://github.com/awslabs/aws-lambda-powertools-python/issues/1419)) * **layer:** upgrade to 1.27.0 * **layer:** upgrade to 1.27.0 +* **parser:** minor grammar fix ([#1427](https://github.com/awslabs/aws-lambda-powertools-python/issues/1427)) ## Maintenance +* **ci:** update changelog with latest changes * **ci:** update changelog with latest changes * **ci:** reduce payload and only send prod notification * **ci:** update changelog with latest changes From 8e24ae3feee39ae0d51bb47033d147bce2027846 Mon Sep 17 00:00:00 2001 From: Release bot Date: Tue, 9 Aug 2022 14:59:06 +0000 Subject: [PATCH 08/49] chore(ci): update changelog with latest changes --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9a738104460..27d160ec75f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -26,6 +26,7 @@ ## Maintenance +* **ci:** update changelog with latest changes * **ci:** update changelog with latest changes * **ci:** update changelog with latest changes * **ci:** reduce payload and only send prod notification From becb38ccf4d32aaed805c246afcc8e59e0aa55cd Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Thu, 25 Aug 2022 19:21:04 +0200 Subject: [PATCH 09/49] chore(ci): add linter for GitHub Actions as pre-commit hook (#1479) --- .github/workflows/build_changelog.yml | 1 - .github/workflows/on_opened_pr.yml | 2 -- .github/workflows/python_build.yml | 3 +-- .github/workflows/reusable_deploy_layer_stack.yml | 3 +++ .github/workflows/reusable_export_pr_details.yml | 3 +++ .pre-commit-config.yaml | 5 +++++ 6 files changed, 12 insertions(+), 5 deletions(-) diff --git a/.github/workflows/build_changelog.yml b/.github/workflows/build_changelog.yml index f0501083048..3cd6fffe855 100644 --- a/.github/workflows/build_changelog.yml +++ b/.github/workflows/build_changelog.yml @@ -6,5 +6,4 @@ on: jobs: changelog: - needs: release uses: ./.github/workflows/reusable_publish_changelog.yml diff --git a/.github/workflows/on_opened_pr.yml b/.github/workflows/on_opened_pr.yml index 2663d605325..6c5979c8b80 100644 --- a/.github/workflows/on_opened_pr.yml +++ b/.github/workflows/on_opened_pr.yml @@ -20,8 +20,6 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - - name: "Debug workflow_run event" - run: echo "${{ github }}" - name: "Ensure related issue is present" uses: actions/github-script@v6 env: diff --git a/.github/workflows/python_build.yml b/.github/workflows/python_build.yml index 901c593ebce..e7a9b1273a5 100644 --- a/.github/workflows/python_build.yml +++ b/.github/workflows/python_build.yml @@ -28,7 +28,6 @@ jobs: matrix: python-version: [3.7, 3.8, 3.9] env: - OS: ${{ matrix.os }} PYTHON: ${{ matrix.python-version }} steps: - uses: actions/checkout@v3 
@@ -56,6 +55,6 @@ jobs: with: file: ./coverage.xml # flags: unittests - env_vars: OS,PYTHON + env_vars: PYTHON name: aws-lambda-powertools-python-codecov # fail_ci_if_error: true # failing more consistently making CI unreliable despite all tests above passing diff --git a/.github/workflows/reusable_deploy_layer_stack.yml b/.github/workflows/reusable_deploy_layer_stack.yml index e1190e19873..20d69b9c814 100644 --- a/.github/workflows/reusable_deploy_layer_stack.yml +++ b/.github/workflows/reusable_deploy_layer_stack.yml @@ -8,12 +8,15 @@ on: workflow_call: inputs: stage: + description: "Deployment stage (BETA, PROD)" required: true type: string artefact-name: + description: "CDK Layer Artefact name to download" required: true type: string environment: + description: "GitHub Environment to use for encrypted secrets" required: true type: string diff --git a/.github/workflows/reusable_export_pr_details.yml b/.github/workflows/reusable_export_pr_details.yml index 86c3e7d645e..ec168b95f20 100644 --- a/.github/workflows/reusable_export_pr_details.yml +++ b/.github/workflows/reusable_export_pr_details.yml @@ -4,13 +4,16 @@ on: workflow_call: inputs: record_pr_workflow_id: + description: "Record PR workflow execution ID to download PR details" required: true type: number workflow_origin: # see https://github.com/awslabs/aws-lambda-powertools-python/issues/1349 + description: "Repository full name for runner integrity" required: true type: string secrets: token: + description: "GitHub Actions temporary and scoped token" required: true # Map the workflow outputs to job outputs outputs: diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6a41e0d945c..71b1125cf54 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -39,3 +39,8 @@ repos: hooks: - id: cfn-python-lint files: examples/.*\.(yaml|yml)$ + - repo: https://github.com/rhysd/actionlint + rev: v1.6.16 + hooks: + - id: actionlint-docker + args: [-pyflakes=] From 62a976f17098764a90be1507dc2c0f21e1d0aac5 Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Thu, 25 Aug 2022 20:43:29 +0200 Subject: [PATCH 10/49] chore(ci): add workflow to suggest splitting large PRs (#1480) --- .github/scripts/comment_on_large_pr.js | 25 +++++++++++++++++++++++++ .github/workflows/on_label_added.yml | 25 +++++++++++++++++++++++++ 2 files changed, 50 insertions(+) create mode 100644 .github/scripts/comment_on_large_pr.js create mode 100644 .github/workflows/on_label_added.yml diff --git a/.github/scripts/comment_on_large_pr.js b/.github/scripts/comment_on_large_pr.js new file mode 100644 index 00000000000..5d469c1635c --- /dev/null +++ b/.github/scripts/comment_on_large_pr.js @@ -0,0 +1,25 @@ +const { + PR_NUMBER, + IGNORE_AUTHORS, +} = require("./constants") + +module.exports = async ({github, context, core}) => { + if (IGNORE_AUTHORS.includes(PR_AUTHOR)) { + return core.notice("Author in IGNORE_AUTHORS list; skipping...") + } + + core.info(`Commenting on PR ${PR_NUMBER}`) + + let msg = ` + ### ⚠️Large PR detected⚠️. + + Please consider breaking into smaller PRs to avoid significant review delays. Ignore if this PR has naturally grown to this size after reviews. 
+ `; + + await github.rest.issues.createComment({ + owner: context.repo.owner, + repo: context.repo.repo, + body: msg, + issue_number: PR_NUMBER, + }); +} diff --git a/.github/workflows/on_label_added.yml b/.github/workflows/on_label_added.yml new file mode 100644 index 00000000000..fd4064e5c6a --- /dev/null +++ b/.github/workflows/on_label_added.yml @@ -0,0 +1,25 @@ +name: On Label added + +on: + pull_request: + types: + - labeled + +jobs: + split-large-pr: + if: github.event.label.name == 'size/XXL' + runs-on: ubuntu-latest + permissions: + issues: write + pull-requests: write + steps: + - uses: actions/checkout@v3 + - name: "Suggest split large Pull Request" + uses: actions/github-script@v6 + env: + PR_NUMBER: ${{ github.event.issue.number }} + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + const script = require('.github/scripts/comment_on_large_pr.js') + await script({github, context, core}) From 631c570577fdb06c509875e5b8f86d3a582a5cfa Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 26 Aug 2022 10:48:01 +0200 Subject: [PATCH 11/49] docs(layer): upgrade to 1.28.0 (v33) --- docs/index.md | 55 ++++++++++++++++++++++++++++----------------------- 1 file changed, 30 insertions(+), 25 deletions(-) diff --git a/docs/index.md b/docs/index.md index 95ce2c2a707..78b76c7bed6 100644 --- a/docs/index.md +++ b/docs/index.md @@ -14,7 +14,7 @@ A suite of utilities for AWS Lambda functions to ease adopting best practices su Powertools is available in the following formats: -* **Lambda Layer**: [**arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:29**](#){: .copyMe}:clipboard: +* **Lambda Layer**: [**arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:33**](#){: .copyMe}:clipboard: * **PyPi**: **`pip install aws-lambda-powertools`** ???+ hint "Support this project by using Lambda Layers :heart:" @@ -32,23 +32,28 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: | Region | Layer ARN | | ---------------- | -------------------------------------------------------------------------------------------------------- | - | `us-east-1` | [arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPython:29](#){: .copyMe}:clipboard: | - | `us-east-2` | [arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPython:29](#){: .copyMe}:clipboard: | - | `us-west-1` | [arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPython:29](#){: .copyMe}:clipboard: | - | `us-west-2` | [arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPython:29](#){: .copyMe}:clipboard: | - | `ap-south-1` | [arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPython:29](#){: .copyMe}:clipboard: | - | `ap-northeast-1` | [arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPython:29](#){: .copyMe}:clipboard: | - | `ap-northeast-2` | [arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPython:29](#){: .copyMe}:clipboard: | - | `ap-northeast-3` | [arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPython:29](#){: .copyMe}:clipboard: | - | `ap-southeast-1` | [arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPython:29](#){: .copyMe}:clipboard: | - | `ap-southeast-2` | [arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPython:29](#){: .copyMe}:clipboard: | - | `eu-central-1` | [arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPython:29](#){: .copyMe}:clipboard: | - | `eu-west-1` | 
[arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPython:29](#){: .copyMe}:clipboard: | - | `eu-west-2` | [arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPython:29](#){: .copyMe}:clipboard: | - | `eu-west-3` | [arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPython:29](#){: .copyMe}:clipboard: | - | `eu-north-1` | [arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPython:29](#){: .copyMe}:clipboard: | - | `ca-central-1` | [arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPython:29](#){: .copyMe}:clipboard: | - | `sa-east-1` | [arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPython:29](#){: .copyMe}:clipboard: | + | `af-south-1` | [arn:aws:lambda:af-south-1:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: | + | `ap-east-1` | [arn:aws:lambda:ap-east-1:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: | + | `ap-northeast-1` | [arn:aws:lambda:ap-northeast-1:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: | + | `ap-northeast-2` | [arn:aws:lambda:ap-northeast-2:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: | + | `ap-northeast-3` | [arn:aws:lambda:ap-northeast-3:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: | + | `ap-south-1` | [arn:aws:lambda:ap-south-1:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: | + | `ap-southeast-1` | [arn:aws:lambda:ap-southeast-1:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: | + | `ap-southeast-2` | [arn:aws:lambda:ap-southeast-2:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: | + | `ap-southeast-3` | [arn:aws:lambda:ap-southeast-3:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: | + | `ca-central-1` | [arn:aws:lambda:ca-central-1:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: | + | `eu-central-1` | [arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: | + | `eu-north-1` | [arn:aws:lambda:eu-north-1:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: | + | `eu-south-1` | [arn:aws:lambda:eu-south-1:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: | + | `eu-west-1` | [arn:aws:lambda:eu-west-1:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: | + | `eu-west-2` | [arn:aws:lambda:eu-west-2:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: | + | `eu-west-3` | [arn:aws:lambda:eu-west-3:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: | + | `me-south-1` | [arn:aws:lambda:me-south-1:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: | + | `sa-east-1` | [arn:aws:lambda:sa-east-1:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: | + | `us-east-1` | [arn:aws:lambda:us-east-1:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: | + | `us-east-2` | [arn:aws:lambda:us-east-2:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: | + | `us-west-1` | [arn:aws:lambda:us-west-1:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: | + | `us-west-2` | [arn:aws:lambda:us-west-2:017000801446:layer:AWSLambdaPowertoolsPython:33](#){: .copyMe}:clipboard: | ??? question "Can't find our Lambda Layer for your preferred AWS region?" 
You can use [Serverless Application Repository (SAR)](#sar) method, our [CDK Layer Construct](https://github.com/aws-samples/cdk-lambda-powertools-python-layer){target="_blank"}, or PyPi like you normally would for any other library. @@ -62,7 +67,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: Type: AWS::Serverless::Function Properties: Layers: - - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPython:29 + - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPython:33 ``` === "Serverless framework" @@ -72,7 +77,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: hello: handler: lambda_function.lambda_handler layers: - - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPython:29 + - arn:aws:lambda:${aws:region}:017000801446:layer:AWSLambdaPowertoolsPython:33 ``` === "CDK" @@ -88,7 +93,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: powertools_layer = aws_lambda.LayerVersion.from_layer_version_arn( self, id="lambda-powertools", - layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPython:29" + layer_version_arn=f"arn:aws:lambda:{env.region}:017000801446:layer:AWSLambdaPowertoolsPython:33" ) aws_lambda.Function(self, 'sample-app-lambda', @@ -137,7 +142,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: role = aws_iam_role.iam_for_lambda.arn handler = "index.test" runtime = "python3.9" - layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:29"] + layers = ["arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:33"] source_code_hash = filebase64sha256("lambda_function_payload.zip") } @@ -156,7 +161,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: ? Do you want to configure advanced settings? Yes ... ? Do you want to enable Lambda layers for this function? Yes - ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPython:29 + ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPython:33 ❯ amplify push -y @@ -167,7 +172,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: - Name: ? Which setting do you want to update? Lambda layers configuration ? Do you want to enable Lambda layers for this function? Yes - ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPython:29 + ? Enter up to 5 existing Lambda layer ARNs (comma-separated): arn:aws:lambda:eu-central-1:017000801446:layer:AWSLambdaPowertoolsPython:33 ? Do you want to edit the local lambda function now? No ``` @@ -175,7 +180,7 @@ You can include Lambda Powertools Lambda Layer using [AWS Lambda Console](https: Change {region} to your AWS region, e.g. `eu-west-1` ```bash title="AWS CLI" - aws lambda get-layer-version-by-arn --arn arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:29 --region {region} + aws lambda get-layer-version-by-arn --arn arn:aws:lambda:{region}:017000801446:layer:AWSLambdaPowertoolsPython:33 --region {region} ``` The pre-signed URL to download this Lambda Layer will be within `Location` key. 
From 937ed38002056e3d8acb02de71beed6bc112c872 Mon Sep 17 00:00:00 2001 From: Release bot Date: Fri, 26 Aug 2022 08:48:52 +0000 Subject: [PATCH 12/49] update changelog with latest changes --- CHANGELOG.md | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index ffd1f4094e8..f2d8ac0db89 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,25 @@ # Unreleased +## Documentation + +* **layer:** upgrade to 1.28.0 (v33) + +## Features + +* **ci:** add actionlint in pre-commit hook + +## Maintenance + +* **ci:** add workflow to suggest splitting large PRs ([#1480](https://github.com/awslabs/aws-lambda-powertools-python/issues/1480)) +* **ci:** add linter for GitHub Actions as pre-commit hook ([#1479](https://github.com/awslabs/aws-lambda-powertools-python/issues/1479)) +* **ci:** remove dangling debug step +* **ci:** fix invalid dependency leftover +* **ci:** add missing description fields +* **ci:** remove unused and undeclared OS matrix env +* **ci:** limit E2E workflow run for source code change +* **maintainers:** update release workflow link + ## [v1.28.0] - 2022-08-25 From 8a608c54add7a826be19736e7cbe21230f1099bc Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 26 Aug 2022 11:05:47 +0200 Subject: [PATCH 13/49] fix(ci): event resolution for on_label_added workflow --- .github/workflows/on_label_added.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/on_label_added.yml b/.github/workflows/on_label_added.yml index fd4064e5c6a..023de081915 100644 --- a/.github/workflows/on_label_added.yml +++ b/.github/workflows/on_label_added.yml @@ -7,7 +7,7 @@ on: jobs: split-large-pr: - if: github.event.label.name == 'size/XXL' + if: ${{ github.event.label.name == 'size/XXL' }} runs-on: ubuntu-latest permissions: issues: write @@ -17,9 +17,9 @@ jobs: - name: "Suggest split large Pull Request" uses: actions/github-script@v6 env: - PR_NUMBER: ${{ github.event.issue.number }} + PR_NUMBER: ${{ github.event.pull_request.number }} with: github-token: ${{ secrets.GITHUB_TOKEN }} script: | - const script = require('.github/scripts/comment_on_large_pr.js') - await script({github, context, core}) + const script = require('.github/scripts/comment_on_large_pr.js'); + await script({github, context, core}); From 366b326851ca6ab5780ec42f3d4c7c6804ffbcbb Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 26 Aug 2022 11:28:26 +0200 Subject: [PATCH 14/49] fix(ci): gracefully and successful exit changelog upon no changes --- .github/workflows/reusable_publish_changelog.yml | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/.github/workflows/reusable_publish_changelog.yml b/.github/workflows/reusable_publish_changelog.yml index 2cb786ed86a..30e49b98ae5 100644 --- a/.github/workflows/reusable_publish_changelog.yml +++ b/.github/workflows/reusable_publish_changelog.yml @@ -26,13 +26,15 @@ jobs: git config user.name "Release bot" git config user.email "aws-devax-open-source@amazon.com" git config pull.rebase true - git config remote.origin.url >&- || git remote add origin https://github.com/$origin # Git Detached mode (release notes) doesn't have origin - git pull origin $BRANCH + git config remote.origin.url >&- || git remote add origin https://github.com/"${origin}" # Git Detached mode (release notes) doesn't have origin + git pull origin "${BRANCH}" - name: "Generate latest changelog" run: make changelog - name: Update Changelog in trunk run: | + HAS_CHANGE=$(git status --porcelain) + test -z 
"${HAS_CHANGE}" && echo "Nothing to update" && exit 0 git add CHANGELOG.md git commit -m "update changelog with latest changes" - git pull origin $BRANCH # prevents concurrent branch update failing push - git push origin HEAD:refs/heads/$BRANCH + git pull origin "${BRANCH}" # prevents concurrent branch update failing push + git push origin HEAD:refs/heads/"${BRANCH}" From cae9557a1c80f9d36555b9d173a659273a2d6f68 Mon Sep 17 00:00:00 2001 From: Release bot Date: Fri, 26 Aug 2022 09:29:07 +0000 Subject: [PATCH 15/49] update changelog with latest changes --- CHANGELOG.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index f2d8ac0db89..1c5e30a52dc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,11 @@ # Unreleased +## Bug Fixes + +* **ci:** gracefully and successful exit changelog upon no changes +* **ci:** event resolution for on_label_added workflow + ## Documentation * **layer:** upgrade to 1.28.0 (v33) From 06fb2556eed7021e4c2c175e99f1320b404b8b66 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Fri, 26 Aug 2022 15:10:34 +0200 Subject: [PATCH 16/49] chore(ci): enable ci checks for v2 --- .github/workflows/codeql-analysis.yml | 30 +++++++++++++-------------- .github/workflows/python_build.yml | 4 ++-- .github/workflows/run-e2e-tests.yml | 2 +- 3 files changed, 18 insertions(+), 18 deletions(-) diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index edd7e67bf91..f8a7849e7ea 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -2,7 +2,7 @@ name: "CodeQL" on: push: - branches: [develop] + branches: [develop, v2] jobs: analyze: @@ -14,23 +14,23 @@ jobs: matrix: # Override automatic language detection by changing the below list # Supported options are ['csharp', 'cpp', 'go', 'java', 'javascript', 'python'] - language: ['python'] + language: ["python"] # Learn more... # https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#overriding-automatic-language-detection steps: - - name: Checkout repository - uses: actions/checkout@v3 + - name: Checkout repository + uses: actions/checkout@v3 - # Initializes the CodeQL tools for scanning. - - name: Initialize CodeQL - uses: github/codeql-action/init@v2 - with: - languages: ${{ matrix.language }} - # If you wish to specify custom queries, you can do so here or in a config file. - # By default, queries listed here will override any specified in a config file. - # Prefix the list here with "+" to use these queries and those in the config file. - # queries: ./path/to/local/query, your-org/your-repo/queries@main + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@v2 + with: + languages: ${{ matrix.language }} + # If you wish to specify custom queries, you can do so here or in a config file. + # By default, queries listed here will override any specified in a config file. + # Prefix the list here with "+" to use these queries and those in the config file. 
+ # queries: ./path/to/local/query, your-org/your-repo/queries@main - - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v2 + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v2 diff --git a/.github/workflows/python_build.yml b/.github/workflows/python_build.yml index e7a9b1273a5..29ec0afaad5 100644 --- a/.github/workflows/python_build.yml +++ b/.github/workflows/python_build.yml @@ -10,6 +10,7 @@ on: - "mypy.ini" branches: - develop + - v2 push: paths: - "aws_lambda_powertools/**" @@ -19,6 +20,7 @@ on: - "mypy.ini" branches: - develop + - v2 jobs: build: @@ -54,7 +56,5 @@ jobs: uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # 3.1.0 with: file: ./coverage.xml - # flags: unittests env_vars: PYTHON name: aws-lambda-powertools-python-codecov - # fail_ci_if_error: true # failing more consistently making CI unreliable despite all tests above passing diff --git a/.github/workflows/run-e2e-tests.yml b/.github/workflows/run-e2e-tests.yml index a2ed9dacc1d..8bea436f208 100644 --- a/.github/workflows/run-e2e-tests.yml +++ b/.github/workflows/run-e2e-tests.yml @@ -4,7 +4,7 @@ on: workflow_dispatch: push: - branches: [develop] + branches: [develop, v2] paths: - "aws_lambda_powertools/**" - "tests/e2e/**" From e473312a82391418e4cecc33ed40ecd0b796b0e5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 26 Aug 2022 20:14:40 +0000 Subject: [PATCH 17/49] chore(deps-dev): bump mypy-boto3-dynamodb from 1.24.55.post1 to 1.24.60 (#1481) Bumps [mypy-boto3-dynamodb](https://github.com/youtype/mypy_boto3_builder) from 1.24.55.post1 to 1.24.60. - [Release notes](https://github.com/youtype/mypy_boto3_builder/releases) - [Commits](https://github.com/youtype/mypy_boto3_builder/commits) --- updated-dependencies: - dependency-name: mypy-boto3-dynamodb dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 58 +++++++++++++++++++++++++------------------------- pyproject.toml | 2 +- 2 files changed, 30 insertions(+), 30 deletions(-) diff --git a/poetry.lock b/poetry.lock index c6696bdff76..3a6220e1efe 100644 --- a/poetry.lock +++ b/poetry.lock @@ -15,10 +15,10 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [package.extras] -dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] -docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] -tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] -tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] +dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "sphinx", "sphinx-notfound-page", "zope.interface"] +docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] +tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "zope.interface"] +tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six"] [[package]] name = "aws-cdk-lib" @@ -131,7 +131,7 @@ python-versions = "*" attrs = ">=17.3" [package.extras] -dev = ["pendulum", "hypothesis", "pytest", "sphinx", "coverage", "tox", "flake8", "watchdog", "wheel", "bumpversion"] +dev = ["bumpversion", "coverage", "flake8", "hypothesis", "pendulum", "pytest", "sphinx", "tox", "watchdog", "wheel"] [[package]] name = "cattrs" @@ -236,8 +236,8 @@ optional = true python-versions = ">=3.6,<4.0" [package.extras] -dnssec = ["cryptography (>=2.6,<37.0)"] curio = ["curio (>=1.2,<2.0)", "sniffio (>=1.1,<2.0)"] +dnssec = ["cryptography (>=2.6,<37.0)"] doh = ["h2 (>=4.1.0)", "httpx (>=0.21.1)", "requests (>=2.23.0,<3.0.0)", "requests-toolbelt (>=0.9.1,<0.10.0)"] idna = ["idna (>=2.1,<4.0)"] trio = ["trio (>=0.14,<0.20)"] @@ -294,7 +294,7 @@ optional = false python-versions = "*" [package.extras] -devel = ["colorama", "jsonschema", "json-spec", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] +devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] [[package]] name = "filelock" @@ -440,7 +440,7 @@ python-versions = "*" python-dateutil = ">=2.8.1" [package.extras] -dev = ["wheel", "flake8", "markdown", "twine"] +dev = ["flake8", "markdown", "twine", "wheel"] [[package]] name = "gitdb" @@ -486,9 +486,9 @@ typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} zipp = ">=0.5" [package.extras] -docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] +docs = ["jaraco.packaging (>=9)", "rst.linker (>=1.9)", "sphinx"] perf = ["ipython"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.3)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"] 
+testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] [[package]] name = "importlib-resources" @@ -502,8 +502,8 @@ python-versions = ">=3.6" zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} [package.extras] -docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-black (>=0.3.7)", "pytest-mypy"] +docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"] +testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy"] [[package]] name = "iniconfig" @@ -522,10 +522,10 @@ optional = false python-versions = ">=3.6.1,<4.0" [package.extras] -pipfile_deprecated_finder = ["pipreqs", "requirementslib"] -requirements_deprecated_finder = ["pipreqs", "pip-api"] colors = ["colorama (>=0.4.3,<0.5.0)"] +pipfile_deprecated_finder = ["pipreqs", "requirementslib"] plugins = ["setuptools"] +requirements_deprecated_finder = ["pip-api", "pipreqs"] [[package]] name = "jinja2" @@ -649,8 +649,8 @@ packaging = "*" "ruamel.yaml" = "*" [package.extras] -test = ["flake8 (>=3.0)", "coverage"] -dev = ["pypandoc (>=1.4)", "flake8 (>=3.0)", "coverage"] +dev = ["coverage", "flake8 (>=3.0)", "pypandoc (>=1.4)"] +test = ["coverage", "flake8 (>=3.0)"] [[package]] name = "mkdocs" @@ -766,8 +766,8 @@ typing-extensions = ">=4.1.0" [[package]] name = "mypy-boto3-dynamodb" -version = "1.24.55.post1" -description = "Type annotations for boto3.DynamoDB 1.24.55 service generated with mypy-boto3-builder 7.11.7" +version = "1.24.60" +description = "Type annotations for boto3.DynamoDB 1.24.60 service generated with mypy-boto3-builder 7.11.8" category = "dev" optional = false python-versions = ">=3.7" @@ -912,8 +912,8 @@ python-versions = ">=3.6" importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} [package.extras] -testing = ["pytest-benchmark", "pytest"] -dev = ["tox", "pre-commit"] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] [[package]] name = "publication" @@ -1070,7 +1070,7 @@ coverage = {version = ">=5.2.1", extras = ["toml"]} pytest = ">=4.6" [package.extras] -testing = ["virtualenv", "pytest-xdist", "six", "process-tests", "hunter", "fields"] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] [[package]] name = "pytest-forked" @@ -1096,7 +1096,7 @@ python-versions = ">=3.6" pytest = ">=5.0" [package.extras] -dev = ["pre-commit", "tox", "pytest-asyncio"] +dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "pytest-xdist" @@ -1314,8 +1314,8 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" [package.extras] -brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] -secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "urllib3-secure-extra", "ipaddress"] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] @@ -1359,8 +1359,8 @@ optional = false python-versions = ">=3.7" 
[package.extras] -docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "jaraco.tidelift (>=1.4)"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.3)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] +docs = ["jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx"] +testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] [extras] pydantic = ["pydantic", "email-validator"] @@ -1368,7 +1368,7 @@ pydantic = ["pydantic", "email-validator"] [metadata] lock-version = "1.1" python-versions = "^3.6.2" -content-hash = "a5ff8f9945c42eeee596b973e484efae6bcfde17a0e37cc708cac05b635cec1f" +content-hash = "4e25e40a2ddb51f50559dab19d3f80a6626173bc8f8d7d3da7ea53ba9d6255b0" [metadata.files] atomicwrites = [ @@ -1743,8 +1743,8 @@ mypy-boto3-cloudwatch = [ {file = "mypy_boto3_cloudwatch-1.24.55-py3-none-any.whl", hash = "sha256:23faf8fdfe928f9dcce453a60b03bda69177554eb88c2d7e5240ff91b5b14388"}, ] mypy-boto3-dynamodb = [ - {file = "mypy-boto3-dynamodb-1.24.55.post1.tar.gz", hash = "sha256:c469223c15556d93d247d38c0c31ce3c08d8073ca4597158a27abc70b8d7fbee"}, - {file = "mypy_boto3_dynamodb-1.24.55.post1-py3-none-any.whl", hash = "sha256:c762975d023b356c573d58105c7bfc1b9e7ee62c1299f09784e9dede533179e1"}, + {file = "mypy-boto3-dynamodb-1.24.60.tar.gz", hash = "sha256:aa552233fa8357d99f4a1021ef65b98679e26ebc35d04c31a9d70a4db779c236"}, + {file = "mypy_boto3_dynamodb-1.24.60-py3-none-any.whl", hash = "sha256:df8e91bb25dd6e4090aef22d33504a5e9e305e45e3262d81e7223df4b6ddee5f"}, ] mypy-boto3-lambda = [ {file = "mypy-boto3-lambda-1.24.54.tar.gz", hash = "sha256:c76d28d84bdf94c8980acd85bc07f2747559ca11a990fd6785c9c2389e13aff1"}, diff --git a/pyproject.toml b/pyproject.toml index aa6bfec1c1e..232da7ef9da 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -59,7 +59,7 @@ pytest-benchmark = "^3.4.1" mypy-boto3-appconfig = { version = "^1.24.29", python = ">=3.7" } mypy-boto3-cloudformation = { version = "^1.24.0", python = ">=3.7" } mypy-boto3-cloudwatch = { version = "^1.24.35", python = ">=3.7" } -mypy-boto3-dynamodb = { version = "^1.24.27", python = ">=3.7" } +mypy-boto3-dynamodb = { version = "^1.24.60", python = ">=3.7" } mypy-boto3-lambda = { version = "^1.24.0", python = ">=3.7" } mypy-boto3-logs = { version = "^1.24.0", python = ">=3.7" } mypy-boto3-secretsmanager = { version = "^1.24.11", python = ">=3.7" } From c85d94d94062ac29030e2a62664ac6e374065f5d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 29 Aug 2022 08:24:40 +0000 Subject: [PATCH 18/49] chore(deps-dev): bump mypy-boto3-dynamodb from 1.24.55.post1 to 1.24.60 (#306) Bumps [mypy-boto3-dynamodb](https://github.com/youtype/mypy_boto3_builder) from 1.24.55.post1 to 1.24.60. - [Release notes](https://github.com/youtype/mypy_boto3_builder/releases) - [Commits](https://github.com/youtype/mypy_boto3_builder/commits) --- updated-dependencies: - dependency-name: mypy-boto3-dynamodb dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 58 +++++++++++++++++++++++++------------------------- pyproject.toml | 2 +- 2 files changed, 30 insertions(+), 30 deletions(-) diff --git a/poetry.lock b/poetry.lock index c6696bdff76..3a6220e1efe 100644 --- a/poetry.lock +++ b/poetry.lock @@ -15,10 +15,10 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [package.extras] -dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] -docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] -tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] -tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] +dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "sphinx", "sphinx-notfound-page", "zope.interface"] +docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] +tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "zope.interface"] +tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six"] [[package]] name = "aws-cdk-lib" @@ -131,7 +131,7 @@ python-versions = "*" attrs = ">=17.3" [package.extras] -dev = ["pendulum", "hypothesis", "pytest", "sphinx", "coverage", "tox", "flake8", "watchdog", "wheel", "bumpversion"] +dev = ["bumpversion", "coverage", "flake8", "hypothesis", "pendulum", "pytest", "sphinx", "tox", "watchdog", "wheel"] [[package]] name = "cattrs" @@ -236,8 +236,8 @@ optional = true python-versions = ">=3.6,<4.0" [package.extras] -dnssec = ["cryptography (>=2.6,<37.0)"] curio = ["curio (>=1.2,<2.0)", "sniffio (>=1.1,<2.0)"] +dnssec = ["cryptography (>=2.6,<37.0)"] doh = ["h2 (>=4.1.0)", "httpx (>=0.21.1)", "requests (>=2.23.0,<3.0.0)", "requests-toolbelt (>=0.9.1,<0.10.0)"] idna = ["idna (>=2.1,<4.0)"] trio = ["trio (>=0.14,<0.20)"] @@ -294,7 +294,7 @@ optional = false python-versions = "*" [package.extras] -devel = ["colorama", "jsonschema", "json-spec", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] +devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] [[package]] name = "filelock" @@ -440,7 +440,7 @@ python-versions = "*" python-dateutil = ">=2.8.1" [package.extras] -dev = ["wheel", "flake8", "markdown", "twine"] +dev = ["flake8", "markdown", "twine", "wheel"] [[package]] name = "gitdb" @@ -486,9 +486,9 @@ typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} zipp = ">=0.5" [package.extras] -docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] +docs = ["jaraco.packaging (>=9)", "rst.linker (>=1.9)", "sphinx"] perf = ["ipython"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.3)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"] 
+testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] [[package]] name = "importlib-resources" @@ -502,8 +502,8 @@ python-versions = ">=3.6" zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} [package.extras] -docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-black (>=0.3.7)", "pytest-mypy"] +docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"] +testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy"] [[package]] name = "iniconfig" @@ -522,10 +522,10 @@ optional = false python-versions = ">=3.6.1,<4.0" [package.extras] -pipfile_deprecated_finder = ["pipreqs", "requirementslib"] -requirements_deprecated_finder = ["pipreqs", "pip-api"] colors = ["colorama (>=0.4.3,<0.5.0)"] +pipfile_deprecated_finder = ["pipreqs", "requirementslib"] plugins = ["setuptools"] +requirements_deprecated_finder = ["pip-api", "pipreqs"] [[package]] name = "jinja2" @@ -649,8 +649,8 @@ packaging = "*" "ruamel.yaml" = "*" [package.extras] -test = ["flake8 (>=3.0)", "coverage"] -dev = ["pypandoc (>=1.4)", "flake8 (>=3.0)", "coverage"] +dev = ["coverage", "flake8 (>=3.0)", "pypandoc (>=1.4)"] +test = ["coverage", "flake8 (>=3.0)"] [[package]] name = "mkdocs" @@ -766,8 +766,8 @@ typing-extensions = ">=4.1.0" [[package]] name = "mypy-boto3-dynamodb" -version = "1.24.55.post1" -description = "Type annotations for boto3.DynamoDB 1.24.55 service generated with mypy-boto3-builder 7.11.7" +version = "1.24.60" +description = "Type annotations for boto3.DynamoDB 1.24.60 service generated with mypy-boto3-builder 7.11.8" category = "dev" optional = false python-versions = ">=3.7" @@ -912,8 +912,8 @@ python-versions = ">=3.6" importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} [package.extras] -testing = ["pytest-benchmark", "pytest"] -dev = ["tox", "pre-commit"] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] [[package]] name = "publication" @@ -1070,7 +1070,7 @@ coverage = {version = ">=5.2.1", extras = ["toml"]} pytest = ">=4.6" [package.extras] -testing = ["virtualenv", "pytest-xdist", "six", "process-tests", "hunter", "fields"] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] [[package]] name = "pytest-forked" @@ -1096,7 +1096,7 @@ python-versions = ">=3.6" pytest = ">=5.0" [package.extras] -dev = ["pre-commit", "tox", "pytest-asyncio"] +dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "pytest-xdist" @@ -1314,8 +1314,8 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" [package.extras] -brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] -secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "urllib3-secure-extra", "ipaddress"] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] @@ -1359,8 +1359,8 @@ optional = false python-versions = ">=3.7" 
[package.extras] -docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "jaraco.tidelift (>=1.4)"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.3)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] +docs = ["jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx"] +testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] [extras] pydantic = ["pydantic", "email-validator"] @@ -1368,7 +1368,7 @@ pydantic = ["pydantic", "email-validator"] [metadata] lock-version = "1.1" python-versions = "^3.6.2" -content-hash = "a5ff8f9945c42eeee596b973e484efae6bcfde17a0e37cc708cac05b635cec1f" +content-hash = "4e25e40a2ddb51f50559dab19d3f80a6626173bc8f8d7d3da7ea53ba9d6255b0" [metadata.files] atomicwrites = [ @@ -1743,8 +1743,8 @@ mypy-boto3-cloudwatch = [ {file = "mypy_boto3_cloudwatch-1.24.55-py3-none-any.whl", hash = "sha256:23faf8fdfe928f9dcce453a60b03bda69177554eb88c2d7e5240ff91b5b14388"}, ] mypy-boto3-dynamodb = [ - {file = "mypy-boto3-dynamodb-1.24.55.post1.tar.gz", hash = "sha256:c469223c15556d93d247d38c0c31ce3c08d8073ca4597158a27abc70b8d7fbee"}, - {file = "mypy_boto3_dynamodb-1.24.55.post1-py3-none-any.whl", hash = "sha256:c762975d023b356c573d58105c7bfc1b9e7ee62c1299f09784e9dede533179e1"}, + {file = "mypy-boto3-dynamodb-1.24.60.tar.gz", hash = "sha256:aa552233fa8357d99f4a1021ef65b98679e26ebc35d04c31a9d70a4db779c236"}, + {file = "mypy_boto3_dynamodb-1.24.60-py3-none-any.whl", hash = "sha256:df8e91bb25dd6e4090aef22d33504a5e9e305e45e3262d81e7223df4b6ddee5f"}, ] mypy-boto3-lambda = [ {file = "mypy-boto3-lambda-1.24.54.tar.gz", hash = "sha256:c76d28d84bdf94c8980acd85bc07f2747559ca11a990fd6785c9c2389e13aff1"}, diff --git a/pyproject.toml b/pyproject.toml index aa6bfec1c1e..232da7ef9da 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -59,7 +59,7 @@ pytest-benchmark = "^3.4.1" mypy-boto3-appconfig = { version = "^1.24.29", python = ">=3.7" } mypy-boto3-cloudformation = { version = "^1.24.0", python = ">=3.7" } mypy-boto3-cloudwatch = { version = "^1.24.35", python = ">=3.7" } -mypy-boto3-dynamodb = { version = "^1.24.27", python = ">=3.7" } +mypy-boto3-dynamodb = { version = "^1.24.60", python = ">=3.7" } mypy-boto3-lambda = { version = "^1.24.0", python = ">=3.7" } mypy-boto3-logs = { version = "^1.24.0", python = ">=3.7" } mypy-boto3-secretsmanager = { version = "^1.24.11", python = ">=3.7" } From c962046bf1d917e242ae156e66737824f348418a Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Mon, 29 Aug 2022 16:12:03 +0200 Subject: [PATCH 19/49] fix(ci): ensure PR_AUTHOR is present for large_pr_split workflow --- .github/scripts/comment_on_large_pr.js | 1 + .github/workflows/on_label_added.yml | 1 + 2 files changed, 2 insertions(+) diff --git a/.github/scripts/comment_on_large_pr.js b/.github/scripts/comment_on_large_pr.js index 5d469c1635c..02fb3f5bd13 100644 --- a/.github/scripts/comment_on_large_pr.js +++ b/.github/scripts/comment_on_large_pr.js @@ -1,5 +1,6 @@ const { PR_NUMBER, + PR_AUTHOR, IGNORE_AUTHORS, } = require("./constants") diff --git a/.github/workflows/on_label_added.yml b/.github/workflows/on_label_added.yml index 023de081915..4e638cf6eea 100644 --- a/.github/workflows/on_label_added.yml +++ b/.github/workflows/on_label_added.yml @@ -18,6 +18,7 @@ jobs: uses: actions/github-script@v6 env: PR_NUMBER: ${{ 
github.event.pull_request.number }} + PR_AUTHOR: ${{ github.event.pull_request.user.login }} with: github-token: ${{ secrets.GITHUB_TOKEN }} script: | From 6f8769d43dadf476f82edab7b01f8320fc5a6179 Mon Sep 17 00:00:00 2001 From: Stephan Date: Mon, 29 Aug 2022 16:45:40 +0200 Subject: [PATCH 20/49] feat(event_sources): add CloudWatch dashboard custom widget event (#1474) Co-authored-by: Ruben Fonseca Co-authored-by: Leandro Damascena --- .../utilities/data_classes/__init__.py | 2 + .../cloud_watch_custom_widget_event.py | 158 ++++++++++++++++++ docs/utilities/data_classes.md | 35 ++++ tests/events/cloudWatchDashboardEvent.json | 38 +++++ tests/functional/test_data_classes.py | 39 +++++ 5 files changed, 272 insertions(+) create mode 100644 aws_lambda_powertools/utilities/data_classes/cloud_watch_custom_widget_event.py create mode 100644 tests/events/cloudWatchDashboardEvent.json diff --git a/aws_lambda_powertools/utilities/data_classes/__init__.py b/aws_lambda_powertools/utilities/data_classes/__init__.py index b20ec504732..20a41310523 100644 --- a/aws_lambda_powertools/utilities/data_classes/__init__.py +++ b/aws_lambda_powertools/utilities/data_classes/__init__.py @@ -5,6 +5,7 @@ from .alb_event import ALBEvent from .api_gateway_proxy_event import APIGatewayProxyEvent, APIGatewayProxyEventV2 from .appsync_resolver_event import AppSyncResolverEvent +from .cloud_watch_custom_widget_event import CloudWatchDashboardCustomWidgetEvent from .cloud_watch_logs_event import CloudWatchLogsEvent from .code_pipeline_job_event import CodePipelineJobEvent from .connect_contact_flow_event import ConnectContactFlowEvent @@ -23,6 +24,7 @@ "APIGatewayProxyEventV2", "AppSyncResolverEvent", "ALBEvent", + "CloudWatchDashboardCustomWidgetEvent", "CloudWatchLogsEvent", "CodePipelineJobEvent", "ConnectContactFlowEvent", diff --git a/aws_lambda_powertools/utilities/data_classes/cloud_watch_custom_widget_event.py b/aws_lambda_powertools/utilities/data_classes/cloud_watch_custom_widget_event.py new file mode 100644 index 00000000000..40219f944ba --- /dev/null +++ b/aws_lambda_powertools/utilities/data_classes/cloud_watch_custom_widget_event.py @@ -0,0 +1,158 @@ +from typing import Any, Dict, Optional + +from aws_lambda_powertools.utilities.data_classes.common import DictWrapper + + +class TimeZone(DictWrapper): + @property + def label(self) -> str: + """The time range label. Either 'UTC' or 'Local'""" + return self["label"] + + @property + def offset_iso(self) -> str: + """The time range offset in the format +/-00:00""" + return self["offsetISO"] + + @property + def offset_in_minutes(self) -> int: + """The time range offset in minutes""" + return int(self["offsetInMinutes"]) + + +class TimeRange(DictWrapper): + @property + def mode(self) -> str: + """The time range mode, i.e. 
'relative' or 'absolute'""" + return self["mode"] + + @property + def start(self) -> int: + """The start time within the time range""" + return self["start"] + + @property + def end(self) -> int: + """The end time within the time range""" + return self["end"] + + @property + def relative_start(self) -> Optional[int]: + """The relative start time within the time range""" + return self.get("relativeStart") + + @property + def zoom_start(self) -> Optional[int]: + """The start time within the zoomed time range""" + return (self.get("zoom") or {}).get("start") + + @property + def zoom_end(self) -> Optional[int]: + """The end time within the zoomed time range""" + return (self.get("zoom") or {}).get("end") + + +class CloudWatchWidgetContext(DictWrapper): + @property + def dashboard_name(self) -> str: + """Get dashboard name, in which the widget is used""" + return self["dashboardName"] + + @property + def widget_id(self) -> str: + """Get widget ID""" + return self["widgetId"] + + @property + def domain(self) -> str: + """AWS domain name""" + return self["domain"] + + @property + def account_id(self) -> str: + """Get AWS Account ID""" + return self["accountId"] + + @property + def locale(self) -> str: + """Get locale language""" + return self["locale"] + + @property + def timezone(self) -> TimeZone: + """Timezone information of the dashboard""" + return TimeZone(self["timezone"]) + + @property + def period(self) -> int: + """The period shown on the dashboard""" + return int(self["period"]) + + @property + def is_auto_period(self) -> bool: + """Whether auto period is enabled""" + return bool(self["isAutoPeriod"]) + + @property + def time_range(self) -> TimeRange: + """The widget time range""" + return TimeRange(self["timeRange"]) + + @property + def theme(self) -> str: + """The dashboard theme, i.e. 'light' or 'dark'""" + return self["theme"] + + @property + def link_charts(self) -> bool: + """The widget is linked to other charts""" + return bool(self["linkCharts"]) + + @property + def title(self) -> str: + """Get widget title""" + return self["title"] + + @property + def params(self) -> Dict[str, Any]: + """Get widget parameters""" + return self["params"] + + @property + def forms(self) -> Dict[str, Any]: + """Get widget form data""" + return self["forms"]["all"] + + @property + def height(self) -> int: + """Get widget height""" + return int(self["height"]) + + @property + def width(self) -> int: + """Get widget width""" + return int(self["width"]) + + +class CloudWatchDashboardCustomWidgetEvent(DictWrapper): + """CloudWatch dashboard custom widget event + + You can use a Lambda function to create a custom widget on a CloudWatch dashboard. 
+ + Documentation: + ------------- + - https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/add_custom_widget_dashboard_about.html + """ + + @property + def describe(self) -> bool: + """Display widget documentation""" + return bool(self.get("describe", False)) + + @property + def widget_context(self) -> Optional[CloudWatchWidgetContext]: + """The widget context""" + if self.get("widgetContext"): + return CloudWatchWidgetContext(self["widgetContext"]) + + return None diff --git a/docs/utilities/data_classes.md b/docs/utilities/data_classes.md index a450c8788e4..86cbebd3c97 100644 --- a/docs/utilities/data_classes.md +++ b/docs/utilities/data_classes.md @@ -68,6 +68,7 @@ Event Source | Data_class [Application Load Balancer](#application-load-balancer) | `ALBEvent` [AppSync Authorizer](#appsync-authorizer) | `AppSyncAuthorizerEvent` [AppSync Resolver](#appsync-resolver) | `AppSyncResolverEvent` +[CloudWatch Dashboard Custom Widget](#cloudwatch-dashboard-custom-widget) | `CloudWatchDashboardCustomWidgetEvent` [CloudWatch Logs](#cloudwatch-logs) | `CloudWatchLogsEvent` [CodePipeline Job Event](#codepipeline-job) | `CodePipelineJobEvent` [Cognito User Pool](#cognito-user-pool) | Multiple available under `cognito_user_pool_event` @@ -441,6 +442,40 @@ In this example, we also use the new Logger `correlation_id` and built-in `corre } ``` +### CloudWatch Dashboard Custom Widget + +=== "app.py" + + ```python + from aws_lambda_powertools.utilities.data_classes import event_source, CloudWatchDashboardCustomWidgetEvent + + const DOCS = ` + ## Echo + A simple echo script. Anything passed in \`\`\`echo\`\`\` parameter is returned as the content of custom widget. + + ### Widget parameters + Param | Description + ---|--- + **echo** | The content to echo back + + ### Example parameters + \`\`\` yaml + echo:

<h1>Hello world</h1>
+ \`\`\` + ` + + @event_source(data_class=CloudWatchDashboardCustomWidgetEvent) + def lambda_handler(event: CloudWatchDashboardCustomWidgetEvent, context): + + if event.describe: + return DOCS + + # You can directly return HTML or JSON content + # Alternatively, you can return markdown that will be rendered by CloudWatch + echo = event.widget_context.params["echo"] + return { "markdown": f"# {echo}" } + ``` + ### CloudWatch Logs CloudWatch Logs events by default are compressed and base64 encoded. You can use the helper function provided to decode, diff --git a/tests/events/cloudWatchDashboardEvent.json b/tests/events/cloudWatchDashboardEvent.json new file mode 100644 index 00000000000..fd2d3be62d6 --- /dev/null +++ b/tests/events/cloudWatchDashboardEvent.json @@ -0,0 +1,38 @@ +{ + "original": "param-to-widget", + "widgetContext": { + "dashboardName": "Name-of-current-dashboard", + "widgetId": "widget-16", + "domain": "https://us-east-1.console.aws.amazon.com", + "accountId": "123456789123", + "locale": "en", + "timezone": { + "label": "UTC", + "offsetISO": "+00:00", + "offsetInMinutes": 0 + }, + "period": 300, + "isAutoPeriod": true, + "timeRange": { + "mode": "relative", + "start": 1627236199729, + "end": 1627322599729, + "relativeStart": 86400012, + "zoom": { + "start": 1627276030434, + "end": 1627282956521 + } + }, + "theme": "light", + "linkCharts": true, + "title": "Tweets for Amazon website problem", + "forms": { + "all": {} + }, + "params": { + "original": "param-to-widget" + }, + "width": 588, + "height": 369 + } +} diff --git a/tests/functional/test_data_classes.py b/tests/functional/test_data_classes.py index 8a87075d16c..5c7423add64 100644 --- a/tests/functional/test_data_classes.py +++ b/tests/functional/test_data_classes.py @@ -13,6 +13,7 @@ APIGatewayProxyEvent, APIGatewayProxyEventV2, AppSyncResolverEvent, + CloudWatchDashboardCustomWidgetEvent, CloudWatchLogsEvent, CodePipelineJobEvent, EventBridgeEvent, @@ -99,6 +100,44 @@ def message(self) -> str: assert DataClassSample(data1).raw_event is data1 +def test_cloud_watch_dashboard_event(): + event = CloudWatchDashboardCustomWidgetEvent(load_event("cloudWatchDashboardEvent.json")) + assert event.describe is False + assert event.widget_context.account_id == "123456789123" + assert event.widget_context.domain == "https://us-east-1.console.aws.amazon.com" + assert event.widget_context.dashboard_name == "Name-of-current-dashboard" + assert event.widget_context.widget_id == "widget-16" + assert event.widget_context.locale == "en" + assert event.widget_context.timezone.label == "UTC" + assert event.widget_context.timezone.offset_iso == "+00:00" + assert event.widget_context.timezone.offset_in_minutes == 0 + assert event.widget_context.period == 300 + assert event.widget_context.is_auto_period is True + assert event.widget_context.time_range.mode == "relative" + assert event.widget_context.time_range.start == 1627236199729 + assert event.widget_context.time_range.end == 1627322599729 + assert event.widget_context.time_range.relative_start == 86400012 + assert event.widget_context.time_range.zoom_start == 1627276030434 + assert event.widget_context.time_range.zoom_end == 1627282956521 + assert event.widget_context.theme == "light" + assert event.widget_context.link_charts is True + assert event.widget_context.title == "Tweets for Amazon website problem" + assert event.widget_context.forms == {} + assert event.widget_context.params == {"original": "param-to-widget"} + assert event.widget_context.width == 588 + assert 
event.widget_context.height == 369 + assert event.widget_context.params["original"] == "param-to-widget" + assert event["original"] == "param-to-widget" + assert event.raw_event["original"] == "param-to-widget" + + +def test_cloud_watch_dashboard_describe_event(): + event = CloudWatchDashboardCustomWidgetEvent({"describe": True}) + assert event.describe is True + assert event.widget_context is None + assert event.raw_event == {"describe": True} + + def test_cloud_watch_trigger_event(): event = CloudWatchLogsEvent(load_event("cloudWatchLogEvent.json")) From ee4b46a5b3fc2fc42d6c06cf861f924fca6fd4cc Mon Sep 17 00:00:00 2001 From: Release bot Date: Mon, 29 Aug 2022 14:46:06 +0000 Subject: [PATCH 21/49] update changelog with latest changes --- CHANGELOG.md | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1c5e30a52dc..335e1cc733c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,7 @@ ## Bug Fixes +* **ci:** ensure PR_AUTHOR is present for large_pr_split workflow * **ci:** gracefully and successful exit changelog upon no changes * **ci:** event resolution for on_label_added workflow @@ -16,16 +17,19 @@ ## Features * **ci:** add actionlint in pre-commit hook +* **event_sources:** add CloudWatch dashboard custom widget event ([#1474](https://github.com/awslabs/aws-lambda-powertools-python/issues/1474)) ## Maintenance +* **ci:** add missing description fields +* **ci:** enable ci checks for v2 * **ci:** add workflow to suggest splitting large PRs ([#1480](https://github.com/awslabs/aws-lambda-powertools-python/issues/1480)) * **ci:** add linter for GitHub Actions as pre-commit hook ([#1479](https://github.com/awslabs/aws-lambda-powertools-python/issues/1479)) * **ci:** remove dangling debug step * **ci:** fix invalid dependency leftover -* **ci:** add missing description fields * **ci:** remove unused and undeclared OS matrix env * **ci:** limit E2E workflow run for source code change +* **deps-dev:** bump mypy-boto3-dynamodb from 1.24.55.post1 to 1.24.60 ([#1481](https://github.com/awslabs/aws-lambda-powertools-python/issues/1481)) * **maintainers:** update release workflow link From fc791a18234fbfd61abb38bcf2cc4c2f917e71b5 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Mon, 29 Aug 2022 17:00:23 +0200 Subject: [PATCH 22/49] fix(ci): on_label permissioning model & workflow execution --- .github/scripts/comment_on_large_pr.js | 52 ++++++++++++++++++++++---- .github/workflows/on_label_added.yml | 21 ++++++++--- 2 files changed, 61 insertions(+), 12 deletions(-) diff --git a/.github/scripts/comment_on_large_pr.js b/.github/scripts/comment_on_large_pr.js index 02fb3f5bd13..4693f6463b0 100644 --- a/.github/scripts/comment_on_large_pr.js +++ b/.github/scripts/comment_on_large_pr.js @@ -1,14 +1,16 @@ const { PR_NUMBER, + PR_ACTION, PR_AUTHOR, IGNORE_AUTHORS, } = require("./constants") -module.exports = async ({github, context, core}) => { - if (IGNORE_AUTHORS.includes(PR_AUTHOR)) { - return core.notice("Author in IGNORE_AUTHORS list; skipping...") - } +const notifyAuthor = async ({ + gh_client, + owner, + repository, +}) => { core.info(`Commenting on PR ${PR_NUMBER}`) let msg = ` @@ -17,10 +19,46 @@ module.exports = async ({github, context, core}) => { Please consider breaking into smaller PRs to avoid significant review delays. Ignore if this PR has naturally grown to this size after reviews. 
`; - await github.rest.issues.createComment({ + try { + await gh_client.rest.issues.createComment({ + owner: owner, + repo: repository, + body: msg, + issue_number: PR_NUMBER, + }); + } catch (error) { + core.setFailed("Failed to notify PR author to split large PR"); + console.error(err); + } +} + +module.exports = async ({github, context, core}) => { + if (IGNORE_AUTHORS.includes(PR_AUTHOR)) { + return core.notice("Author in IGNORE_AUTHORS list; skipping...") + } + + if (PR_ACTION != "labeled") { + return core.notice("Only run on PRs labeling actions; skipping") + } + + + /** @type {string[]} */ + const labels = await github.rest.issues.listLabelsOnIssue({ owner: context.repo.owner, repo: context.repo.repo, - body: msg, issue_number: PR_NUMBER, - }); + }) + + // Schema: https://docs.github.com/en/rest/issues/labels#list-labels-for-an-issue + for (const label of labels) { + core.info(`Label: ${label}`) + if (label.name == "size/XXL") { + await notifyAuthor({ + gh_client: github, + owner: context.repo.owner, + repository: context.repo.repo, + }) + break; + } + } } diff --git a/.github/workflows/on_label_added.yml b/.github/workflows/on_label_added.yml index 4e638cf6eea..46be6c144b5 100644 --- a/.github/workflows/on_label_added.yml +++ b/.github/workflows/on_label_added.yml @@ -1,13 +1,23 @@ name: On Label added on: - pull_request: + workflow_run: + workflows: ["Record PR details"] types: - - labeled + - completed jobs: + get_pr_details: + if: ${{ github.event.workflow_run.conclusion == 'success' }} + uses: ./.github/workflows/reusable_export_pr_details.yml + with: + record_pr_workflow_id: ${{ github.event.workflow_run.id }} + workflow_origin: ${{ github.event.repository.full_name }} + secrets: + token: ${{ secrets.GITHUB_TOKEN }} + split-large-pr: - if: ${{ github.event.label.name == 'size/XXL' }} + needs: get_pr_details runs-on: ubuntu-latest permissions: issues: write @@ -17,8 +27,9 @@ jobs: - name: "Suggest split large Pull Request" uses: actions/github-script@v6 env: - PR_NUMBER: ${{ github.event.pull_request.number }} - PR_AUTHOR: ${{ github.event.pull_request.user.login }} + PR_NUMBER: ${{ needs.get_pr_details.outputs.prNumber }} + PR_ACTION: ${{ needs.get_pr_details.outputs.prAction }} + PR_AUTHOR: ${{ needs.get_pr_details.outputs.prAuthor }} with: github-token: ${{ secrets.GITHUB_TOKEN }} script: | From cd262b95108f71bf4f9c832790026bcf805e0712 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Mon, 29 Aug 2022 17:03:18 +0200 Subject: [PATCH 23/49] chore(ci): record pr details upon labeling --- .github/workflows/record_pr.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/record_pr.yml b/.github/workflows/record_pr.yml index 44f445a70ac..b1638ad8865 100644 --- a/.github/workflows/record_pr.yml +++ b/.github/workflows/record_pr.yml @@ -2,7 +2,7 @@ name: Record PR details on: pull_request: - types: [opened, edited, closed] + types: [opened, edited, closed, labeled] jobs: record_pr: From bca9695a80ade0ab5376443c8c10bcfe657e41ef Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Mon, 29 Aug 2022 17:22:26 +0200 Subject: [PATCH 24/49] chore(ci): destructure assignment on comment_large_pr --- .github/scripts/comment_on_large_pr.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/scripts/comment_on_large_pr.js b/.github/scripts/comment_on_large_pr.js index 4693f6463b0..3962c330523 100644 --- a/.github/scripts/comment_on_large_pr.js +++ b/.github/scripts/comment_on_large_pr.js @@ -43,7 +43,7 @@ module.exports = async ({github, context, 
core}) => { /** @type {string[]} */ - const labels = await github.rest.issues.listLabelsOnIssue({ + const { data: labels } = await github.rest.issues.listLabelsOnIssue({ owner: context.repo.owner, repo: context.repo.repo, issue_number: PR_NUMBER, From 5a121251dd13ed1362883c6d77f7378af39d76f4 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Mon, 29 Aug 2022 17:27:15 +0200 Subject: [PATCH 25/49] fix(ci): pass core fns to large pr workflow script --- .github/scripts/comment_on_large_pr.js | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/.github/scripts/comment_on_large_pr.js b/.github/scripts/comment_on_large_pr.js index 3962c330523..145a691854f 100644 --- a/.github/scripts/comment_on_large_pr.js +++ b/.github/scripts/comment_on_large_pr.js @@ -6,7 +6,16 @@ const { } = require("./constants") +/** + * Notify PR author to split XXL PR in smaller chunks + * + * @param {object} core - core functions instance from @actions/core + * @param {object} gh_client - Pre-authenticated REST client (Octokit) + * @param {string} owner - GitHub Organization + * @param {string} repository - GitHub repository + */ const notifyAuthor = async ({ + core, gh_client, owner, repository, @@ -54,6 +63,7 @@ module.exports = async ({github, context, core}) => { core.info(`Label: ${label}`) if (label.name == "size/XXL") { await notifyAuthor({ + core: core, gh_client: github, owner: context.repo.owner, repository: context.repo.repo, From 74ec89370c2b79f251a0dbf89ff4052c9cee0d00 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Mon, 29 Aug 2022 17:34:01 +0200 Subject: [PATCH 26/49] chore(ci): add note for state persistence on comment_large_pr --- .github/scripts/comment_on_large_pr.js | 3 +-- .github/workflows/on_label_added.yml | 1 + 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/scripts/comment_on_large_pr.js b/.github/scripts/comment_on_large_pr.js index 145a691854f..087689c864e 100644 --- a/.github/scripts/comment_on_large_pr.js +++ b/.github/scripts/comment_on_large_pr.js @@ -22,8 +22,7 @@ const notifyAuthor = async ({ }) => { core.info(`Commenting on PR ${PR_NUMBER}`) - let msg = ` - ### ⚠️Large PR detected⚠️. + let msg = `### ⚠️Large PR detected⚠️ Please consider breaking into smaller PRs to avoid significant review delays. Ignore if this PR has naturally grown to this size after reviews. `; diff --git a/.github/workflows/on_label_added.yml b/.github/workflows/on_label_added.yml index 46be6c144b5..e9180d8010a 100644 --- a/.github/workflows/on_label_added.yml +++ b/.github/workflows/on_label_added.yml @@ -24,6 +24,7 @@ jobs: pull-requests: write steps: - uses: actions/checkout@v3 + # Maintenance: Persist state per PR as an artifact to avoid spam on label add - name: "Suggest split large Pull Request" uses: actions/github-script@v6 env: From 973dc462124180b5f009e2a75cc7f2b8d6ad44a0 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Mon, 29 Aug 2022 17:40:16 +0200 Subject: [PATCH 27/49] chore(ci): format comment on comment_large_pr script --- .github/scripts/comment_on_large_pr.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/scripts/comment_on_large_pr.js b/.github/scripts/comment_on_large_pr.js index 087689c864e..c17199faf76 100644 --- a/.github/scripts/comment_on_large_pr.js +++ b/.github/scripts/comment_on_large_pr.js @@ -24,7 +24,7 @@ const notifyAuthor = async ({ let msg = `### ⚠️Large PR detected⚠️ - Please consider breaking into smaller PRs to avoid significant review delays. Ignore if this PR has naturally grown to this size after reviews. 
+Please consider breaking into smaller PRs to avoid significant review delays. Ignore if this PR has naturally grown to this size after reviews. `; try { From acd7af1dbd4da15b70fb6f154d9f9f0ced8782a7 Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Mon, 29 Aug 2022 17:45:17 +0200 Subject: [PATCH 28/49] chore(ci): create reusable docs publishing workflow (#1482) --- .github/workflows/on_push_docs.yml | 35 +++++++++ .github/workflows/on_release_notes.yml | 54 +++----------- .github/workflows/python_docs.yml | 71 ------------------ .github/workflows/rebuild_latest_docs.yml | 56 +++----------- .github/workflows/reusable_publish_docs.yml | 83 +++++++++++++++++++++ 5 files changed, 136 insertions(+), 163 deletions(-) create mode 100644 .github/workflows/on_push_docs.yml delete mode 100644 .github/workflows/python_docs.yml create mode 100644 .github/workflows/reusable_publish_docs.yml diff --git a/.github/workflows/on_push_docs.yml b/.github/workflows/on_push_docs.yml new file mode 100644 index 00000000000..d46879ca6b1 --- /dev/null +++ b/.github/workflows/on_push_docs.yml @@ -0,0 +1,35 @@ +name: Docs + +on: + push: + branches: + - develop + paths: + - "docs/**" + - "mkdocs.yml" + - "examples/**" + +jobs: + changelog: + permissions: + contents: write + uses: ./.github/workflows/reusable_publish_changelog.yml + + release-docs: + needs: changelog + permissions: + contents: write + pages: write + uses: ./.github/workflows/reusable_publish_docs.yml + with: + version: develop + alias: stage +# Maintenance: Only necessary in repo migration +# - name: Create redirect from old docs +# run: | +# git checkout gh-pages +# test -f 404.html && echo "Redirect already set" && exit 0 +# git checkout develop -- 404.html +# git add 404.html +# git commit -m "chore: set docs redirect" --no-verify +# git push origin gh-pages -f diff --git a/.github/workflows/on_release_notes.yml b/.github/workflows/on_release_notes.yml index 563d1fefc79..2b431defff0 100644 --- a/.github/workflows/on_release_notes.yml +++ b/.github/workflows/on_release_notes.yml @@ -69,8 +69,8 @@ jobs: id: release_version # transform tag format `v> $GITHUB_ENV + RELEASE_VERSION="${RELEASE_TAG_VERSION:1}" + echo "RELEASE_VERSION=${RELEASE_VERSION}" >> "$GITHUB_ENV" echo "::set-output name=RELEASE_VERSION::${RELEASE_VERSION}" - name: Install dependencies run: make dev @@ -78,7 +78,7 @@ jobs: if: ${{ !inputs.skip_code_quality }} run: make pr - name: Bump package version - run: poetry version ${RELEASE_VERSION} + run: poetry version "${RELEASE_VERSION}" - name: Build python package and wheel if: ${{ !inputs.skip_pypi }} run: poetry build @@ -101,7 +101,7 @@ jobs: role-to-assume: ${{ secrets.AWS_SAR_ROLE_ARN }} - name: publish lambda layer in SAR by triggering the internal codepipeline run: | - aws ssm put-parameter --name "powertools-python-release-version" --value $RELEASE_VERSION --overwrite + aws ssm put-parameter --name "powertools-python-release-version" --value "$RELEASE_VERSION" --overwrite aws codepipeline start-pipeline-execution --name ${{ secrets.AWS_SAR_PIPELINE_NAME }} changelog: @@ -115,47 +115,11 @@ jobs: permissions: contents: write pages: write - runs-on: ubuntu-latest - env: - RELEASE_VERSION: ${{ needs.release.outputs.RELEASE_VERSION }} - steps: - - uses: actions/checkout@v3 - with: - fetch-depth: 0 - - name: Git client setup and refresh tip - run: | - git config user.name "Release bot" - git config user.email "aws-devax-open-source@amazon.com" - git config pull.rebase true - git config remote.origin.url >&- || git remote add origin 
https://github.com/$origin # Git Detached mode (release notes) doesn't have origin - git pull origin $BRANCH - - name: Install poetry - run: pipx install poetry - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: "3.8" - cache: "poetry" - - name: Install dependencies - run: make dev - - name: Build docs website and API reference - run: | - make release-docs VERSION=${RELEASE_VERSION} ALIAS="latest" - poetry run mike set-default --push latest - - name: Release API docs to release version - uses: peaceiris/actions-gh-pages@068dc23d9710f1ba62e86896f84735d869951305 # v3.8.0 - with: - github_token: ${{ secrets.GITHUB_TOKEN }} - publish_dir: ./api - keep_files: true - destination_dir: ${{ env.RELEASE_VERSION }}/api - - name: Release API docs to latest - uses: peaceiris/actions-gh-pages@068dc23d9710f1ba62e86896f84735d869951305 # v3.8.0 - with: - github_token: ${{ secrets.GITHUB_TOKEN }} - publish_dir: ./api - keep_files: true - destination_dir: latest/api + uses: ./.github/workflows/reusable_publish_docs.yml + with: + version: ${{ needs.release.outputs.RELEASE_VERSION }} + alias: latest + detached_mode: true post_release: needs: release diff --git a/.github/workflows/python_docs.yml b/.github/workflows/python_docs.yml deleted file mode 100644 index 75f34bdfcbe..00000000000 --- a/.github/workflows/python_docs.yml +++ /dev/null @@ -1,71 +0,0 @@ -name: Docs - -# Maintenance: Create a reusable workflow to be more easily reused across release, push, and doc hot fixes -# this should include inputs on whether to release API docs, what version to release, and whether to rebuild /latest - -on: - push: - branches: - - develop - paths: - - "docs/**" - - "mkdocs.yml" - - "examples/**" - workflow_dispatch: - -jobs: - changelog: - permissions: - contents: write - uses: ./.github/workflows/reusable_publish_changelog.yml - - docs: - needs: changelog - # Force Github action to run only a single job at a time (based on the group name) - # This is to prevent "race-condition" in publishing a new version of doc to `gh-pages` - concurrency: - group: on-docs-build - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - with: - fetch-depth: 0 - - name: Install poetry - run: pipx install poetry - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: "3.8" - cache: "poetry" - # Maintenance: temporarily until we drop Python 3.6 and make cfn-lint a dev dependency - - name: Setup Cloud Formation Linter with Latest Version - uses: scottbrenner/cfn-lint-action@ee9ee62016ef62c5fd366e6be920df4b310ed353 # v2.2.4 - - name: Install dependencies - run: make dev - - name: Lint documentation - run: | - make lint-docs - cfn-lint examples/**/*.yaml - - name: Setup doc deploy - run: | - git config --global user.name Docs deploy - git config --global user.email docs@dummy.bot.com - git config pull.rebase true - git pull --rebase - - name: Build docs website and API reference - run: make release-docs VERSION="develop" ALIAS="stage" - - name: Deploy all docs - uses: peaceiris/actions-gh-pages@068dc23d9710f1ba62e86896f84735d869951305 # v3.8.0 - with: - github_token: ${{ secrets.GITHUB_TOKEN }} - publish_dir: ./api - keep_files: true - destination_dir: develop/api - - name: Create redirect from old docs - run: | - git checkout gh-pages - test -f 404.html && echo "Redirect already set" && exit 0 - git checkout develop -- 404.html - git add 404.html - git commit -m "chore: set docs redirect" --no-verify - git push origin gh-pages -f diff --git 
a/.github/workflows/rebuild_latest_docs.yml b/.github/workflows/rebuild_latest_docs.yml index bd9cffe8feb..eb995d95a12 100644 --- a/.github/workflows/rebuild_latest_docs.yml +++ b/.github/workflows/rebuild_latest_docs.yml @@ -5,14 +5,13 @@ name: Rebuild latest docs # # 1. Trigger "Rebuild latest docs" workflow manually: https://docs.github.com/en/actions/managing-workflow-runs/manually-running-a-workflow # 2. Use the latest version released under Releases e.g. v1.22.0 -# 3. Set `Build and publish docs only` field to `true` on: workflow_dispatch: inputs: latest_published_version: description: "Latest PyPi published version to rebuild latest docs for, e.g. v1.26.7" - default: "v1.26.7" + default: "v1.28.0" required: true jobs: @@ -21,49 +20,12 @@ jobs: contents: write uses: ./.github/workflows/reusable_publish_changelog.yml - release: + release-docs: needs: changelog - # Force Github action to run only a single job at a time (based on the group name) - # This is to prevent "race-condition" in publishing a new version of doc to `gh-pages` - concurrency: - group: on-docs-rebuild - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - with: - fetch-depth: 0 - - name: Install poetry - run: pipx install poetry - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: "3.8" - cache: "poetry" - - name: Set release notes tag - run: | - RELEASE_TAG_VERSION=${{ github.event.inputs.latest_published_version }} - echo "RELEASE_TAG_VERSION=${RELEASE_TAG_VERSION:1}" >> $GITHUB_ENV - - name: Install dependencies - run: make dev - - name: Setup doc deploy - run: | - git config --global user.name Docs deploy - git config --global user.email aws-devax-open-source@amazon.com - - name: Build docs website and API reference - run: | - make release-docs VERSION=${RELEASE_TAG_VERSION} ALIAS="latest" - poetry run mike set-default --push latest - - name: Release API docs to release version - uses: peaceiris/actions-gh-pages@068dc23d9710f1ba62e86896f84735d869951305 # v3.8.0 - with: - github_token: ${{ secrets.GITHUB_TOKEN }} - publish_dir: ./api - keep_files: true - destination_dir: ${{ env.RELEASE_TAG_VERSION }}/api - - name: Release API docs to latest - uses: peaceiris/actions-gh-pages@068dc23d9710f1ba62e86896f84735d869951305 # v3.8.0 - with: - github_token: ${{ secrets.GITHUB_TOKEN }} - publish_dir: ./api - keep_files: true - destination_dir: latest/api + permissions: + contents: write + pages: write + uses: ./.github/workflows/reusable_publish_docs.yml + with: + version: ${{ inputs.latest_published_version }} + alias: latest diff --git a/.github/workflows/reusable_publish_docs.yml b/.github/workflows/reusable_publish_docs.yml new file mode 100644 index 00000000000..a157783abf4 --- /dev/null +++ b/.github/workflows/reusable_publish_docs.yml @@ -0,0 +1,83 @@ +name: Reusable publish documentation + +env: + BRANCH: develop + ORIGIN: awslabs/aws-lambda-powertools-python + +on: + workflow_call: + inputs: + version: + description: "Version to build and publish docs (v1.28.0, develop)" + required: true + type: string + alias: + description: "Alias to associate version (latest, stage)" + required: true + type: string + detached_mode: + description: "Whether it's running in git detached mode to ensure git is sync'd" + required: false + default: false + type: boolean + +permissions: + contents: write + pages: write + +jobs: + publish_docs: + # Force Github action to run only a single job at a time (based on the group name) + # This is to prevent "race-condition" in publishing a new version of 
doc to `gh-pages` + concurrency: + group: on-docs-rebuild + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + - name: Install poetry + run: pipx install poetry + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: "3.8" + cache: "poetry" + - name: Install dependencies + run: make dev + - name: Git client setup + run: | + git config --global user.name Docs deploy + git config --global user.email aws-devax-open-source@amazon.com + - name: Git refresh tip (detached mode) + # Git Detached mode (release notes) doesn't have origin + if: ${{ inputs.detached_mode }} + run: | + git config pull.rebase true + git config remote.origin.url >&- || git remote add origin https://github.com/"$ORIGIN" + git pull origin "$BRANCH" + - name: Build docs website and API reference + env: + VERSION: ${{ inputs.version }} + ALIAS: ${{ inputs.alias }} + run: | + make release-docs VERSION="$VERSION" ALIAS="$ALIAS" + poetry run mike set-default --push latest + # Maintenance: Migrate to new gh-pages action + - name: Release API docs + uses: peaceiris/actions-gh-pages@068dc23d9710f1ba62e86896f84735d869951305 # v3.8.0 + env: + VERSION: ${{ inputs.version }} + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + publish_dir: ./api + keep_files: true + destination_dir: ${{ env.VERSION }}/api + - name: Release API docs to latest + if: ${{ inputs.alias == 'latest' }} + uses: peaceiris/actions-gh-pages@068dc23d9710f1ba62e86896f84735d869951305 # v3.8.0 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + publish_dir: ./api + keep_files: true + destination_dir: latest/api From 30708d52770c63fd37a0c120353a91bcad13519e Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Mon, 29 Aug 2022 17:50:57 +0200 Subject: [PATCH 29/49] chore(ci): create docs workflow for v2 --- .github/workflows/v2_on_push_docs.yml | 35 +++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) create mode 100644 .github/workflows/v2_on_push_docs.yml diff --git a/.github/workflows/v2_on_push_docs.yml b/.github/workflows/v2_on_push_docs.yml new file mode 100644 index 00000000000..997a00103dd --- /dev/null +++ b/.github/workflows/v2_on_push_docs.yml @@ -0,0 +1,35 @@ +name: Docs v2 + +on: + push: + branches: + - v2 + paths: + - "docs/**" + - "mkdocs.yml" + - "examples/**" + +jobs: + changelog: + permissions: + contents: write + uses: ./.github/workflows/reusable_publish_changelog.yml + + release-docs: + needs: changelog + permissions: + contents: write + pages: write + uses: ./.github/workflows/reusable_publish_docs.yml + with: + version: v2 + alias: alpha +# Maintenance: Only necessary in repo migration +# - name: Create redirect from old docs +# run: | +# git checkout gh-pages +# test -f 404.html && echo "Redirect already set" && exit 0 +# git checkout develop -- 404.html +# git add 404.html +# git commit -m "chore: set docs redirect" --no-verify +# git push origin gh-pages -f From d09f6d74770b69b141b90b99cdec7ea37ee5c715 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Mon, 29 Aug 2022 18:10:08 +0200 Subject: [PATCH 30/49] chore(ci): create adhoc docs workflow for v2 --- .github/workflows/v2_rebuild_latest_docs.yml | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) create mode 100644 .github/workflows/v2_rebuild_latest_docs.yml diff --git a/.github/workflows/v2_rebuild_latest_docs.yml b/.github/workflows/v2_rebuild_latest_docs.yml new file mode 100644 index 00000000000..f655bd30a9d --- /dev/null +++ b/.github/workflows/v2_rebuild_latest_docs.yml @@ -0,0 +1,20 @@ +name: V2 Rebuild 
latest docs + +on: + workflow_dispatch: + +jobs: + changelog: + permissions: + contents: write + uses: ./.github/workflows/reusable_publish_changelog.yml + + release-docs: + needs: changelog + permissions: + contents: write + pages: write + uses: ./.github/workflows/reusable_publish_docs.yml + with: + version: v2 + alias: alpha From fe78ae8e0760e614652c1fbcdd09ad15597896f3 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Mon, 29 Aug 2022 18:10:51 +0200 Subject: [PATCH 31/49] chore(ci): create adhoc docs workflow for v2 --- .github/workflows/v2_rebuild_latest_docs.yml | 6 ------ 1 file changed, 6 deletions(-) diff --git a/.github/workflows/v2_rebuild_latest_docs.yml b/.github/workflows/v2_rebuild_latest_docs.yml index f655bd30a9d..6d833cc3fef 100644 --- a/.github/workflows/v2_rebuild_latest_docs.yml +++ b/.github/workflows/v2_rebuild_latest_docs.yml @@ -4,13 +4,7 @@ on: workflow_dispatch: jobs: - changelog: - permissions: - contents: write - uses: ./.github/workflows/reusable_publish_changelog.yml - release-docs: - needs: changelog permissions: contents: write pages: write From a832be816ceb16296bd1f03061d8a3a47f426414 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 30 Aug 2022 10:51:04 +0200 Subject: [PATCH 32/49] chore(deps-dev): bump mkdocs-material from 8.4.1 to 8.4.2 (#1483) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index 3a6220e1efe..5ad5a446e04 100644 --- a/poetry.lock +++ b/poetry.lock @@ -690,7 +690,7 @@ mkdocs = ">=0.17" [[package]] name = "mkdocs-material" -version = "8.4.1" +version = "8.4.2" description = "Documentation that simply works" category = "dev" optional = false @@ -1368,7 +1368,7 @@ pydantic = ["pydantic", "email-validator"] [metadata] lock-version = "1.1" python-versions = "^3.6.2" -content-hash = "4e25e40a2ddb51f50559dab19d3f80a6626173bc8f8d7d3da7ea53ba9d6255b0" +content-hash = "d9e0120a5a8dbeae8d895aa09dd994cd02e51c9dfcb886f026ede88efe741f00" [metadata.files] atomicwrites = [ @@ -1698,8 +1698,8 @@ mkdocs-git-revision-date-plugin = [ {file = "mkdocs_git_revision_date_plugin-0.3.2-py3-none-any.whl", hash = "sha256:2e67956cb01823dd2418e2833f3623dee8604cdf223bddd005fe36226a56f6ef"}, ] mkdocs-material = [ - {file = "mkdocs-material-8.4.1.tar.gz", hash = "sha256:92c70f94b2e1f8a05d9e05eec1c7af9dffc516802d69222329db89503c97b4f3"}, - {file = "mkdocs_material-8.4.1-py2.py3-none-any.whl", hash = "sha256:319a6254819ce9d864ff79de48c43842fccfdebb43e4e6820eef75216f8cfb0a"}, + {file = "mkdocs-material-8.4.2.tar.gz", hash = "sha256:704c64c3fff126a3923c2961d95f26b19be621342a6a4e49ed039f0bb7a5c540"}, + {file = "mkdocs_material-8.4.2-py2.py3-none-any.whl", hash = "sha256:166287bb0e4197804906bf0389a852d5ced43182c30127ac8b48a4e497ecd7e5"}, ] mkdocs-material-extensions = [ {file = "mkdocs-material-extensions-1.0.3.tar.gz", hash = "sha256:bfd24dfdef7b41c312ede42648f9eb83476ea168ec163b613f9abd12bbfddba2"}, diff --git a/pyproject.toml b/pyproject.toml index 232da7ef9da..bd06c078350 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -69,7 +69,7 @@ mypy-boto3-xray = { version = "^1.24.0", python = ">=3.7" } types-requests = "^2.28.8" typing-extensions = { version = "^4.3.0", python = ">=3.7" } python-snappy = "^0.6.1" -mkdocs-material = { version = "^8.3.9", python = ">=3.7" } +mkdocs-material = { version = "^8.4.2", python = ">=3.7" } filelock = { 
version = "^3.8.0", python = ">=3.7" } [tool.poetry.extras] From 2cf38cc66e2a5a58775219bfa11ed3a389ff5d1d Mon Sep 17 00:00:00 2001 From: Heitor Lessa Date: Tue, 30 Aug 2022 15:42:37 +0200 Subject: [PATCH 33/49] chore(ci): sync package version with pypi pyproject.toml isn't being pushed back to trunk as part of release. --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index bd06c078350..b25d5f19d4e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "aws_lambda_powertools" -version = "1.26.6" +version = "1.28.0" description = "A suite of utilities for AWS Lambda functions to ease adopting best practices such as tracing, structured logging, custom metrics, batching, idempotency, feature flags, and more." authors = ["Amazon Web Services"] include = ["aws_lambda_powertools/py.typed", "THIRD-PARTY-LICENSES"] From c9071400ac073ebc8d7f53c5184fed24df594b5e Mon Sep 17 00:00:00 2001 From: Ruben Fonseca Date: Mon, 29 Aug 2022 17:56:24 +0200 Subject: [PATCH 34/49] feat(event_handler): improved support for headers and cookies in v2 (#1455) Co-authored-by: Heitor Lessa --- .gitignore | 3 + aws_lambda_powertools/__init__.py | 6 +- .../event_handler/api_gateway.py | 27 +- .../shared/headers_serializer.py | 111 ++ .../utilities/data_classes/alb_event.py | 13 + .../data_classes/api_gateway_proxy_event.py | 11 + .../utilities/data_classes/common.py | 5 + docs/core/event_handler/api_gateway.md | 11 +- docs/upgrade.md | 57 + .../src/binary_responses_output.json | 4 +- .../src/compressing_responses_output.json | 6 +- .../src/fine_grained_responses.py | 3 +- .../src/fine_grained_responses_output.json | 7 +- ...ting_started_rest_api_resolver_output.json | 4 +- .../src/setting_cors_output.json | 8 +- mkdocs.yml | 1 + poetry.lock | 971 ++++++++---------- pyproject.toml | 36 +- tests/e2e/event_handler/__init__.py | 0 tests/e2e/event_handler/conftest.py | 28 + .../e2e/event_handler/handlers/alb_handler.py | 18 + .../handlers/api_gateway_http_handler.py | 18 + .../handlers/api_gateway_rest_handler.py | 18 + .../handlers/lambda_function_url_handler.py | 18 + tests/e2e/event_handler/infrastructure.py | 81 ++ .../event_handler/test_header_serializer.py | 141 +++ tests/e2e/utils/data_fetcher/__init__.py | 2 +- tests/e2e/utils/data_fetcher/common.py | 12 + tests/e2e/utils/infrastructure.py | 19 +- tests/events/albMultiValueHeadersEvent.json | 35 + .../event_handler/test_api_gateway.py | 170 +-- .../event_handler/test_lambda_function_url.py | 21 + tests/functional/test_headers_serializer.py | 147 +++ 33 files changed, 1331 insertions(+), 681 deletions(-) create mode 100644 aws_lambda_powertools/shared/headers_serializer.py create mode 100644 docs/upgrade.md create mode 100644 tests/e2e/event_handler/__init__.py create mode 100644 tests/e2e/event_handler/conftest.py create mode 100644 tests/e2e/event_handler/handlers/alb_handler.py create mode 100644 tests/e2e/event_handler/handlers/api_gateway_http_handler.py create mode 100644 tests/e2e/event_handler/handlers/api_gateway_rest_handler.py create mode 100644 tests/e2e/event_handler/handlers/lambda_function_url_handler.py create mode 100644 tests/e2e/event_handler/infrastructure.py create mode 100644 tests/e2e/event_handler/test_header_serializer.py create mode 100644 tests/events/albMultiValueHeadersEvent.json create mode 100644 tests/functional/test_headers_serializer.py diff --git a/.gitignore b/.gitignore index b776e1999c2..cc01240a405 100644 --- a/.gitignore +++ 
b/.gitignore @@ -305,5 +305,8 @@ site/ !404.html !docs/overrides/*.html +# CDK +.cdk + !.github/workflows/lib examples/**/sam/.aws-sam diff --git a/aws_lambda_powertools/__init__.py b/aws_lambda_powertools/__init__.py index 65b5eb86730..750ae92c4d1 100644 --- a/aws_lambda_powertools/__init__.py +++ b/aws_lambda_powertools/__init__.py @@ -1,8 +1,8 @@ # -*- coding: utf-8 -*- -"""Top-level package for Lambda Python Powertools.""" - +from pathlib import Path +"""Top-level package for Lambda Python Powertools.""" from .logging import Logger # noqa: F401 from .metrics import Metrics, single_metric # noqa: F401 from .package_logger import set_package_logger_handler @@ -10,4 +10,6 @@ __author__ = """Amazon Web Services""" +PACKAGE_PATH = Path(__file__).parent + set_package_logger_handler() diff --git a/aws_lambda_powertools/event_handler/api_gateway.py b/aws_lambda_powertools/event_handler/api_gateway.py index 903fc7e828f..11adcfc2ed6 100644 --- a/aws_lambda_powertools/event_handler/api_gateway.py +++ b/aws_lambda_powertools/event_handler/api_gateway.py @@ -32,7 +32,7 @@ _SAFE_URI = "-._~()'!*:@,;" # https://www.ietf.org/rfc/rfc3986.txt # API GW/ALB decode non-safe URI chars; we must support them too _UNSAFE_URI = "%<> \[\]{}|^" # noqa: W605 -_NAMED_GROUP_BOUNDARY_PATTERN = fr"(?P\1[{_SAFE_URI}{_UNSAFE_URI}\\w]+)" +_NAMED_GROUP_BOUNDARY_PATTERN = rf"(?P\1[{_SAFE_URI}{_UNSAFE_URI}\\w]+)" class ProxyEventType(Enum): @@ -124,10 +124,11 @@ def __init__( def to_dict(self) -> Dict[str, str]: """Builds the configured Access-Control http headers""" - headers = { + headers: Dict[str, str] = { "Access-Control-Allow-Origin": self.allow_origin, "Access-Control-Allow-Headers": ",".join(sorted(self.allow_headers)), } + if self.expose_headers: headers["Access-Control-Expose-Headers"] = ",".join(self.expose_headers) if self.max_age is not None: @@ -145,7 +146,8 @@ def __init__( status_code: int, content_type: Optional[str], body: Union[str, bytes, None], - headers: Optional[Dict] = None, + headers: Optional[Dict[str, Union[str, List[str]]]] = None, + cookies: Optional[List[str]] = None, ): """ @@ -158,13 +160,16 @@ def __init__( provided http headers body: Union[str, bytes, None] Optionally set the response body. Note: bytes body will be automatically base64 encoded - headers: dict - Optionally set specific http headers. Setting "Content-Type" hear would override the `content_type` value. + headers: dict[str, Union[str, List[str]]] + Optionally set specific http headers. Setting "Content-Type" here would override the `content_type` value. + cookies: list[str] + Optionally set cookies. """ self.status_code = status_code self.body = body self.base64_encoded = False - self.headers: Dict = headers or {} + self.headers: Dict[str, Union[str, List[str]]] = headers if headers else {} + self.cookies = cookies or [] if content_type: self.headers.setdefault("Content-Type", content_type) @@ -196,11 +201,12 @@ def _add_cors(self, cors: CORSConfig): def _add_cache_control(self, cache_control: str): """Set the specified cache control headers for 200 http responses. 
For non-200 `no-cache` is used."""
-        self.response.headers["Cache-Control"] = cache_control if self.response.status_code == 200 else "no-cache"
+        cache_control = cache_control if self.response.status_code == 200 else "no-cache"
+        self.response.headers["Cache-Control"] = cache_control
 
     def _compress(self):
         """Compress the response body, but only if `Accept-Encoding` headers includes gzip."""
         self.response.headers["Content-Encoding"] = "gzip"
         if isinstance(self.response.body, str):
             logger.debug("Converting string response to bytes before compressing it")
             self.response.body = bytes(self.response.body, "utf-8")
@@ -226,11 +232,12 @@ def build(self, event: BaseProxyEvent, cors: Optional[CORSConfig] = None) -> Dic
             logger.debug("Encoding bytes response with base64")
             self.response.base64_encoded = True
             self.response.body = base64.b64encode(self.response.body).decode()
+
         return {
             "statusCode": self.response.status_code,
-            "headers": self.response.headers,
             "body": self.response.body,
             "isBase64Encoded": self.response.base64_encoded,
+            **event.header_serializer().serialize(headers=self.response.headers, cookies=self.response.cookies),
         }
 
 
@@ -596,7 +603,7 @@ def _path_starts_with(path: str, prefix: str):
 
     def _not_found(self, method: str) -> ResponseBuilder:
         """Called when no matching route was found and includes support for the cors preflight response"""
-        headers = {}
+        headers: Dict[str, Union[str, List[str]]] = {}
         if self._cors:
             logger.debug("CORS is enabled, updating headers.")
             headers.update(self._cors.to_dict())
diff --git a/aws_lambda_powertools/shared/headers_serializer.py b/aws_lambda_powertools/shared/headers_serializer.py
new file mode 100644
index 00000000000..4db7effe81b
--- /dev/null
+++ b/aws_lambda_powertools/shared/headers_serializer.py
@@ -0,0 +1,111 @@
+import warnings
+from collections import defaultdict
+from typing import Any, Dict, List, Union
+
+
+class BaseHeadersSerializer:
+    """
+    Helper class to correctly serialize headers and cookies for Amazon API Gateway,
+    ALB and Lambda Function URL response payload.
+    """
+
+    def serialize(self, headers: Dict[str, Union[str, List[str]]], cookies: List[str]) -> Dict[str, Any]:
+        """
+        Serializes headers and cookies according to the request type.
+        Returns a dict that can be merged with the response payload.
+
+        Parameters
+        ----------
+        headers: Dict[str, List[str]]
+            A dictionary of headers to set in the response
+        cookies: List[str]
+            A list of cookies to set in the response
+        """
+        raise NotImplementedError()
+
+
+class HttpApiHeadersSerializer(BaseHeadersSerializer):
+    def serialize(self, headers: Dict[str, Union[str, List[str]]], cookies: List[str]) -> Dict[str, Any]:
+        """
+        When using HTTP APIs or LambdaFunctionURLs, everything is taken care automatically for us.
+        We can directly assign a list of cookies and a dict of headers to the response payload, and the
+        runtime will automatically serialize them correctly on the output.
+
+        https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-develop-integrations-lambda.html#http-api-develop-integrations-lambda.proxy-format
+        https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-develop-integrations-lambda.html#http-api-develop-integrations-lambda.response
+        """
+
+        # Format 2.0 doesn't have multiValueHeaders or multiValueQueryStringParameters fields.
+        # Duplicate headers are combined with commas and included in the headers field. 
+ combined_headers: Dict[str, str] = {} + for key, values in headers.items(): + if isinstance(values, str): + combined_headers[key] = values + else: + combined_headers[key] = ", ".join(values) + + return {"headers": combined_headers, "cookies": cookies} + + +class MultiValueHeadersSerializer(BaseHeadersSerializer): + def serialize(self, headers: Dict[str, Union[str, List[str]]], cookies: List[str]) -> Dict[str, Any]: + """ + When using REST APIs, headers can be encoded using the `multiValueHeaders` key on the response. + This is also the case when using an ALB integration with the `multiValueHeaders` option enabled. + The solution covers headers with just one key or multiple keys. + + https://docs.aws.amazon.com/apigateway/latest/developerguide/set-up-lambda-proxy-integrations.html#api-gateway-simple-proxy-for-lambda-output-format + https://docs.aws.amazon.com/elasticloadbalancing/latest/application/lambda-functions.html#multi-value-headers-response + """ + payload: Dict[str, List[str]] = defaultdict(list) + + for key, values in headers.items(): + if isinstance(values, str): + payload[key].append(values) + else: + for value in values: + payload[key].append(value) + + if cookies: + payload.setdefault("Set-Cookie", []) + for cookie in cookies: + payload["Set-Cookie"].append(cookie) + + return {"multiValueHeaders": payload} + + +class SingleValueHeadersSerializer(BaseHeadersSerializer): + def serialize(self, headers: Dict[str, Union[str, List[str]]], cookies: List[str]) -> Dict[str, Any]: + """ + The ALB integration has `multiValueHeaders` disabled by default. + If we try to set multiple headers with the same key, or more than one cookie, print a warning. + + https://docs.aws.amazon.com/elasticloadbalancing/latest/application/lambda-functions.html#respond-to-load-balancer + """ + payload: Dict[str, Dict[str, str]] = {} + payload.setdefault("headers", {}) + + if cookies: + if len(cookies) > 1: + warnings.warn( + "Can't encode more than one cookie in the response. Sending the last cookie only. " + "Did you enable multiValueHeaders on the ALB Target Group?" + ) + + # We can only send one cookie, send the last one + payload["headers"]["Set-Cookie"] = cookies[-1] + + for key, values in headers.items(): + if isinstance(values, str): + payload["headers"][key] = values + else: + if len(values) > 1: + warnings.warn( + f"Can't encode more than one header value for the same key ('{key}') in the response. " + "Did you enable multiValueHeaders on the ALB Target Group?" 
+ ) + + # We can only set one header per key, send the last one + payload["headers"][key] = values[-1] + + return payload diff --git a/aws_lambda_powertools/utilities/data_classes/alb_event.py b/aws_lambda_powertools/utilities/data_classes/alb_event.py index 159779c86a7..1bd49fd05b6 100644 --- a/aws_lambda_powertools/utilities/data_classes/alb_event.py +++ b/aws_lambda_powertools/utilities/data_classes/alb_event.py @@ -1,5 +1,10 @@ from typing import Dict, List, Optional +from aws_lambda_powertools.shared.headers_serializer import ( + BaseHeadersSerializer, + MultiValueHeadersSerializer, + SingleValueHeadersSerializer, +) from aws_lambda_powertools.utilities.data_classes.common import BaseProxyEvent, DictWrapper @@ -30,3 +35,11 @@ def multi_value_query_string_parameters(self) -> Optional[Dict[str, List[str]]]: @property def multi_value_headers(self) -> Optional[Dict[str, List[str]]]: return self.get("multiValueHeaders") + + def header_serializer(self) -> BaseHeadersSerializer: + # When using the ALB integration, the `multiValueHeaders` feature can be disabled (default) or enabled. + # We can determine if the feature is enabled by looking if the event has a `multiValueHeaders` key. + if self.multi_value_headers: + return MultiValueHeadersSerializer() + + return SingleValueHeadersSerializer() diff --git a/aws_lambda_powertools/utilities/data_classes/api_gateway_proxy_event.py b/aws_lambda_powertools/utilities/data_classes/api_gateway_proxy_event.py index be374aba398..030d9739fa4 100644 --- a/aws_lambda_powertools/utilities/data_classes/api_gateway_proxy_event.py +++ b/aws_lambda_powertools/utilities/data_classes/api_gateway_proxy_event.py @@ -1,5 +1,10 @@ from typing import Any, Dict, List, Optional +from aws_lambda_powertools.shared.headers_serializer import ( + BaseHeadersSerializer, + HttpApiHeadersSerializer, + MultiValueHeadersSerializer, +) from aws_lambda_powertools.utilities.data_classes.common import ( BaseProxyEvent, BaseRequestContext, @@ -106,6 +111,9 @@ def path_parameters(self) -> Optional[Dict[str, str]]: def stage_variables(self) -> Optional[Dict[str, str]]: return self.get("stageVariables") + def header_serializer(self) -> BaseHeadersSerializer: + return MultiValueHeadersSerializer() + class RequestContextV2AuthorizerIam(DictWrapper): @property @@ -250,3 +258,6 @@ def path(self) -> str: def http_method(self) -> str: """The HTTP method used. 
Valid values include: DELETE, GET, HEAD, OPTIONS, PATCH, POST, and PUT."""
         return self.request_context.http.method
+
+    def header_serializer(self):
+        return HttpApiHeadersSerializer()
diff --git a/aws_lambda_powertools/utilities/data_classes/common.py b/aws_lambda_powertools/utilities/data_classes/common.py
index 2109ee3dd3e..ffd608f3015 100644
--- a/aws_lambda_powertools/utilities/data_classes/common.py
+++ b/aws_lambda_powertools/utilities/data_classes/common.py
@@ -2,6 +2,8 @@
 import json
 from typing import Any, Dict, Optional
 
+from aws_lambda_powertools.shared.headers_serializer import BaseHeadersSerializer
+
 
 class DictWrapper:
     """Provides a single read only access to a wrapper dict"""
@@ -127,6 +129,9 @@ def get_header_value(
         """
         return get_header_value(self.headers, name, default_value, case_sensitive)
 
+    def header_serializer(self) -> BaseHeadersSerializer:
+        raise NotImplementedError()
+
 
 class RequestContextClientCert(DictWrapper):
     @property
diff --git a/docs/core/event_handler/api_gateway.md b/docs/core/event_handler/api_gateway.md
index f4f45a051f8..c4cae718289 100644
--- a/docs/core/event_handler/api_gateway.md
+++ b/docs/core/event_handler/api_gateway.md
@@ -312,11 +312,18 @@ For convenience, these are the default values when using `CORSConfig` to enable
 
 ### Fine grained responses
 
-You can use the `Response` class to have full control over the response, for example you might want to add additional headers or set a custom Content-type.
+You can use the `Response` class to have full control over the response. For example, you might want to add additional headers, cookies, or set a custom Content-type.
+
+???+ info
+    Powertools serializes headers and cookies according to the type of input event.
+    Some event sources require headers and cookies to be encoded as `multiValueHeaders`.
+
+???+ warning "Using multiple values for HTTP headers in ALB?"
+    Make sure you [enable the multi value headers feature](https://docs.aws.amazon.com/elasticloadbalancing/latest/application/lambda-functions.html#multi-value-headers) to serialize response headers correctly.
 
 === "fine_grained_responses.py"
 
-    ```python hl_lines="7 24-28"
+    ```python hl_lines="7 24-29"
     --8<-- "examples/event_handler_rest/src/fine_grained_responses.py"
     ```
 
diff --git a/docs/upgrade.md b/docs/upgrade.md
new file mode 100644
index 00000000000..91ad54e42d3
--- /dev/null
+++ b/docs/upgrade.md
@@ -0,0 +1,57 @@
+---
+title: Upgrade guide
+description: Guide to update between major Powertools versions
+---
+
+
+
+## Migrate to v2 from v1
+
+The transition from Powertools for Python v1 to v2 is as painless as possible, as we aimed for minimal breaking changes.
+Changes at a glance:
+
+* The API for **event handler's `Response`** has minor changes to support multi value headers and cookies.
+
+???+ important
+    Powertools for Python v2 drops support for Python 3.6, following the Python 3.6 End-Of-Life (EOL) reached on December 23, 2021.
+
+### Initial Steps
+
+Before you start, we suggest making a copy of your current working project or create a new branch with git.
+
+1. **Upgrade** Python to at least v3.7
+
+2. **Ensure** you have the latest `aws-lambda-powertools`
+
+    ```bash
+    pip install aws-lambda-powertools -U
+    ```
+
+3. **Review** the following sections to confirm whether they affect your code
+
+## Event Handler Response (headers and cookies)
+
+The `Response` class of the event handler utility changed slightly:
+
+1. 
The `headers` parameter now expects either a value or list of values per header (type `Union[str, Dict[str, List[str]]]`) +2. We introduced a new `cookies` parameter (type `List[str]`) + +???+ note + Code that set headers as `Dict[str, str]` will still work unchanged. + +```python hl_lines="6 12 13" +@app.get("/todos") +def get_todos(): + # Before + return Response( + # ... + headers={"Content-Type": "text/plain"} + ) + + # After + return Response( + # ... + headers={"Content-Type": ["text/plain"]}, + cookies=["CookieName=CookieValue"] + ) +``` diff --git a/examples/event_handler_rest/src/binary_responses_output.json b/examples/event_handler_rest/src/binary_responses_output.json index 0938dee6811..ec59d251732 100644 --- a/examples/event_handler_rest/src/binary_responses_output.json +++ b/examples/event_handler_rest/src/binary_responses_output.json @@ -1,7 +1,7 @@ { "body": "PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0iVVRGLTgiPz4KPHN2ZyB3aWR0aD0iMjU2cHgiIGhlaWdodD0iMjU2cHgiIHZpZXdCb3g9IjAgMCAyNTYgMjU2IiB2ZXJzaW9uPSIxLjEiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgeG1sbnM6eGxpbms9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkveGxpbmsiIHByZXNlcnZlQXNwZWN0UmF0aW89InhNaWRZTWlkIj4KICAgIDx0aXRsZT5BV1MgTGFtYmRhPC90aXRsZT4KICAgIDxkZWZzPgogICAgICAgIDxsaW5lYXJHcmFkaWVudCB4MT0iMCUiIHkxPSIxMDAlIiB4Mj0iMTAwJSIgeTI9IjAlIiBpZD0ibGluZWFyR3JhZGllbnQtMSI+CiAgICAgICAgICAgIDxzdG9wIHN0b3AtY29sb3I9IiNDODUxMUIiIG9mZnNldD0iMCUiPjwvc3RvcD4KICAgICAgICAgICAgPHN0b3Agc3RvcC1jb2xvcj0iI0ZGOTkwMCIgb2Zmc2V0PSIxMDAlIj48L3N0b3A+CiAgICAgICAgPC9saW5lYXJHcmFkaWVudD4KICAgIDwvZGVmcz4KICAgIDxnPgogICAgICAgIDxyZWN0IGZpbGw9InVybCgjbGluZWFyR3JhZGllbnQtMSkiIHg9IjAiIHk9IjAiIHdpZHRoPSIyNTYiIGhlaWdodD0iMjU2Ij48L3JlY3Q+CiAgICAgICAgPHBhdGggZD0iTTg5LjYyNDExMjYsMjExLjIgTDQ5Ljg5MDMyNzcsMjExLjIgTDkzLjgzNTQ4MzIsMTE5LjM0NzIgTDExMy43NDcyOCwxNjAuMzM5MiBMODkuNjI0MTEyNiwyMTEuMiBaIE05Ni43MDI5MzU3LDExMC41Njk2IEM5Ni4xNjQwODU4LDEwOS40NjU2IDk1LjA0MTQ4MTMsMTA4Ljc2NDggOTMuODE2MjM4NCwxMDguNzY0OCBMOTMuODA2NjE2MywxMDguNzY0OCBDOTIuNTcxNzUxNCwxMDguNzY4IDkxLjQ0OTE0NjYsMTA5LjQ3NTIgOTAuOTE5OTE4NywxMTAuNTg1NiBMNDEuOTEzNDIwOCwyMTMuMDIwOCBDNDEuNDM4NzE5NywyMTQuMDEyOCA0MS41MDYwNzU4LDIxNS4xNzc2IDQyLjA5NjI0NTEsMjE2LjEwODggQzQyLjY3OTk5OTQsMjE3LjAzNjggNDMuNzA2MzgwNSwyMTcuNiA0NC44MDY1MzMxLDIxNy42IEw5MS42NTQ0MjMsMjE3LjYgQzkyLjg5NTcwMjcsMjE3LjYgOTQuMDIxNTE0OSwyMTYuODg2NCA5NC41NTM5NTAxLDIxNS43Njk2IEwxMjAuMjAzODU5LDE2MS42ODk2IEMxMjAuNjE3NjE5LDE2MC44MTI4IDEyMC42MTQ0MTIsMTU5Ljc5ODQgMTIwLjE4NzgyMiwxNTguOTI4IEw5Ni43MDI5MzU3LDExMC41Njk2IFogTTIwNy45ODUxMTcsMjExLjIgTDE2OC41MDc5MjgsMjExLjIgTDEwNS4xNzM3ODksNzguNjI0IEMxMDQuNjQ0NTYxLDc3LjUxMDQgMTAzLjUxNTU0MSw3Ni44IDEwMi4yNzc0NjksNzYuOCBMNzYuNDQ3OTQzLDc2LjggTDc2LjQ3NjgwOTksNDQuOCBMMTI3LjEwMzA2Niw0NC44IEwxOTAuMTQ1MzI4LDE3Ny4zNzI4IEMxOTAuNjc0NTU2LDE3OC40ODY0IDE5MS44MDM1NzUsMTc5LjIgMTkzLjA0MTY0NywxNzkuMiBMMjA3Ljk4NTExNywxNzkuMiBMMjA3Ljk4NTExNywyMTEuMiBaIE0yMTEuMTkyNTU4LDE3Mi44IEwxOTUuMDcxOTU4LDE3Mi44IEwxMzIuMDI5Njk2LDQwLjIyNzIgQzEzMS41MDA0NjgsMzkuMTEzNiAxMzAuMzcxNDQ5LDM4LjQgMTI5LjEzMDE2OSwzOC40IEw3My4yNzI1NzYsMzguNCBDNzEuNTA1Mjc1OCwzOC40IDcwLjA2ODM0MjEsMzkuODMwNCA3MC4wNjUxMzQ0LDQxLjU5NjggTDcwLjAyOTg1MjgsNzkuOTk2OCBDNzAuMDI5ODUyOCw4MC44NDggNzAuMzYzNDI2Niw4MS42NjA4IDcwLjk2OTYzMyw4Mi4yNjI0IEM3MS41Njk0MjQ2LDgyLjg2NCA3Mi4zODQxMTQ2LDgzLjIgNzMuMjM3Mjk0MSw4My4yIEwxMDAuMjUzNTczLDgzLjIgTDE2My41OTA5MiwyMTUuNzc2IEMxNjQuMTIzMzU1LDIxNi44ODk2IDE2NS4yNDU5NiwyMTcuNiAxNjYuNDg0MDMyLDIxNy42IEwyMTEuMTkyNTU4LDIxNy42IEMyMTIuOTY2Mjc0LDIxNy42IDIxNC40LDIxNi4xNjY0IDIxNC40LDIxNC40IEwyMTQuNCwxNzYgQzIxNC40LDE3NC4yMzM2IDIxMi45NjYyNzQsMTcyLjggMjExLjE5MjU1OCwxNzIuOCBMMjExLjE5MjU1OCwxNzIuOCBa
IiBmaWxsPSIjRkZGRkZGIj48L3BhdGg+CiAgICA8L2c+Cjwvc3ZnPg==", - "headers": { - "Content-Type": "image/svg+xml" + "multiValueHeaders": { + "Content-Type": ["image/svg+xml"] }, "isBase64Encoded": true, "statusCode": 200 diff --git a/examples/event_handler_rest/src/compressing_responses_output.json b/examples/event_handler_rest/src/compressing_responses_output.json index 0836b3aa726..60a63966494 100644 --- a/examples/event_handler_rest/src/compressing_responses_output.json +++ b/examples/event_handler_rest/src/compressing_responses_output.json @@ -1,8 +1,8 @@ { "statusCode": 200, - "headers": { - "Content-Type": "application/json", - "Content-Encoding": "gzip" + "multiValueHeaders": { + "Content-Type": ["application/json"], + "Content-Encoding": ["gzip"] }, "body": "H4sIAAAAAAACE42STU4DMQyFrxJl3QXln96AMyAW7sSDLCVxiJ0Kqerd8TCCUOgii1EmP/783pOPXjmw+N3L0TfB+hz8brvxtC5KGtHvfMCIkzZx0HT5MPmNnziViIr2dIYoeNr8Q1x3xHsjcVadIbkZJoq2RXU8zzQROLseQ9505NzeCNQdMJNBE+UmY4zbzjAJhWtlZ57sB84BWtul+rteH2HPlVgWARwjqXkxpklK5gmEHAQqJBMtFsGVygcKmNVRjG0wxvuzGF2L0dpVUOKMC3bfJNjJgWMrCuZk7cUp02AiD72D6WKHHwUDKbiJs6AZ0VZXKOUx4uNvzdxT+E4mLcMA+6G8nzrLQkaxkNEVrFKW2VGbJCoCY7q2V3+tiv5kGThyxfTecDWbgGz/NfYXhL6ePgF9PnFdPgMAAA==", "isBase64Encoded": true diff --git a/examples/event_handler_rest/src/fine_grained_responses.py b/examples/event_handler_rest/src/fine_grained_responses.py index 3e477160307..4892de9c798 100644 --- a/examples/event_handler_rest/src/fine_grained_responses.py +++ b/examples/event_handler_rest/src/fine_grained_responses.py @@ -19,13 +19,14 @@ def get_todos(): todos: requests.Response = requests.get("https://jsonplaceholder.typicode.com/todos") todos.raise_for_status() - custom_headers = {"X-Transaction-Id": f"{uuid4()}"} + custom_headers = {"X-Transaction-Id": [f"{uuid4()}"]} return Response( status_code=HTTPStatus.OK.value, # 200 content_type=content_types.APPLICATION_JSON, body=todos.json()[:10], headers=custom_headers, + cookies=["=; Secure; Expires="], ) diff --git a/examples/event_handler_rest/src/fine_grained_responses_output.json b/examples/event_handler_rest/src/fine_grained_responses_output.json index c3d58098e80..1ce606839b1 100644 --- a/examples/event_handler_rest/src/fine_grained_responses_output.json +++ b/examples/event_handler_rest/src/fine_grained_responses_output.json @@ -1,8 +1,9 @@ { "statusCode": 200, - "headers": { - "Content-Type": "application/json", - "X-Transaction-Id": "3490eea9-791b-47a0-91a4-326317db61a9" + "multiValueHeaders": { + "Content-Type": ["application/json"], + "X-Transaction-Id": ["3490eea9-791b-47a0-91a4-326317db61a9"], + "Set-Cookie": ["=; Secure; Expires="] }, "body": "{\"todos\":[{\"userId\":1,\"id\":1,\"title\":\"delectus aut autem\",\"completed\":false},{\"userId\":1,\"id\":2,\"title\":\"quis ut nam facilis et officia qui\",\"completed\":false},{\"userId\":1,\"id\":3,\"title\":\"fugiat veniam minus\",\"completed\":false},{\"userId\":1,\"id\":4,\"title\":\"et porro tempora\",\"completed\":true},{\"userId\":1,\"id\":5,\"title\":\"laboriosam mollitia et enim quasi adipisci quia provident illum\",\"completed\":false},{\"userId\":1,\"id\":6,\"title\":\"qui ullam ratione quibusdam voluptatem quia omnis\",\"completed\":false},{\"userId\":1,\"id\":7,\"title\":\"illo expedita consequatur quia in\",\"completed\":false},{\"userId\":1,\"id\":8,\"title\":\"quo adipisci enim quam ut ab\",\"completed\":true},{\"userId\":1,\"id\":9,\"title\":\"molestiae perspiciatis ipsa\",\"completed\":false},{\"userId\":1,\"id\":10,\"title\":\"illo est ratione doloremque quia maiores 
aut\",\"completed\":true}]}", "isBase64Encoded": false diff --git a/examples/event_handler_rest/src/getting_started_rest_api_resolver_output.json b/examples/event_handler_rest/src/getting_started_rest_api_resolver_output.json index 2ef3714531f..24d2b5c6dbc 100644 --- a/examples/event_handler_rest/src/getting_started_rest_api_resolver_output.json +++ b/examples/event_handler_rest/src/getting_started_rest_api_resolver_output.json @@ -1,7 +1,7 @@ { "statusCode": 200, - "headers": { - "Content-Type": "application/json" + "multiValueHeaders": { + "Content-Type": ["application/json"] }, "body": "{\"todos\":[{\"userId\":1,\"id\":1,\"title\":\"delectus aut autem\",\"completed\":false},{\"userId\":1,\"id\":2,\"title\":\"quis ut nam facilis et officia qui\",\"completed\":false},{\"userId\":1,\"id\":3,\"title\":\"fugiat veniam minus\",\"completed\":false},{\"userId\":1,\"id\":4,\"title\":\"et porro tempora\",\"completed\":true},{\"userId\":1,\"id\":5,\"title\":\"laboriosam mollitia et enim quasi adipisci quia provident illum\",\"completed\":false},{\"userId\":1,\"id\":6,\"title\":\"qui ullam ratione quibusdam voluptatem quia omnis\",\"completed\":false},{\"userId\":1,\"id\":7,\"title\":\"illo expedita consequatur quia in\",\"completed\":false},{\"userId\":1,\"id\":8,\"title\":\"quo adipisci enim quam ut ab\",\"completed\":true},{\"userId\":1,\"id\":9,\"title\":\"molestiae perspiciatis ipsa\",\"completed\":false},{\"userId\":1,\"id\":10,\"title\":\"illo est ratione doloremque quia maiores aut\",\"completed\":true}]}", "isBase64Encoded": false diff --git a/examples/event_handler_rest/src/setting_cors_output.json b/examples/event_handler_rest/src/setting_cors_output.json index ca86e892d38..19660941e91 100644 --- a/examples/event_handler_rest/src/setting_cors_output.json +++ b/examples/event_handler_rest/src/setting_cors_output.json @@ -1,9 +1,9 @@ { "statusCode": 200, - "headers": { - "Content-Type": "application/json", - "Access-Control-Allow-Origin": "https://www.example.com", - "Access-Control-Allow-Headers": "Authorization,Content-Type,X-Amz-Date,X-Amz-Security-Token,X-Api-Key" + "multiValueHeaders": { + "Content-Type": ["application/json"], + "Access-Control-Allow-Origin": ["https://www.example.com"], + "Access-Control-Allow-Headers": ["Authorization,Content-Type,X-Amz-Date,X-Amz-Security-Token,X-Api-Key"] }, "body": "{\"todos\":[{\"userId\":1,\"id\":1,\"title\":\"delectus aut autem\",\"completed\":false},{\"userId\":1,\"id\":2,\"title\":\"quis ut nam facilis et officia qui\",\"completed\":false},{\"userId\":1,\"id\":3,\"title\":\"fugiat veniam minus\",\"completed\":false},{\"userId\":1,\"id\":4,\"title\":\"et porro tempora\",\"completed\":true},{\"userId\":1,\"id\":5,\"title\":\"laboriosam mollitia et enim quasi adipisci quia provident illum\",\"completed\":false},{\"userId\":1,\"id\":6,\"title\":\"qui ullam ratione quibusdam voluptatem quia omnis\",\"completed\":false},{\"userId\":1,\"id\":7,\"title\":\"illo expedita consequatur quia in\",\"completed\":false},{\"userId\":1,\"id\":8,\"title\":\"quo adipisci enim quam ut ab\",\"completed\":true},{\"userId\":1,\"id\":9,\"title\":\"molestiae perspiciatis ipsa\",\"completed\":false},{\"userId\":1,\"id\":10,\"title\":\"illo est ratione doloremque quia maiores aut\",\"completed\":true}]}", "isBase64Encoded": false diff --git a/mkdocs.yml b/mkdocs.yml index 171cf36eb13..59fcdfa6a08 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -10,6 +10,7 @@ nav: - Tutorial: tutorial/index.md - Roadmap: roadmap.md - API reference: api/" target="_blank + - Upgrade guide: 
upgrade.md - Core utilities: - core/tracer.md - core/logger.md diff --git a/poetry.lock b/poetry.lock index 5ad5a446e04..8c8ff2f1821 100644 --- a/poetry.lock +++ b/poetry.lock @@ -8,30 +8,62 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "attrs" -version = "21.4.0" +version = "22.1.0" description = "Classes Without Boilerplate" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.5" [package.extras] -dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "sphinx", "sphinx-notfound-page", "zope.interface"] +dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] -tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "zope.interface"] -tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six"] +tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] +tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] [[package]] name = "aws-cdk-lib" -version = "2.23.0" +version = "2.39.1" description = "Version 2 of the AWS Cloud Development Kit library" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = "~=3.7" + +[package.dependencies] +constructs = ">=10.0.0,<11.0.0" +jsii = ">=1.65.1,<2.0.0" +publication = ">=0.0.3" +typeguard = ">=2.13.3,<2.14.0" + +[[package]] +name = "aws-cdk.aws-apigatewayv2-alpha" +version = "2.39.1a0" +description = "The CDK Construct Library for AWS::APIGatewayv2" +category = "dev" +optional = false +python-versions = "~=3.7" + +[package.dependencies] +aws-cdk-lib = ">=2.39.1,<3.0.0" +constructs = ">=10.0.0,<11.0.0" +jsii = ">=1.65.1,<2.0.0" +publication = ">=0.0.3" +typeguard = ">=2.13.3,<2.14.0" + +[[package]] +name = "aws-cdk.aws-apigatewayv2-integrations-alpha" +version = "2.39.1a0" +description = "Integrations for AWS APIGateway V2" +category = "dev" +optional = false +python-versions = "~=3.7" [package.dependencies] +aws-cdk-lib = ">=2.39.1,<3.0.0" +"aws-cdk.aws-apigatewayv2-alpha" = "2.39.1.a0" constructs = ">=10.0.0,<11.0.0" -jsii = ">=1.57.0,<2.0.0" +jsii = ">=1.65.1,<2.0.0" publication = ">=0.0.3" +typeguard = ">=2.13.3,<2.14.0" [[package]] name = "aws-xray-sdk" @@ -47,11 +79,11 @@ wrapt = "*" [[package]] name = "bandit" -version = "1.7.1" +version = "1.7.4" description = "Security oriented static analyser for python code." 
category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.7" [package.dependencies] colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} @@ -59,57 +91,57 @@ GitPython = ">=1.0.1" PyYAML = ">=5.3.1" stevedore = ">=1.20.0" +[package.extras] +test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)", "toml"] +toml = ["toml"] +yaml = ["pyyaml"] + [[package]] name = "black" -version = "21.12b0" +version = "22.6.0" description = "The uncompromising code formatter." category = "dev" optional = false python-versions = ">=3.6.2" [package.dependencies] -click = ">=7.1.2" -dataclasses = {version = ">=0.6", markers = "python_version < \"3.7\""} +click = ">=8.0.0" mypy-extensions = ">=0.4.3" -pathspec = ">=0.9.0,<1" +pathspec = ">=0.9.0" platformdirs = ">=2" -tomli = ">=0.2.6,<2.0.0" +tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""} typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\" and implementation_name == \"cpython\""} -typing-extensions = [ - {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}, - {version = "!=3.10.0.1", markers = "python_version >= \"3.10\""}, -] +typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} [package.extras] colorama = ["colorama (>=0.4.3)"] d = ["aiohttp (>=3.7.4)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] -python2 = ["typed-ast (>=1.4.3)"] uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "boto3" -version = "1.23.10" +version = "1.24.63" description = "The AWS SDK for Python" category = "main" optional = false -python-versions = ">= 3.6" +python-versions = ">= 3.7" [package.dependencies] -botocore = ">=1.26.10,<1.27.0" +botocore = ">=1.27.63,<1.28.0" jmespath = ">=0.7.1,<2.0.0" -s3transfer = ">=0.5.0,<0.6.0" +s3transfer = ">=0.6.0,<0.7.0" [package.extras] crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.26.10" +version = "1.27.63" description = "Low-level, data-driven core of boto 3." category = "main" optional = false -python-versions = ">= 3.6" +python-versions = ">= 3.7" [package.dependencies] jmespath = ">=0.7.1,<2.0.0" @@ -117,21 +149,7 @@ python-dateutil = ">=2.1,<3.0.0" urllib3 = ">=1.25.4,<1.27" [package.extras] -crt = ["awscrt (==0.13.8)"] - -[[package]] -name = "cattrs" -version = "1.0.0" -description = "Composable complex class support for attrs." -category = "dev" -optional = false -python-versions = "*" - -[package.dependencies] -attrs = ">=17.3" - -[package.extras] -dev = ["bumpversion", "coverage", "flake8", "hypothesis", "pendulum", "pytest", "sphinx", "tox", "watchdog", "wheel"] +crt = ["awscrt (==0.14.0)"] [[package]] name = "cattrs" @@ -156,22 +174,22 @@ python-versions = ">=3.6" [[package]] name = "charset-normalizer" -version = "2.0.12" +version = "2.1.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
category = "dev" optional = false -python-versions = ">=3.5.0" +python-versions = ">=3.6.0" [package.extras] unicode_backport = ["unicodedata2"] [[package]] name = "click" -version = "8.0.4" +version = "8.1.3" description = "Composable command line interface toolkit" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} @@ -187,38 +205,31 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "constructs" -version = "10.1.1" +version = "10.1.92" description = "A programming model for software-defined state" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = "~=3.7" [package.dependencies] -jsii = ">=1.57.0,<2.0.0" +jsii = ">=1.66.0,<2.0.0" publication = ">=0.0.3" +typeguard = ">=2.13.3,<2.14.0" [[package]] name = "coverage" -version = "6.2" +version = "6.4.4" description = "Code coverage measurement for Python" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] -tomli = {version = "*", optional = true, markers = "extra == \"toml\""} +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} [package.extras] toml = ["tomli"] -[[package]] -name = "dataclasses" -version = "0.8" -description = "A backport of the dataclasses module for Python 3.6" -category = "main" -optional = false -python-versions = ">=3.6, <3.7" - [[package]] name = "decorator" version = "5.1.1" @@ -265,7 +276,7 @@ python-versions = "*" [[package]] name = "exceptiongroup" -version = "1.0.0rc8" +version = "1.0.0rc9" description = "Backport of PEP 654 (exception groups)" category = "dev" optional = false @@ -322,6 +333,19 @@ mccabe = ">=0.6.0,<0.7.0" pycodestyle = ">=2.7.0,<2.8.0" pyflakes = ">=2.3.0,<2.4.0" +[[package]] +name = "flake8-black" +version = "0.3.3" +description = "flake8 plugin to call black as a code style validator" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +black = ">=22.1.0" +flake8 = ">=3.0.0" +tomli = "*" + [[package]] name = "flake8-bugbear" version = "22.8.23" @@ -353,28 +377,27 @@ test = ["coverage", "coveralls", "mock", "pytest", "pytest-cov"] [[package]] name = "flake8-comprehensions" -version = "3.7.0" +version = "3.10.0" description = "A flake8 plugin to help you write better list/set/dict comprehensions." 
category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] -flake8 = ">=3.0,<3.2.0 || >3.2.0,<5" +flake8 = ">=3.0,<3.2.0 || >3.2.0" importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} [[package]] name = "flake8-debugger" -version = "4.0.0" +version = "4.1.2" description = "ipdb/pdb statement checker plugin for flake8" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] flake8 = ">=3.0" pycodestyle = "*" -six = "*" [[package]] name = "flake8-eradicate" @@ -455,15 +478,15 @@ smmap = ">=3.0.1,<6" [[package]] name = "gitpython" -version = "3.1.20" -description = "Python Git Library" +version = "3.1.27" +description = "GitPython is a python library used to interact with Git repositories" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] gitdb = ">=4.0.1,<5" -typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.10\""} +typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.8\""} [[package]] name = "idna" @@ -490,21 +513,6 @@ docs = ["jaraco.packaging (>=9)", "rst.linker (>=1.9)", "sphinx"] perf = ["ipython"] testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] -[[package]] -name = "importlib-resources" -version = "5.4.0" -description = "Read resources from Python packages" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} - -[package.extras] -docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"] -testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy"] - [[package]] name = "iniconfig" version = "1.1.1" @@ -529,11 +537,11 @@ requirements_deprecated_finder = ["pip-api", "pipreqs"] [[package]] name = "jinja2" -version = "3.0.3" +version = "3.1.2" description = "A very fast and expressive template engine." category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] MarkupSafe = ">=2.0" @@ -543,44 +551,44 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "jmespath" -version = "0.10.0" +version = "1.0.1" description = "JSON Matching Expressions" category = "main" optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = ">=3.7" [[package]] name = "jsii" -version = "1.57.0" +version = "1.66.0" description = "Python client for jsii runtime" category = "dev" optional = false -python-versions = "~=3.6" +python-versions = "~=3.7" [package.dependencies] -attrs = ">=21.2,<22.0" -cattrs = [ - {version = ">=1.0.0,<1.1.0", markers = "python_version < \"3.7\""}, - {version = ">=1.8,<22.2", markers = "python_version >= \"3.7\""}, -] -importlib-resources = {version = "*", markers = "python_version < \"3.7\""} +attrs = ">=21.2,<23.0" +cattrs = ">=1.8,<22.2" +publication = ">=0.0.3" python-dateutil = "*" +typeguard = ">=2.13.3,<2.14.0" typing-extensions = ">=3.7,<5.0" [[package]] name = "mako" -version = "1.1.6" -description = "A super-fast templating language that borrows the best ideas from the existing templating languages." 
+version = "1.2.2" +description = "A super-fast templating language that borrows the best ideas from the existing templating languages." category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.7" [package.dependencies] +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} MarkupSafe = ">=0.9.2" [package.extras] babel = ["babel"] lingua = ["lingua"] +testing = ["pytest"] [[package]] name = "mando" @@ -612,11 +620,11 @@ testing = ["coverage", "pyyaml"] [[package]] name = "markupsafe" -version = "2.0.1" +version = "2.1.1" description = "Safely add untrusted strings to HTML/XML markup." category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [[package]] name = "mccabe" @@ -862,11 +870,11 @@ pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" [[package]] name = "pathspec" -version = "0.9.0" +version = "0.10.0" description = "Utility library for gitignore style pattern matching of file paths." category = "dev" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +python-versions = ">=3.7" [[package]] name = "pbr" @@ -890,14 +898,14 @@ markdown = ">=3.0" [[package]] name = "platformdirs" -version = "2.4.0" +version = "2.5.2" description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.extras] -docs = ["Sphinx (>=4)", "furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)"] +docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx (>=4)", "sphinx-autodoc-typehints (>=1.12)"] test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] [[package]] @@ -949,15 +957,14 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pydantic" -version = "1.9.2" +version = "1.10.0" description = "Data validation and settings management using python type hints" category = "main" optional = true -python-versions = ">=3.6.1" +python-versions = ">=3.7" [package.dependencies] -dataclasses = {version = ">=0.6", markers = "python_version < \"3.7\""} -typing-extensions = ">=3.7.4.3" +typing-extensions = ">=4.1.0" [package.extras] dotenv = ["python-dotenv (>=0.10.4)"] @@ -995,22 +1002,22 @@ markdown = ">=3.2" [[package]] name = "pyparsing" -version = "3.0.7" -description = "Python parsing module" +version = "3.0.9" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.6.8" [package.extras] diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pytest" -version = "7.0.1" +version = "7.1.2" description = "pytest: simple powerful testing with Python" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} @@ -1086,11 +1093,11 @@ pytest = ">=3.10" [[package]] name = "pytest-mock" -version = "3.6.1" +version = "3.8.2" description = "Thin-wrapper around the mock package for easier use with pytest" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] pytest = ">=5.0" @@ -1169,21 +1176,21 @@ mando = ">=0.6,<0.7" [[package]] name = "requests" -version = "2.27.1" +version = "2.28.1" description = "Python HTTP for Humans." 
category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +python-versions = ">=3.7, <4" [package.dependencies] certifi = ">=2017.4.17" -charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""} -idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""} +charset-normalizer = ">=2,<3" +idna = ">=2.5,<4" urllib3 = ">=1.21.1,<1.27" [package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] -use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "retry" @@ -1222,11 +1229,11 @@ python-versions = ">=3.5" [[package]] name = "s3transfer" -version = "0.5.2" +version = "0.6.0" description = "An Amazon S3 Transfer Manager" category = "main" optional = false -python-versions = ">= 3.6" +python-versions = ">= 3.7" [package.dependencies] botocore = ">=1.12.36,<2.0a.0" @@ -1264,11 +1271,11 @@ pbr = ">=2.0.0,<2.1.0 || >2.1.0" [[package]] name = "tomli" -version = "1.2.3" +version = "2.0.1" description = "A lil' TOML parser" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [[package]] name = "typed-ast" @@ -1278,6 +1285,18 @@ category = "dev" optional = false python-versions = ">=3.6" +[[package]] +name = "typeguard" +version = "2.13.3" +description = "Run-time type checker for Python" +category = "dev" +optional = false +python-versions = ">=3.5.3" + +[package.extras] +doc = ["sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["mypy", "pytest", "typing-extensions"] + [[package]] name = "types-requests" version = "2.28.9" @@ -1367,148 +1386,148 @@ pydantic = ["pydantic", "email-validator"] [metadata] lock-version = "1.1" -python-versions = "^3.6.2" -content-hash = "d9e0120a5a8dbeae8d895aa09dd994cd02e51c9dfcb886f026ede88efe741f00" +python-versions = "^3.7.4" +content-hash = "81514fdb005bee75315860470f16a71590648bbfac05872150530f3ee720181d" [metadata.files] -atomicwrites = [ - {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, -] +atomicwrites = [] attrs = [ - {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, - {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, + {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, + {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, ] aws-cdk-lib = [ - {file = "aws-cdk-lib-2.23.0.tar.gz", hash = "sha256:3e07d1c6b320795d38567be183e56c2125b4c4492589775257aabec3d3e2a384"}, - {file = "aws_cdk_lib-2.23.0-py3-none-any.whl", hash = "sha256:1ec04a146d3364cd0fc4da08e3f8ca25e28df68abaa90641936db17a415ca4bc"}, + {file = "aws-cdk-lib-2.39.1.tar.gz", hash = "sha256:41a97713b52cc558a53269f97665dea263ee635159455b29c2ca64a8f4772bc6"}, + {file = "aws_cdk_lib-2.39.1-py3-none-any.whl", hash = "sha256:c22cafee652238dcc7dfab14849f45783503a27d76f9f7fcb51232d017e8576a"}, +] +"aws-cdk.aws-apigatewayv2-alpha" = [ + {file = "aws-cdk.aws-apigatewayv2-alpha-2.39.1a0.tar.gz", hash = "sha256:2a506e8e9015f1cf15f951b4dbc09ffee17d96aa77491b84ca1ab4b790388bdc"}, + {file = "aws_cdk.aws_apigatewayv2_alpha-2.39.1a0-py3-none-any.whl", hash = "sha256:00ec8ee0c777f3dba81a40553e649aac3f707484af07c90d0f369ceb78512164"}, 
] -aws-xray-sdk = [ - {file = "aws-xray-sdk-2.10.0.tar.gz", hash = "sha256:9b14924fd0628cf92936055864655354003f0b1acc3e1c3ffde6403d0799dd7a"}, - {file = "aws_xray_sdk-2.10.0-py2.py3-none-any.whl", hash = "sha256:7551e81a796e1a5471ebe84844c40e8edf7c218db33506d046fec61f7495eda4"}, +"aws-cdk.aws-apigatewayv2-integrations-alpha" = [ + {file = "aws-cdk.aws-apigatewayv2-integrations-alpha-2.39.1a0.tar.gz", hash = "sha256:67f7e38214466bd15438301828c0b210b08fc16ecf35781210cdda4eae3151e2"}, + {file = "aws_cdk.aws_apigatewayv2_integrations_alpha-2.39.1a0-py3-none-any.whl", hash = "sha256:29a46bad1fe1fd8d9c2356686636a0e83db9e4b6b24d8765f5024fc2988f8661"}, ] +aws-xray-sdk = [] bandit = [ - {file = "bandit-1.7.1-py3-none-any.whl", hash = "sha256:f5acd838e59c038a159b5c621cf0f8270b279e884eadd7b782d7491c02add0d4"}, - {file = "bandit-1.7.1.tar.gz", hash = "sha256:a81b00b5436e6880fa8ad6799bc830e02032047713cbb143a12939ac67eb756c"}, + {file = "bandit-1.7.4-py3-none-any.whl", hash = "sha256:412d3f259dab4077d0e7f0c11f50f650cc7d10db905d98f6520a95a18049658a"}, + {file = "bandit-1.7.4.tar.gz", hash = "sha256:2d63a8c573417bae338962d4b9b06fbc6080f74ecd955a092849e1e65c717bd2"}, ] black = [ - {file = "black-21.12b0-py3-none-any.whl", hash = "sha256:a615e69ae185e08fdd73e4715e260e2479c861b5740057fde6e8b4e3b7dd589f"}, - {file = "black-21.12b0.tar.gz", hash = "sha256:77b80f693a569e2e527958459634f18df9b0ba2625ba4e0c2d5da5be42e6f2b3"}, + {file = "black-22.6.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f586c26118bc6e714ec58c09df0157fe2d9ee195c764f630eb0d8e7ccce72e69"}, + {file = "black-22.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b270a168d69edb8b7ed32c193ef10fd27844e5c60852039599f9184460ce0807"}, + {file = "black-22.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6797f58943fceb1c461fb572edbe828d811e719c24e03375fd25170ada53825e"}, + {file = "black-22.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c85928b9d5f83b23cee7d0efcb310172412fbf7cb9d9ce963bd67fd141781def"}, + {file = "black-22.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:f6fe02afde060bbeef044af7996f335fbe90b039ccf3f5eb8f16df8b20f77666"}, + {file = "black-22.6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cfaf3895a9634e882bf9d2363fed5af8888802d670f58b279b0bece00e9a872d"}, + {file = "black-22.6.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94783f636bca89f11eb5d50437e8e17fbc6a929a628d82304c80fa9cd945f256"}, + {file = "black-22.6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:2ea29072e954a4d55a2ff58971b83365eba5d3d357352a07a7a4df0d95f51c78"}, + {file = "black-22.6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e439798f819d49ba1c0bd9664427a05aab79bfba777a6db94fd4e56fae0cb849"}, + {file = "black-22.6.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:187d96c5e713f441a5829e77120c269b6514418f4513a390b0499b0987f2ff1c"}, + {file = "black-22.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:074458dc2f6e0d3dab7928d4417bb6957bb834434516f21514138437accdbe90"}, + {file = "black-22.6.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a218d7e5856f91d20f04e931b6f16d15356db1c846ee55f01bac297a705ca24f"}, + {file = "black-22.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:568ac3c465b1c8b34b61cd7a4e349e93f91abf0f9371eda1cf87194663ab684e"}, + {file = "black-22.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6c1734ab264b8f7929cef8ae5f900b85d579e6cbfde09d7387da8f04771b51c6"}, + {file = 
"black-22.6.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9a3ac16efe9ec7d7381ddebcc022119794872abce99475345c5a61aa18c45ad"}, + {file = "black-22.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:b9fd45787ba8aa3f5e0a0a98920c1012c884622c6c920dbe98dbd05bc7c70fbf"}, + {file = "black-22.6.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7ba9be198ecca5031cd78745780d65a3f75a34b2ff9be5837045dce55db83d1c"}, + {file = "black-22.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a3db5b6409b96d9bd543323b23ef32a1a2b06416d525d27e0f67e74f1446c8f2"}, + {file = "black-22.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:560558527e52ce8afba936fcce93a7411ab40c7d5fe8c2463e279e843c0328ee"}, + {file = "black-22.6.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b154e6bbde1e79ea3260c4b40c0b7b3109ffcdf7bc4ebf8859169a6af72cd70b"}, + {file = "black-22.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:4af5bc0e1f96be5ae9bd7aaec219c901a94d6caa2484c21983d043371c733fc4"}, + {file = "black-22.6.0-py3-none-any.whl", hash = "sha256:ac609cf8ef5e7115ddd07d85d988d074ed00e10fbc3445aee393e70164a2219c"}, + {file = "black-22.6.0.tar.gz", hash = "sha256:6c6d39e28aed379aec40da1c65434c77d75e65bb59a1e1c283de545fb4e7c6c9"}, ] boto3 = [ - {file = "boto3-1.23.10-py3-none-any.whl", hash = "sha256:40d08614f17a69075e175c02c5d5aab69a6153fd50e40fa7057b913ac7bf40e7"}, - {file = "boto3-1.23.10.tar.gz", hash = "sha256:2a4395e3241c20eef441d7443a5e6eaa0ee3f7114653fb9d9cef41587526f7bd"}, + {file = "boto3-1.24.63-py3-none-any.whl", hash = "sha256:719bfafbe4e076055aa1a51269ffdbe9c61446679b67f31d61c237976661154c"}, + {file = "boto3-1.24.63.tar.gz", hash = "sha256:0e6ef4b5e47b6073887961028201ecfc2024198125f20fbe5f5c00234f124543"}, ] botocore = [ - {file = "botocore-1.26.10-py3-none-any.whl", hash = "sha256:8a4a984bf901ccefe40037da11ba2abd1ddbcb3b490a492b7f218509c99fc12f"}, - {file = "botocore-1.26.10.tar.gz", hash = "sha256:5df2cf7ebe34377470172bd0bbc582cf98c5cbd02da0909a14e9e2885ab3ae9c"}, -] -cattrs = [ - {file = "cattrs-1.0.0-py2.py3-none-any.whl", hash = "sha256:616972ae3dfa6e623a40ad3cb845420e64942989152774ab055e5c2b2f89f997"}, - {file = "cattrs-1.0.0.tar.gz", hash = "sha256:b7ab5cf8ad127c42eefd01410c1c6e28569a45a255ea80ed968511873c433c7a"}, - {file = "cattrs-22.1.0-py3-none-any.whl", hash = "sha256:d55c477b4672f93606e992049f15d526dc7867e6c756cd6256d4af92e2b1e364"}, - {file = "cattrs-22.1.0.tar.gz", hash = "sha256:94b67b64cf92c994f8784c40c082177dc916e0489a73a9a36b24eb18a9db40c6"}, -] -certifi = [ - {file = "certifi-2022.6.15-py3-none-any.whl", hash = "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"}, - {file = "certifi-2022.6.15.tar.gz", hash = "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d"}, + {file = "botocore-1.27.63-py3-none-any.whl", hash = "sha256:8567dee549430a53210c6b898dea3a8fc8ee9d7934ec1df7545c547cacbb2b8f"}, + {file = "botocore-1.27.63.tar.gz", hash = "sha256:b97e17c930a7f45b50f94956a4474c1cd7b828e3dcd8a84dd0e3306ca6189335"}, ] +cattrs = [] +certifi = [] charset-normalizer = [ - {file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"}, - {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"}, + {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, + {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash 
= "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, ] click = [ - {file = "click-8.0.4-py3-none-any.whl", hash = "sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1"}, - {file = "click-8.0.4.tar.gz", hash = "sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb"}, -] -colorama = [ - {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"}, - {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, + {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, + {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, ] +colorama = [] constructs = [ - {file = "constructs-10.1.1-py3-none-any.whl", hash = "sha256:c1f3deb196f54e070ded3c92c4339f73ef2b6022d35fb34908c0ebfa7ef8a640"}, - {file = "constructs-10.1.1.tar.gz", hash = "sha256:6ce0dd1352367237b5d7c51a25740482c852735d2a5e067c536acc1657f39ea5"}, + {file = "constructs-10.1.92-py3-none-any.whl", hash = "sha256:297f1194754e2698eeb0ac69d2fa88558b8ec2a19cf4d4c6b999f3f62d9e5c7c"}, + {file = "constructs-10.1.92.tar.gz", hash = "sha256:1f3a63c65423e551339f50633400551a2f4d8a0eafa1418da78617e060a80cd3"}, ] coverage = [ - {file = "coverage-6.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6dbc1536e105adda7a6312c778f15aaabe583b0e9a0b0a324990334fd458c94b"}, - {file = "coverage-6.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:174cf9b4bef0db2e8244f82059a5a72bd47e1d40e71c68ab055425172b16b7d0"}, - {file = "coverage-6.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:92b8c845527eae547a2a6617d336adc56394050c3ed8a6918683646328fbb6da"}, - {file = "coverage-6.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c7912d1526299cb04c88288e148c6c87c0df600eca76efd99d84396cfe00ef1d"}, - {file = "coverage-6.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d5d2033d5db1d58ae2d62f095e1aefb6988af65b4b12cb8987af409587cc0739"}, - {file = "coverage-6.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3feac4084291642165c3a0d9eaebedf19ffa505016c4d3db15bfe235718d4971"}, - {file = "coverage-6.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:276651978c94a8c5672ea60a2656e95a3cce2a3f31e9fb2d5ebd4c215d095840"}, - {file = "coverage-6.2-cp310-cp310-win32.whl", hash = "sha256:f506af4f27def639ba45789fa6fde45f9a217da0be05f8910458e4557eed020c"}, - {file = "coverage-6.2-cp310-cp310-win_amd64.whl", hash = "sha256:3f7c17209eef285c86f819ff04a6d4cbee9b33ef05cbcaae4c0b4e8e06b3ec8f"}, - {file = "coverage-6.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:13362889b2d46e8d9f97c421539c97c963e34031ab0cb89e8ca83a10cc71ac76"}, - {file = "coverage-6.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:22e60a3ca5acba37d1d4a2ee66e051f5b0e1b9ac950b5b0cf4aa5366eda41d47"}, - {file = "coverage-6.2-cp311-cp311-win_amd64.whl", hash = "sha256:b637c57fdb8be84e91fac60d9325a66a5981f8086c954ea2772efe28425eaf64"}, - {file = "coverage-6.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f467bbb837691ab5a8ca359199d3429a11a01e6dfb3d9dcc676dc035ca93c0a9"}, - {file = "coverage-6.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:2641f803ee9f95b1f387f3e8f3bf28d83d9b69a39e9911e5bfee832bea75240d"}, - {file = "coverage-6.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1219d760ccfafc03c0822ae2e06e3b1248a8e6d1a70928966bafc6838d3c9e48"}, - {file = "coverage-6.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9a2b5b52be0a8626fcbffd7e689781bf8c2ac01613e77feda93d96184949a98e"}, - {file = "coverage-6.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8e2c35a4c1f269704e90888e56f794e2d9c0262fb0c1b1c8c4ee44d9b9e77b5d"}, - {file = "coverage-6.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:5d6b09c972ce9200264c35a1d53d43ca55ef61836d9ec60f0d44273a31aa9f17"}, - {file = "coverage-6.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e3db840a4dee542e37e09f30859f1612da90e1c5239a6a2498c473183a50e781"}, - {file = "coverage-6.2-cp36-cp36m-win32.whl", hash = "sha256:4e547122ca2d244f7c090fe3f4b5a5861255ff66b7ab6d98f44a0222aaf8671a"}, - {file = "coverage-6.2-cp36-cp36m-win_amd64.whl", hash = "sha256:01774a2c2c729619760320270e42cd9e797427ecfddd32c2a7b639cdc481f3c0"}, - {file = "coverage-6.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fb8b8ee99b3fffe4fd86f4c81b35a6bf7e4462cba019997af2fe679365db0c49"}, - {file = "coverage-6.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:619346d57c7126ae49ac95b11b0dc8e36c1dd49d148477461bb66c8cf13bb521"}, - {file = "coverage-6.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0a7726f74ff63f41e95ed3a89fef002916c828bb5fcae83b505b49d81a066884"}, - {file = "coverage-6.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cfd9386c1d6f13b37e05a91a8583e802f8059bebfccde61a418c5808dea6bbfa"}, - {file = "coverage-6.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:17e6c11038d4ed6e8af1407d9e89a2904d573be29d51515f14262d7f10ef0a64"}, - {file = "coverage-6.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c254b03032d5a06de049ce8bca8338a5185f07fb76600afff3c161e053d88617"}, - {file = "coverage-6.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:dca38a21e4423f3edb821292e97cec7ad38086f84313462098568baedf4331f8"}, - {file = "coverage-6.2-cp37-cp37m-win32.whl", hash = "sha256:600617008aa82032ddeace2535626d1bc212dfff32b43989539deda63b3f36e4"}, - {file = "coverage-6.2-cp37-cp37m-win_amd64.whl", hash = "sha256:bf154ba7ee2fd613eb541c2bc03d3d9ac667080a737449d1a3fb342740eb1a74"}, - {file = "coverage-6.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f9afb5b746781fc2abce26193d1c817b7eb0e11459510fba65d2bd77fe161d9e"}, - {file = "coverage-6.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edcada2e24ed68f019175c2b2af2a8b481d3d084798b8c20d15d34f5c733fa58"}, - {file = "coverage-6.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a9c8c4283e17690ff1a7427123ffb428ad6a52ed720d550e299e8291e33184dc"}, - {file = "coverage-6.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f614fc9956d76d8a88a88bb41ddc12709caa755666f580af3a688899721efecd"}, - {file = "coverage-6.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9365ed5cce5d0cf2c10afc6add145c5037d3148585b8ae0e77cc1efdd6aa2953"}, - {file = "coverage-6.2-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:8bdfe9ff3a4ea37d17f172ac0dff1e1c383aec17a636b9b35906babc9f0f5475"}, - {file = "coverage-6.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:63c424e6f5b4ab1cf1e23a43b12f542b0ec2e54f99ec9f11b75382152981df57"}, - {file = "coverage-6.2-cp38-cp38-win32.whl", hash = "sha256:49dbff64961bc9bdd2289a2bda6a3a5a331964ba5497f694e2cbd540d656dc1c"}, - {file = "coverage-6.2-cp38-cp38-win_amd64.whl", hash = "sha256:9a29311bd6429be317c1f3fe4bc06c4c5ee45e2fa61b2a19d4d1d6111cb94af2"}, - {file = "coverage-6.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03b20e52b7d31be571c9c06b74746746d4eb82fc260e594dc662ed48145e9efd"}, - {file = "coverage-6.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:215f8afcc02a24c2d9a10d3790b21054b58d71f4b3c6f055d4bb1b15cecce685"}, - {file = "coverage-6.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a4bdeb0a52d1d04123b41d90a4390b096f3ef38eee35e11f0b22c2d031222c6c"}, - {file = "coverage-6.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c332d8f8d448ded473b97fefe4a0983265af21917d8b0cdcb8bb06b2afe632c3"}, - {file = "coverage-6.2-cp39-cp39-win32.whl", hash = "sha256:6e1394d24d5938e561fbeaa0cd3d356207579c28bd1792f25a068743f2d5b282"}, - {file = "coverage-6.2-cp39-cp39-win_amd64.whl", hash = "sha256:86f2e78b1eff847609b1ca8050c9e1fa3bd44ce755b2ec30e70f2d3ba3844644"}, - {file = "coverage-6.2-pp36.pp37.pp38-none-any.whl", hash = "sha256:5829192582c0ec8ca4a2532407bc14c2f338d9878a10442f5d03804a95fac9de"}, - {file = "coverage-6.2.tar.gz", hash = "sha256:e2cad8093172b7d1595b4ad66f24270808658e11acf43a8f95b41276162eb5b8"}, -] -dataclasses = [ - {file = "dataclasses-0.8-py3-none-any.whl", hash = "sha256:0201d89fa866f68c8ebd9d08ee6ff50c0b255f8ec63a71c16fda7af82bb887bf"}, - {file = "dataclasses-0.8.tar.gz", hash = "sha256:8479067f342acf957dc82ec415d355ab5edb7e7646b90dc6e2fd1d96ad084c97"}, -] -decorator = [ - {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, - {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, -] + {file = "coverage-6.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e7b4da9bafad21ea45a714d3ea6f3e1679099e420c8741c74905b92ee9bfa7cc"}, + {file = "coverage-6.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fde17bc42e0716c94bf19d92e4c9f5a00c5feb401f5bc01101fdf2a8b7cacf60"}, + {file = "coverage-6.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdbb0d89923c80dbd435b9cf8bba0ff55585a3cdb28cbec65f376c041472c60d"}, + {file = "coverage-6.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:67f9346aeebea54e845d29b487eb38ec95f2ecf3558a3cffb26ee3f0dcc3e760"}, + {file = "coverage-6.4.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42c499c14efd858b98c4e03595bf914089b98400d30789511577aa44607a1b74"}, + {file = "coverage-6.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c35cca192ba700979d20ac43024a82b9b32a60da2f983bec6c0f5b84aead635c"}, + {file = "coverage-6.4.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9cc4f107009bca5a81caef2fca843dbec4215c05e917a59dec0c8db5cff1d2aa"}, + {file = "coverage-6.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:5f444627b3664b80d078c05fe6a850dd711beeb90d26731f11d492dcbadb6973"}, + {file = "coverage-6.4.4-cp310-cp310-win32.whl", hash = "sha256:66e6df3ac4659a435677d8cd40e8eb1ac7219345d27c41145991ee9bf4b806a0"}, + {file = "coverage-6.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:35ef1f8d8a7a275aa7410d2f2c60fa6443f4a64fae9be671ec0696a68525b875"}, + {file = "coverage-6.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c1328d0c2f194ffda30a45f11058c02410e679456276bfa0bbe0b0ee87225fac"}, + {file = "coverage-6.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61b993f3998ee384935ee423c3d40894e93277f12482f6e777642a0141f55782"}, + {file = "coverage-6.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d5dd4b8e9cd0deb60e6fcc7b0647cbc1da6c33b9e786f9c79721fd303994832f"}, + {file = "coverage-6.4.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7026f5afe0d1a933685d8f2169d7c2d2e624f6255fb584ca99ccca8c0e966fd7"}, + {file = "coverage-6.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9c7b9b498eb0c0d48b4c2abc0e10c2d78912203f972e0e63e3c9dc21f15abdaa"}, + {file = "coverage-6.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ee2b2fb6eb4ace35805f434e0f6409444e1466a47f620d1d5763a22600f0f892"}, + {file = "coverage-6.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ab066f5ab67059d1f1000b5e1aa8bbd75b6ed1fc0014559aea41a9eb66fc2ce0"}, + {file = "coverage-6.4.4-cp311-cp311-win32.whl", hash = "sha256:9d6e1f3185cbfd3d91ac77ea065d85d5215d3dfa45b191d14ddfcd952fa53796"}, + {file = "coverage-6.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:e3d3c4cc38b2882f9a15bafd30aec079582b819bec1b8afdbde8f7797008108a"}, + {file = "coverage-6.4.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a095aa0a996ea08b10580908e88fbaf81ecf798e923bbe64fb98d1807db3d68a"}, + {file = "coverage-6.4.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef6f44409ab02e202b31a05dd6666797f9de2aa2b4b3534e9d450e42dea5e817"}, + {file = "coverage-6.4.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b7101938584d67e6f45f0015b60e24a95bf8dea19836b1709a80342e01b472f"}, + {file = "coverage-6.4.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14a32ec68d721c3d714d9b105c7acf8e0f8a4f4734c811eda75ff3718570b5e3"}, + {file = "coverage-6.4.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6a864733b22d3081749450466ac80698fe39c91cb6849b2ef8752fd7482011f3"}, + {file = "coverage-6.4.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:08002f9251f51afdcc5e3adf5d5d66bb490ae893d9e21359b085f0e03390a820"}, + {file = "coverage-6.4.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a3b2752de32c455f2521a51bd3ffb53c5b3ae92736afde67ce83477f5c1dd928"}, + {file = "coverage-6.4.4-cp37-cp37m-win32.whl", hash = "sha256:f855b39e4f75abd0dfbcf74a82e84ae3fc260d523fcb3532786bcbbcb158322c"}, + {file = "coverage-6.4.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ee6ae6bbcac0786807295e9687169fba80cb0617852b2fa118a99667e8e6815d"}, + {file = "coverage-6.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:564cd0f5b5470094df06fab676c6d77547abfdcb09b6c29c8a97c41ad03b103c"}, + {file = "coverage-6.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cbbb0e4cd8ddcd5ef47641cfac97d8473ab6b132dd9a46bacb18872828031685"}, + {file = 
"coverage-6.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6113e4df2fa73b80f77663445be6d567913fb3b82a86ceb64e44ae0e4b695de1"}, + {file = "coverage-6.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8d032bfc562a52318ae05047a6eb801ff31ccee172dc0d2504614e911d8fa83e"}, + {file = "coverage-6.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e431e305a1f3126477abe9a184624a85308da8edf8486a863601d58419d26ffa"}, + {file = "coverage-6.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cf2afe83a53f77aec067033199797832617890e15bed42f4a1a93ea24794ae3e"}, + {file = "coverage-6.4.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:783bc7c4ee524039ca13b6d9b4186a67f8e63d91342c713e88c1865a38d0892a"}, + {file = "coverage-6.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ff934ced84054b9018665ca3967fc48e1ac99e811f6cc99ea65978e1d384454b"}, + {file = "coverage-6.4.4-cp38-cp38-win32.whl", hash = "sha256:e1fabd473566fce2cf18ea41171d92814e4ef1495e04471786cbc943b89a3781"}, + {file = "coverage-6.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:4179502f210ebed3ccfe2f78bf8e2d59e50b297b598b100d6c6e3341053066a2"}, + {file = "coverage-6.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:98c0b9e9b572893cdb0a00e66cf961a238f8d870d4e1dc8e679eb8bdc2eb1b86"}, + {file = "coverage-6.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fc600f6ec19b273da1d85817eda339fb46ce9eef3e89f220055d8696e0a06908"}, + {file = "coverage-6.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a98d6bf6d4ca5c07a600c7b4e0c5350cd483c85c736c522b786be90ea5bac4f"}, + {file = "coverage-6.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01778769097dbd705a24e221f42be885c544bb91251747a8a3efdec6eb4788f2"}, + {file = "coverage-6.4.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfa0b97eb904255e2ab24166071b27408f1f69c8fbda58e9c0972804851e0558"}, + {file = "coverage-6.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:fcbe3d9a53e013f8ab88734d7e517eb2cd06b7e689bedf22c0eb68db5e4a0a19"}, + {file = "coverage-6.4.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:15e38d853ee224e92ccc9a851457fb1e1f12d7a5df5ae44544ce7863691c7a0d"}, + {file = "coverage-6.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6913dddee2deff8ab2512639c5168c3e80b3ebb0f818fed22048ee46f735351a"}, + {file = "coverage-6.4.4-cp39-cp39-win32.whl", hash = "sha256:354df19fefd03b9a13132fa6643527ef7905712109d9c1c1903f2133d3a4e145"}, + {file = "coverage-6.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:1238b08f3576201ebf41f7c20bf59baa0d05da941b123c6656e42cdb668e9827"}, + {file = "coverage-6.4.4-pp36.pp37.pp38-none-any.whl", hash = "sha256:f67cf9f406cf0d2f08a3515ce2db5b82625a7257f88aad87904674def6ddaec1"}, + {file = "coverage-6.4.4.tar.gz", hash = "sha256:e16c45b726acb780e1e6f88b286d3c10b3914ab03438f32117c4aa52d7f30d58"}, +] +decorator = [] dnspython = [ {file = "dnspython-2.2.1-py3-none-any.whl", hash = "sha256:a851e51367fb93e9e1361732c1d60dab63eff98712e503ea7d92e6eccb109b4f"}, {file = "dnspython-2.2.1.tar.gz", hash = "sha256:0f7569a4a6ff151958b64304071d370daa3243d15941a7beedf0c9fe5105603e"}, ] -email-validator = [ - {file = "email_validator-1.2.1-py2.py3-none-any.whl", hash = "sha256:c8589e691cf73eb99eed8d10ce0e9cbb05a0886ba920c8bcb7c82873f4c5789c"}, - {file = 
"email_validator-1.2.1.tar.gz", hash = "sha256:6757aea012d40516357c0ac2b1a4c31219ab2f899d26831334c5d069e8b6c3d8"}, -] +email-validator = [] eradicate = [ {file = "eradicate-2.1.0-py3-none-any.whl", hash = "sha256:8bfaca181db9227dc88bdbce4d051a9627604c2243e7d85324f6d6ce0fd08bb2"}, {file = "eradicate-2.1.0.tar.gz", hash = "sha256:aac7384ab25b1bf21c4c012de9b4bf8398945a14c98c911545b2ea50ab558014"}, ] exceptiongroup = [ - {file = "exceptiongroup-1.0.0rc8-py3-none-any.whl", hash = "sha256:ab0a968e1ef769e55d9a596f4a89f7be9ffedbc9fdefdb77cc68cf5c33ce1035"}, - {file = "exceptiongroup-1.0.0rc8.tar.gz", hash = "sha256:6990c24f06b8d33c8065cfe43e5e8a4bfa384e0358be036af9cc60b6321bd11a"}, -] -execnet = [ - {file = "execnet-1.9.0-py2.py3-none-any.whl", hash = "sha256:a295f7cc774947aac58dde7fdc85f4aa00c42adf5d8f5468fc630c1acf30a142"}, - {file = "execnet-1.9.0.tar.gz", hash = "sha256:8f694f3ba9cc92cab508b152dcfe322153975c29bda272e2fd7f3f00f36e47c5"}, -] -fastjsonschema = [ - {file = "fastjsonschema-2.16.1-py3-none-any.whl", hash = "sha256:2f7158c4de792555753d6c2277d6a2af2d406dfd97aeca21d17173561ede4fe6"}, - {file = "fastjsonschema-2.16.1.tar.gz", hash = "sha256:d6fa3ffbe719768d70e298b9fb847484e2bdfdb7241ed052b8d57a9294a8c334"}, + {file = "exceptiongroup-1.0.0rc9-py3-none-any.whl", hash = "sha256:2e3c3fc1538a094aab74fad52d6c33fc94de3dfee3ee01f187c0e0c72aec5337"}, + {file = "exceptiongroup-1.0.0rc9.tar.gz", hash = "sha256:9086a4a21ef9b31c72181c77c040a074ba0889ee56a7b289ff0afb0d97655f96"}, ] +execnet = [] +fastjsonschema = [] filelock = [ {file = "filelock-3.8.0-py3-none-any.whl", hash = "sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4"}, {file = "filelock-3.8.0.tar.gz", hash = "sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc"}, @@ -1517,51 +1536,42 @@ flake8 = [ {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"}, {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"}, ] +flake8-black = [ + {file = "flake8-black-0.3.3.tar.gz", hash = "sha256:8211f5e20e954cb57c709acccf2f3281ce27016d4c4b989c3e51f878bb7ce12a"}, + {file = "flake8_black-0.3.3-py3-none-any.whl", hash = "sha256:7d667d0059fd1aa468de1669d77cc934b7f1feeac258d57bdae69a8e73c4cd90"}, +] flake8-bugbear = [ {file = "flake8-bugbear-22.8.23.tar.gz", hash = "sha256:de0717d11124a082118dd08387b34fd86b2721642ec2d8e92be66cfa5ea7c445"}, {file = "flake8_bugbear-22.8.23-py3-none-any.whl", hash = "sha256:1b0ebe0873d1cd55bf9f1588bfcb930db339018ef44a3981a26532daa9fd14a8"}, ] -flake8-builtins = [ - {file = "flake8-builtins-1.5.3.tar.gz", hash = "sha256:09998853b2405e98e61d2ff3027c47033adbdc17f9fe44ca58443d876eb00f3b"}, - {file = "flake8_builtins-1.5.3-py2.py3-none-any.whl", hash = "sha256:7706babee43879320376861897e5d1468e396a40b8918ed7bccf70e5f90b8687"}, -] +flake8-builtins = [] flake8-comprehensions = [ - {file = "flake8-comprehensions-3.7.0.tar.gz", hash = "sha256:6b3218b2dde8ac5959c6476cde8f41a79e823c22feb656be2710cd2a3232cef9"}, - {file = "flake8_comprehensions-3.7.0-py3-none-any.whl", hash = "sha256:a5d7aea6315bbbd6fbcb2b4e80bff6a54d1600155e26236e555d0c6fe1d62522"}, + {file = "flake8-comprehensions-3.10.0.tar.gz", hash = "sha256:181158f7e7aa26a63a0a38e6017cef28c6adee71278ce56ce11f6ec9c4905058"}, + {file = "flake8_comprehensions-3.10.0-py3-none-any.whl", hash = "sha256:dad454fd3d525039121e98fa1dd90c46bc138708196a4ebbc949ad3c859adedb"}, ] flake8-debugger = [ - {file = 
"flake8-debugger-4.0.0.tar.gz", hash = "sha256:e43dc777f7db1481db473210101ec2df2bd39a45b149d7218a618e954177eda6"}, - {file = "flake8_debugger-4.0.0-py3-none-any.whl", hash = "sha256:82e64faa72e18d1bdd0000407502ebb8ecffa7bc027c62b9d4110ce27c091032"}, + {file = "flake8-debugger-4.1.2.tar.gz", hash = "sha256:52b002560941e36d9bf806fca2523dc7fb8560a295d5f1a6e15ac2ded7a73840"}, + {file = "flake8_debugger-4.1.2-py3-none-any.whl", hash = "sha256:0a5e55aeddcc81da631ad9c8c366e7318998f83ff00985a49e6b3ecf61e571bf"}, ] flake8-eradicate = [ {file = "flake8-eradicate-1.3.0.tar.gz", hash = "sha256:e4c98f00d17dc8653e3388cac2624cd81e9735de2fd4a8dcf99029633ebd7a63"}, {file = "flake8_eradicate-1.3.0-py3-none-any.whl", hash = "sha256:85a71e0c5f4e07f7c6c5fec520483561fd6bd295417d622855bdeade99242e3d"}, ] -flake8-fixme = [ - {file = "flake8-fixme-1.1.1.tar.gz", hash = "sha256:50cade07d27a4c30d4f12351478df87339e67640c83041b664724bda6d16f33a"}, - {file = "flake8_fixme-1.1.1-py2.py3-none-any.whl", hash = "sha256:226a6f2ef916730899f29ac140bed5d4a17e5aba79f00a0e3ae1eff1997cb1ac"}, -] +flake8-fixme = [] flake8-isort = [ {file = "flake8-isort-4.2.0.tar.gz", hash = "sha256:26571500cd54976bbc0cf1006ffbcd1a68dd102f816b7a1051b219616ba9fee0"}, {file = "flake8_isort-4.2.0-py3-none-any.whl", hash = "sha256:5b87630fb3719bf4c1833fd11e0d9534f43efdeba524863e15d8f14a7ef6adbf"}, ] -flake8-variables-names = [ - {file = "flake8_variables_names-0.0.4.tar.gz", hash = "sha256:d6fa0571a807c72940b5773827c5760421ea6f8206595ff0a8ecfa01e42bf2cf"}, -] -future = [ - {file = "future-0.18.2.tar.gz", hash = "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"}, -] +flake8-variables-names = [] +future = [] ghp-import = [ {file = "ghp-import-2.1.0.tar.gz", hash = "sha256:9c535c4c61193c2df8871222567d7fd7e5014d835f97dc7b7439069e2413d343"}, {file = "ghp_import-2.1.0-py3-none-any.whl", hash = "sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619"}, ] -gitdb = [ - {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"}, - {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, -] +gitdb = [] gitpython = [ - {file = "GitPython-3.1.20-py3-none-any.whl", hash = "sha256:b1e1c269deab1b08ce65403cf14e10d2ef1f6c89e33ea7c5e5bb0222ea593b8a"}, - {file = "GitPython-3.1.20.tar.gz", hash = "sha256:df0e072a200703a65387b0cfdf0466e3bab729c0458cf6b7349d0e9877636519"}, + {file = "GitPython-3.1.27-py3-none-any.whl", hash = "sha256:5b68b000463593e05ff2b261acff0ff0972df8ab1b70d3cdbd41b546c8b8fc3d"}, + {file = "GitPython-3.1.27.tar.gz", hash = "sha256:1c885ce809e8ba2d88a29befeb385fcea06338d3640712b59ca623c220bb5704"}, ] idna = [ {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, @@ -1571,165 +1581,85 @@ importlib-metadata = [ {file = "importlib_metadata-4.12.0-py3-none-any.whl", hash = "sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23"}, {file = "importlib_metadata-4.12.0.tar.gz", hash = "sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670"}, ] -importlib-resources = [ - {file = "importlib_resources-5.4.0-py3-none-any.whl", hash = "sha256:33a95faed5fc19b4bc16b29a6eeae248a3fe69dd55d4d229d2b480e23eeaad45"}, - {file = "importlib_resources-5.4.0.tar.gz", hash = "sha256:d756e2f85dd4de2ba89be0b21dba2a3bbec2e871a42a3a16719258a11f87506b"}, -] -iniconfig = [ - {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = 
"sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, - {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, -] -isort = [ - {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, - {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, -] +iniconfig = [] +isort = [] jinja2 = [ - {file = "Jinja2-3.0.3-py3-none-any.whl", hash = "sha256:077ce6014f7b40d03b47d1f1ca4b0fc8328a692bd284016f806ed0eaca390ad8"}, - {file = "Jinja2-3.0.3.tar.gz", hash = "sha256:611bb273cd68f3b993fabdc4064fc858c5b47a973cb5aa7999ec1ba405c87cd7"}, + {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, + {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, ] jmespath = [ - {file = "jmespath-0.10.0-py2.py3-none-any.whl", hash = "sha256:cdf6525904cc597730141d61b36f2e4b8ecc257c420fa2f4549bac2c2d0cb72f"}, - {file = "jmespath-0.10.0.tar.gz", hash = "sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9"}, + {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, + {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, ] jsii = [ - {file = "jsii-1.57.0-py3-none-any.whl", hash = "sha256:4888091986a9ed8d50b042cc9c35a9564dd54c19e78adb890bf06d9ffac1b325"}, - {file = "jsii-1.57.0.tar.gz", hash = "sha256:ff7a3c51c1a653dd8a4342043b5f8e40b928bc617e3141e0d5d66175d22a754b"}, + {file = "jsii-1.66.0-py3-none-any.whl", hash = "sha256:3c776c3fc32ed1e28fc95625617f106ca28e9e87b0b47d4d0bb66b78b873cafe"}, + {file = "jsii-1.66.0.tar.gz", hash = "sha256:11c64c7c799776fe05922032970c36ac81c726dbdc18039dd3d2e848458d105c"}, ] mako = [ - {file = "Mako-1.1.6-py2.py3-none-any.whl", hash = "sha256:afaf8e515d075b22fad7d7b8b30e4a1c90624ff2f3733a06ec125f5a5f043a57"}, - {file = "Mako-1.1.6.tar.gz", hash = "sha256:4e9e345a41924a954251b95b4b28e14a301145b544901332e658907a7464b6b2"}, -] -mando = [ - {file = "mando-0.6.4-py2.py3-none-any.whl", hash = "sha256:4ce09faec7e5192ffc3c57830e26acba0fd6cd11e1ee81af0d4df0657463bd1c"}, - {file = "mando-0.6.4.tar.gz", hash = "sha256:79feb19dc0f097daa64a1243db578e7674909b75f88ac2220f1c065c10a0d960"}, + {file = "Mako-1.2.2-py3-none-any.whl", hash = "sha256:8efcb8004681b5f71d09c983ad5a9e6f5c40601a6ec469148753292abc0da534"}, + {file = "Mako-1.2.2.tar.gz", hash = "sha256:3724869b363ba630a272a5f89f68c070352137b8fd1757650017b7e06fda163f"}, ] +mando = [] markdown = [ {file = "Markdown-3.3.7-py3-none-any.whl", hash = "sha256:f5da449a6e1c989a4cea2631aa8ee67caa5a2ef855d551c88f9e309f4634c621"}, {file = "Markdown-3.3.7.tar.gz", hash = "sha256:cbb516f16218e643d8e0a95b309f77eb118cb138d39a4f27851e6a63581db874"}, ] markupsafe = [ - {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"}, - {file = 
"MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_i686.whl", hash = 
"sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, - {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, -] -mccabe = [ - {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, - {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, -] -mergedeep = [ - {file = 
"mergedeep-1.3.4-py3-none-any.whl", hash = "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307"}, - {file = "mergedeep-1.3.4.tar.gz", hash = "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8"}, -] -mike = [ - {file = "mike-0.6.0-py3-none-any.whl", hash = "sha256:cef9b9c803ff5c3fbb410f51f5ceb00902a9fe16d9fabd93b69c65cf481ab5a1"}, - {file = "mike-0.6.0.tar.gz", hash = "sha256:6d6239de2a60d733da2f34617e9b9a14c4b5437423b47e524f14dc96d6ce5f2f"}, -] + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, + {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, +] +mccabe = [] +mergedeep = [] +mike = [] mkdocs = [ {file = "mkdocs-1.3.1-py3-none-any.whl", hash = "sha256:fda92466393127d2da830bc6edc3a625a14b436316d1caf347690648e774c4f0"}, {file = "mkdocs-1.3.1.tar.gz", hash = "sha256:a41a2ff25ce3bbacc953f9844ba07d106233cd76c88bac1f59cb1564ac0d87ed"}, ] -mkdocs-git-revision-date-plugin = [ - {file = "mkdocs_git_revision_date_plugin-0.3.2-py3-none-any.whl", hash 
= "sha256:2e67956cb01823dd2418e2833f3623dee8604cdf223bddd005fe36226a56f6ef"}, -] +mkdocs-git-revision-date-plugin = [] mkdocs-material = [ {file = "mkdocs-material-8.4.2.tar.gz", hash = "sha256:704c64c3fff126a3923c2961d95f26b19be621342a6a4e49ed039f0bb7a5c540"}, {file = "mkdocs_material-8.4.2-py2.py3-none-any.whl", hash = "sha256:166287bb0e4197804906bf0389a852d5ced43182c30127ac8b48a4e497ecd7e5"}, ] -mkdocs-material-extensions = [ - {file = "mkdocs-material-extensions-1.0.3.tar.gz", hash = "sha256:bfd24dfdef7b41c312ede42648f9eb83476ea168ec163b613f9abd12bbfddba2"}, - {file = "mkdocs_material_extensions-1.0.3-py3-none-any.whl", hash = "sha256:a82b70e533ce060b2a5d9eb2bc2e1be201cf61f901f93704b4acf6e3d5983a44"}, -] -mypy = [ - {file = "mypy-0.971-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f2899a3cbd394da157194f913a931edfd4be5f274a88041c9dc2d9cdcb1c315c"}, - {file = "mypy-0.971-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:98e02d56ebe93981c41211c05adb630d1d26c14195d04d95e49cd97dbc046dc5"}, - {file = "mypy-0.971-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:19830b7dba7d5356d3e26e2427a2ec91c994cd92d983142cbd025ebe81d69cf3"}, - {file = "mypy-0.971-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:02ef476f6dcb86e6f502ae39a16b93285fef97e7f1ff22932b657d1ef1f28655"}, - {file = "mypy-0.971-cp310-cp310-win_amd64.whl", hash = "sha256:25c5750ba5609a0c7550b73a33deb314ecfb559c350bb050b655505e8aed4103"}, - {file = "mypy-0.971-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d3348e7eb2eea2472db611486846742d5d52d1290576de99d59edeb7cd4a42ca"}, - {file = "mypy-0.971-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3fa7a477b9900be9b7dd4bab30a12759e5abe9586574ceb944bc29cddf8f0417"}, - {file = "mypy-0.971-cp36-cp36m-win_amd64.whl", hash = "sha256:2ad53cf9c3adc43cf3bea0a7d01a2f2e86db9fe7596dfecb4496a5dda63cbb09"}, - {file = "mypy-0.971-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:855048b6feb6dfe09d3353466004490b1872887150c5bb5caad7838b57328cc8"}, - {file = "mypy-0.971-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:23488a14a83bca6e54402c2e6435467a4138785df93ec85aeff64c6170077fb0"}, - {file = "mypy-0.971-cp37-cp37m-win_amd64.whl", hash = "sha256:4b21e5b1a70dfb972490035128f305c39bc4bc253f34e96a4adf9127cf943eb2"}, - {file = "mypy-0.971-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9796a2ba7b4b538649caa5cecd398d873f4022ed2333ffde58eaf604c4d2cb27"}, - {file = "mypy-0.971-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5a361d92635ad4ada1b1b2d3630fc2f53f2127d51cf2def9db83cba32e47c856"}, - {file = "mypy-0.971-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b793b899f7cf563b1e7044a5c97361196b938e92f0a4343a5d27966a53d2ec71"}, - {file = "mypy-0.971-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d1ea5d12c8e2d266b5fb8c7a5d2e9c0219fedfeb493b7ed60cd350322384ac27"}, - {file = "mypy-0.971-cp38-cp38-win_amd64.whl", hash = "sha256:23c7ff43fff4b0df93a186581885c8512bc50fc4d4910e0f838e35d6bb6b5e58"}, - {file = "mypy-0.971-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1f7656b69974a6933e987ee8ffb951d836272d6c0f81d727f1d0e2696074d9e6"}, - {file = "mypy-0.971-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d2022bfadb7a5c2ef410d6a7c9763188afdb7f3533f22a0a32be10d571ee4bbe"}, - {file = "mypy-0.971-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:ef943c72a786b0f8d90fd76e9b39ce81fb7171172daf84bf43eaf937e9f220a9"}, - {file = "mypy-0.971-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d744f72eb39f69312bc6c2abf8ff6656973120e2eb3f3ec4f758ed47e414a4bf"}, - {file = "mypy-0.971-cp39-cp39-win_amd64.whl", hash = "sha256:77a514ea15d3007d33a9e2157b0ba9c267496acf12a7f2b9b9f8446337aac5b0"}, - {file = "mypy-0.971-py3-none-any.whl", hash = "sha256:0d054ef16b071149917085f51f89555a576e2618d5d9dd70bd6eea6410af3ac9"}, - {file = "mypy-0.971.tar.gz", hash = "sha256:40b0f21484238269ae6a57200c807d80debc6459d444c0489a102d7c6a75fa56"}, -] +mkdocs-material-extensions = [] +mypy = [] mypy-boto3-appconfig = [ {file = "mypy-boto3-appconfig-1.24.36.post1.tar.gz", hash = "sha256:e1916b3754915cb411ef977083500e1f30f81f7b3aea6ff5eed1cec91944dea6"}, {file = "mypy_boto3_appconfig-1.24.36.post1-py3-none-any.whl", hash = "sha256:a5dbe549dbebf4bc7a6cfcbfa9dff89ceb4983c042b785763ee656504bdb49f6"}, @@ -1770,85 +1700,66 @@ mypy-boto3-xray = [ {file = "mypy-boto3-xray-1.24.36.post1.tar.gz", hash = "sha256:104f1ecf7f1f6278c582201e71a7ab64843d3a3fdc8f23295cf68788cc77e9bb"}, {file = "mypy_boto3_xray-1.24.36.post1-py3-none-any.whl", hash = "sha256:97b9f0686c717c8be99ac06cb52febaf71712b4e4cd0b61ed2eb5ed012a9b5fd"}, ] -mypy-extensions = [ - {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, - {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, -] -packaging = [ - {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, - {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, -] +mypy-extensions = [] +packaging = [] pathspec = [ - {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"}, - {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, + {file = "pathspec-0.10.0-py3-none-any.whl", hash = "sha256:aefa80ac32d5bf1f96139dca67cefb69a431beff4e6bf1168468f37d7ab87015"}, + {file = "pathspec-0.10.0.tar.gz", hash = "sha256:01eecd304ba0e6eeed188ae5fa568e99ef10265af7fd9ab737d6412b4ee0ab85"}, ] pbr = [ {file = "pbr-5.10.0-py2.py3-none-any.whl", hash = "sha256:da3e18aac0a3c003e9eea1a81bd23e5a3a75d745670dcf736317b7d966887fdf"}, {file = "pbr-5.10.0.tar.gz", hash = "sha256:cfcc4ff8e698256fc17ea3ff796478b050852585aa5bae79ecd05b2ab7b39b9a"}, ] -pdoc3 = [ - {file = "pdoc3-0.10.0-py3-none-any.whl", hash = "sha256:ba45d1ada1bd987427d2bf5cdec30b2631a3ff5fb01f6d0e77648a572ce6028b"}, - {file = "pdoc3-0.10.0.tar.gz", hash = "sha256:5f22e7bcb969006738e1aa4219c75a32f34c2d62d46dc9d2fb2d3e0b0287e4b7"}, -] +pdoc3 = [] platformdirs = [ - {file = "platformdirs-2.4.0-py3-none-any.whl", hash = "sha256:8868bbe3c3c80d42f20156f22e7131d2fb321f5bc86a2a345375c6481a67021d"}, - {file = "platformdirs-2.4.0.tar.gz", hash = "sha256:367a5e80b3d04d2428ffa76d33f124cf11e8fff2acdaa9b43d545f5c7d661ef2"}, -] -pluggy = [ - {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, - {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, -] -publication = [ - {file = "publication-0.0.3-py2.py3-none-any.whl", hash = 
"sha256:0248885351febc11d8a1098d5c8e3ab2dabcf3e8c0c96db1e17ecd12b53afbe6"}, - {file = "publication-0.0.3.tar.gz", hash = "sha256:68416a0de76dddcdd2930d1c8ef853a743cc96c82416c4e4d3b5d901c6276dc4"}, -] -py = [ - {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, - {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, -] -py-cpuinfo = [ - {file = "py-cpuinfo-8.0.0.tar.gz", hash = "sha256:5f269be0e08e33fd959de96b34cd4aeeeacac014dd8305f70eb28d06de2345c5"}, + {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, + {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, ] +pluggy = [] +publication = [] +py = [] +py-cpuinfo = [] pycodestyle = [ {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"}, {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, ] pydantic = [ - {file = "pydantic-1.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9c9e04a6cdb7a363d7cb3ccf0efea51e0abb48e180c0d31dca8d247967d85c6e"}, - {file = "pydantic-1.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fafe841be1103f340a24977f61dee76172e4ae5f647ab9e7fd1e1fca51524f08"}, - {file = "pydantic-1.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afacf6d2a41ed91fc631bade88b1d319c51ab5418870802cedb590b709c5ae3c"}, - {file = "pydantic-1.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ee0d69b2a5b341fc7927e92cae7ddcfd95e624dfc4870b32a85568bd65e6131"}, - {file = "pydantic-1.9.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ff68fc85355532ea77559ede81f35fff79a6a5543477e168ab3a381887caea76"}, - {file = "pydantic-1.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c0f5e142ef8217019e3eef6ae1b6b55f09a7a15972958d44fbd228214cede567"}, - {file = "pydantic-1.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:615661bfc37e82ac677543704437ff737418e4ea04bef9cf11c6d27346606044"}, - {file = "pydantic-1.9.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:328558c9f2eed77bd8fffad3cef39dbbe3edc7044517f4625a769d45d4cf7555"}, - {file = "pydantic-1.9.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bd446bdb7755c3a94e56d7bdfd3ee92396070efa8ef3a34fab9579fe6aa1d84"}, - {file = "pydantic-1.9.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0b214e57623a535936005797567231a12d0da0c29711eb3514bc2b3cd008d0f"}, - {file = "pydantic-1.9.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:d8ce3fb0841763a89322ea0432f1f59a2d3feae07a63ea2c958b2315e1ae8adb"}, - {file = "pydantic-1.9.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b34ba24f3e2d0b39b43f0ca62008f7ba962cff51efa56e64ee25c4af6eed987b"}, - {file = "pydantic-1.9.2-cp36-cp36m-win_amd64.whl", hash = "sha256:84d76ecc908d917f4684b354a39fd885d69dd0491be175f3465fe4b59811c001"}, - {file = "pydantic-1.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4de71c718c9756d679420c69f216776c2e977459f77e8f679a4a961dc7304a56"}, - {file = "pydantic-1.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5803ad846cdd1ed0d97eb00292b870c29c1f03732a010e66908ff48a762f20e4"}, - {file = 
"pydantic-1.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a8c5360a0297a713b4123608a7909e6869e1b56d0e96eb0d792c27585d40757f"}, - {file = "pydantic-1.9.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:cdb4272678db803ddf94caa4f94f8672e9a46bae4a44f167095e4d06fec12979"}, - {file = "pydantic-1.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:19b5686387ea0d1ea52ecc4cffb71abb21702c5e5b2ac626fd4dbaa0834aa49d"}, - {file = "pydantic-1.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:32e0b4fb13ad4db4058a7c3c80e2569adbd810c25e6ca3bbd8b2a9cc2cc871d7"}, - {file = "pydantic-1.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:91089b2e281713f3893cd01d8e576771cd5bfdfbff5d0ed95969f47ef6d676c3"}, - {file = "pydantic-1.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e631c70c9280e3129f071635b81207cad85e6c08e253539467e4ead0e5b219aa"}, - {file = "pydantic-1.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b3946f87e5cef3ba2e7bd3a4eb5a20385fe36521d6cc1ebf3c08a6697c6cfb3"}, - {file = "pydantic-1.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5565a49effe38d51882cb7bac18bda013cdb34d80ac336428e8908f0b72499b0"}, - {file = "pydantic-1.9.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:bd67cb2c2d9602ad159389c29e4ca964b86fa2f35c2faef54c3eb28b4efd36c8"}, - {file = "pydantic-1.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4aafd4e55e8ad5bd1b19572ea2df546ccace7945853832bb99422a79c70ce9b8"}, - {file = "pydantic-1.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:d70916235d478404a3fa8c997b003b5f33aeac4686ac1baa767234a0f8ac2326"}, - {file = "pydantic-1.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f0ca86b525264daa5f6b192f216a0d1e860b7383e3da1c65a1908f9c02f42801"}, - {file = "pydantic-1.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1061c6ee6204f4f5a27133126854948e3b3d51fcc16ead2e5d04378c199b2f44"}, - {file = "pydantic-1.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e78578f0c7481c850d1c969aca9a65405887003484d24f6110458fb02cca7747"}, - {file = "pydantic-1.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5da164119602212a3fe7e3bc08911a89db4710ae51444b4224c2382fd09ad453"}, - {file = "pydantic-1.9.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ead3cd020d526f75b4188e0a8d71c0dbbe1b4b6b5dc0ea775a93aca16256aeb"}, - {file = "pydantic-1.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7d0f183b305629765910eaad707800d2f47c6ac5bcfb8c6397abdc30b69eeb15"}, - {file = "pydantic-1.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:f1a68f4f65a9ee64b6ccccb5bf7e17db07caebd2730109cb8a95863cfa9c4e55"}, - {file = "pydantic-1.9.2-py3-none-any.whl", hash = "sha256:78a4d6bdfd116a559aeec9a4cfe77dda62acc6233f8b56a716edad2651023e5e"}, - {file = "pydantic-1.9.2.tar.gz", hash = "sha256:8cb0bc509bfb71305d7a59d00163d5f9fc4530f0881ea32c74ff4f74c85f3d3d"}, + {file = "pydantic-1.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7e34e46dd08dafd4c75b8378efe3eae7d8e5212950fcd894d86c1df2dcfb80fe"}, + {file = "pydantic-1.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4af55f33ae5be6cccecd4fa462630daffef1f161f60c3f194b24eca705d50748"}, + {file = "pydantic-1.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1856bc6640aced42886f7ee48f5ed1fa5adf35e34064b5f9532b52d5a3b8a0d3"}, + {file = 
"pydantic-1.10.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d73ae7e210929a1b7d288034835dd787e5b0597192d58ab7342bacbeec0f33df"}, + {file = "pydantic-1.10.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1192c17667d21652ab93b5eecd1a776cd0a4e384ea8c331bb830c9d130293af"}, + {file = "pydantic-1.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:026427be4e251f876e7519a63af37ae5ebb8b593ca8b02180bdc6becd1ea4ef4"}, + {file = "pydantic-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:d1dffae1f219d06a997ec78d1d2daafdbfecf243ad8eb36bfbcbc73e30e17385"}, + {file = "pydantic-1.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b549eebe8de4e50fc3b4f8c1f9cc2f731d91787fc3f7d031561668377b8679bc"}, + {file = "pydantic-1.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a0ba8710bfdaddb7424c05ad2dc1da04796003751eac6ad30c218ac1d68a174e"}, + {file = "pydantic-1.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0985ba95af937389c9ce8d747138417303569cb736bd12469646ef53cd66e1c"}, + {file = "pydantic-1.10.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d484fbbe6267b6c936a6d005d5170ab553f3f4367348c7e88d3e17f0a7179981"}, + {file = "pydantic-1.10.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:9500586151cd56a20bacb8f1082df1b4489000120d1c7ddc44c8b20870e8adbd"}, + {file = "pydantic-1.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1b5212604aaf5954e9a7cea8f0c60d6dbef996aa7b41edefd329e6b5011ce8cf"}, + {file = "pydantic-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:39212b3853eea165a3cda11075d5b7d09d4291fcbc3c0ecefd23797ee21b29e9"}, + {file = "pydantic-1.10.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b3e3aed33fbd9518cf508d5415a58af683743d53dc5e58953973d73605774f34"}, + {file = "pydantic-1.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed4e5c18cac70fadd4cf339f444c4f1795f0876dfd5b70cf0a841890b52f0001"}, + {file = "pydantic-1.10.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:45a6d0a9fdaad2a27ea69aec4659705ed8f60a5664e892c73e2b977d8f5166cc"}, + {file = "pydantic-1.10.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:158f1479367da20914961b5406ac3b29dfe1d858ae2af96c444f73543defcf0c"}, + {file = "pydantic-1.10.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:172aaeeaff8fc3ac326fb8a2934a063ca0938586c5fe8848285052de83a240f7"}, + {file = "pydantic-1.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:231b19c010288bfbfdcd3f79df38b5ff893c6547cd8c7d006203435790b22815"}, + {file = "pydantic-1.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:22206c152f9b86c0ee169928f9c24e1c0c566edb2462600b298ccb04860961aa"}, + {file = "pydantic-1.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d8ef840ef803ef17a7bd52480eb85faca0eed728d70233fd560f7d1066330247"}, + {file = "pydantic-1.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f99b4de6936a0f9fe255d1c7fdc447700ddd027c9ad38a612d453ed5fc7d6d0"}, + {file = "pydantic-1.10.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:134b4fd805737496ce4efd24ce2f8da0e08c66dcfc054fee1a19673eec780f2c"}, + {file = "pydantic-1.10.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c4c76af6ad47bc46cf16bd0e4a5e536a7a2bec0dec14ea08b712daa6645bf293"}, + {file = "pydantic-1.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:e03402b0a6b23a2d0b9ee31e45d80612c95562b5af8b5c900171b9d9015ddc5f"}, + {file = "pydantic-1.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:3a3a60fcb5ce08cab593b7978d02db67b8d153e9d582adab7c0b69d7200d78be"}, + {file = "pydantic-1.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d8e5c5a50821c55b76dcf422610225cb7e44685cdd81832d0d504fa8c9343f35"}, + {file = "pydantic-1.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:645b83297a9428a675c98c1f69a7237a381900e34f23245c0ea73d74e454bf68"}, + {file = "pydantic-1.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95ab3f31f35dc4f8fc85b04d13569e5fdc9de2d3050ae64c1fdc3430dfe7d92d"}, + {file = "pydantic-1.10.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e290915a0ed53d3c59d6071fc7d2c843ed04c33affcd752dd1f3daa859b44a76"}, + {file = "pydantic-1.10.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:af669da39ede365069dbc5de56564b011e3353f801acdbdd7145002a78abc3d9"}, + {file = "pydantic-1.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e796f915762dec4678fafc89b1f0441ab9209517a8a682ddb3f988f7ffe0827"}, + {file = "pydantic-1.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:652727f9e1d3ae30bd8a4dfbebcafd50df45277b97f3deabbbfedcf731f94aa5"}, + {file = "pydantic-1.10.0-py3-none-any.whl", hash = "sha256:4d2b9258f5bd2d129bd4cf2d31f9d40094b9ed6ef64896e2f7a70729b2d599ea"}, + {file = "pydantic-1.10.0.tar.gz", hash = "sha256:e13788fcad1baf5eb3236856b2a9a74f7dac6b3ea7ca1f60a4ad8bad4239cf4c"}, ] pyflakes = [ {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, @@ -1858,46 +1769,22 @@ pygments = [ {file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"}, {file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"}, ] -pymdown-extensions = [ - {file = "pymdown_extensions-9.5-py3-none-any.whl", hash = "sha256:ec141c0f4983755349f0c8710416348d1a13753976c028186ed14f190c8061c4"}, - {file = "pymdown_extensions-9.5.tar.gz", hash = "sha256:3ef2d998c0d5fa7eb09291926d90d69391283561cf6306f85cd588a5eb5befa0"}, -] -pyparsing = [ - {file = "pyparsing-3.0.7-py3-none-any.whl", hash = "sha256:a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484"}, - {file = "pyparsing-3.0.7.tar.gz", hash = "sha256:18ee9022775d270c55187733956460083db60b37d0d0fb357445f3094eed3eea"}, -] +pymdown-extensions = [] +pyparsing = [] pytest = [ - {file = "pytest-7.0.1-py3-none-any.whl", hash = "sha256:9ce3ff477af913ecf6321fe337b93a2c0dcf2a0a1439c43f5452112c1e4280db"}, - {file = "pytest-7.0.1.tar.gz", hash = "sha256:e30905a0c131d3d94b89624a1cc5afec3e0ba2fbdb151867d8e0ebd49850f171"}, -] -pytest-asyncio = [ - {file = "pytest-asyncio-0.16.0.tar.gz", hash = "sha256:7496c5977ce88c34379df64a66459fe395cd05543f0a2f837016e7144391fcfb"}, - {file = "pytest_asyncio-0.16.0-py3-none-any.whl", hash = "sha256:5f2a21273c47b331ae6aa5b36087047b4899e40f03f18397c0e65fa5cca54e9b"}, -] -pytest-benchmark = [ - {file = "pytest-benchmark-3.4.1.tar.gz", hash = "sha256:40e263f912de5a81d891619032983557d62a3d85843f9a9f30b98baea0cd7b47"}, - {file = "pytest_benchmark-3.4.1-py2.py3-none-any.whl", hash = "sha256:36d2b08c4882f6f997fd3126a3d6dfd70f3249cde178ed8bbc0b73db7c20f809"}, -] -pytest-cov = [ - {file = "pytest-cov-3.0.0.tar.gz", hash = "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470"}, - {file = 
"pytest_cov-3.0.0-py3-none-any.whl", hash = "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6"}, -] -pytest-forked = [ - {file = "pytest-forked-1.4.0.tar.gz", hash = "sha256:8b67587c8f98cbbadfdd804539ed5455b6ed03802203485dd2f53c1422d7440e"}, - {file = "pytest_forked-1.4.0-py3-none-any.whl", hash = "sha256:bbbb6717efc886b9d64537b41fb1497cfaf3c9601276be8da2cccfea5a3c8ad8"}, + {file = "pytest-7.1.2-py3-none-any.whl", hash = "sha256:13d0e3ccfc2b6e26be000cb6568c832ba67ba32e719443bfe725814d3c42433c"}, + {file = "pytest-7.1.2.tar.gz", hash = "sha256:a06a0425453864a270bc45e71f783330a7428defb4230fb5e6a731fde06ecd45"}, ] +pytest-asyncio = [] +pytest-benchmark = [] +pytest-cov = [] +pytest-forked = [] pytest-mock = [ - {file = "pytest-mock-3.6.1.tar.gz", hash = "sha256:40217a058c52a63f1042f0784f62009e976ba824c418cced42e88d5f40ab0e62"}, - {file = "pytest_mock-3.6.1-py3-none-any.whl", hash = "sha256:30c2f2cc9759e76eee674b81ea28c9f0b94f8f0445a1b87762cadf774f0df7e3"}, -] -pytest-xdist = [ - {file = "pytest-xdist-2.5.0.tar.gz", hash = "sha256:4580deca3ff04ddb2ac53eba39d76cb5dd5edeac050cb6fbc768b0dd712b4edf"}, - {file = "pytest_xdist-2.5.0-py3-none-any.whl", hash = "sha256:6fe5c74fec98906deb8f2d2b616b5c782022744978e7bd4695d39c8f42d0ce65"}, -] -python-dateutil = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "pytest-mock-3.8.2.tar.gz", hash = "sha256:77f03f4554392558700295e05aed0b1096a20d4a60a4f3ddcde58b0c31c8fca2"}, + {file = "pytest_mock-3.8.2-py3-none-any.whl", hash = "sha256:8a9e226d6c0ef09fcf20c94eb3405c388af438a90f3e39687f84166da82d5948"}, ] +pytest-xdist = [] +python-dateutil = [] python-snappy = [ {file = "python-snappy-0.6.1.tar.gz", hash = "sha256:b6a107ab06206acc5359d4c5632bd9b22d448702a79b3169b0c62e0fb808bb2a"}, {file = "python_snappy-0.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b7f920eaf46ebf41bd26f9df51c160d40f9e00b7b48471c3438cb8d027f7fb9b"}, @@ -1983,77 +1870,28 @@ pyyaml = [ {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, ] -pyyaml-env-tag = [ - {file = "pyyaml_env_tag-0.1-py3-none-any.whl", hash = "sha256:af31106dec8a4d68c60207c1886031cbf839b68aa7abccdb19868200532c2069"}, - {file = "pyyaml_env_tag-0.1.tar.gz", hash = "sha256:70092675bda14fdec33b31ba77e7543de9ddc88f2e5b99160396572d11525bdb"}, -] -radon = [ - {file = "radon-5.1.0-py2.py3-none-any.whl", hash = "sha256:fa74e018197f1fcb54578af0f675d8b8e2342bd8e0b72bef8197bc4c9e645f36"}, - {file = "radon-5.1.0.tar.gz", hash = "sha256:cb1d8752e5f862fb9e20d82b5f758cbc4fb1237c92c9a66450ea0ea7bf29aeee"}, -] -requests = [ - {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"}, - {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"}, -] -retry = [ - {file = "retry-0.9.2-py2.py3-none-any.whl", hash = "sha256:ccddf89761fa2c726ab29391837d4327f819ea14d244c232a1d24c67a2f98606"}, - {file = "retry-0.9.2.tar.gz", hash = "sha256:f8bfa8b99b69c4506d6f5bd3b0aabf77f98cdb17f3c9fc3f5ca820033336fba4"}, -] +pyyaml-env-tag = [] +radon = [] +requests = [] +retry = [] "ruamel.yaml" 
= [ {file = "ruamel.yaml-0.17.21-py3-none-any.whl", hash = "sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7"}, {file = "ruamel.yaml-0.17.21.tar.gz", hash = "sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af"}, ] -"ruamel.yaml.clib" = [ - {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6e7be2c5bcb297f5b82fee9c665eb2eb7001d1050deaba8471842979293a80b0"}, - {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:066f886bc90cc2ce44df8b5f7acfc6a7e2b2e672713f027136464492b0c34d7c"}, - {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:221eca6f35076c6ae472a531afa1c223b9c29377e62936f61bc8e6e8bdc5f9e7"}, - {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-win32.whl", hash = "sha256:1070ba9dd7f9370d0513d649420c3b362ac2d687fe78c6e888f5b12bf8bc7bee"}, - {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-win_amd64.whl", hash = "sha256:77df077d32921ad46f34816a9a16e6356d8100374579bc35e15bab5d4e9377de"}, - {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:cfdb9389d888c5b74af297e51ce357b800dd844898af9d4a547ffc143fa56751"}, - {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:7b2927e92feb51d830f531de4ccb11b320255ee95e791022555971c466af4527"}, - {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-win32.whl", hash = "sha256:ada3f400d9923a190ea8b59c8f60680c4ef8a4b0dfae134d2f2ff68429adfab5"}, - {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-win_amd64.whl", hash = "sha256:de9c6b8a1ba52919ae919f3ae96abb72b994dd0350226e28f3686cb4f142165c"}, - {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d67f273097c368265a7b81e152e07fb90ed395df6e552b9fa858c6d2c9f42502"}, - {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:72a2b8b2ff0a627496aad76f37a652bcef400fd861721744201ef1b45199ab78"}, - {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:d3c620a54748a3d4cf0bcfe623e388407c8e85a4b06b8188e126302bcab93ea8"}, - {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-win32.whl", hash = "sha256:9efef4aab5353387b07f6b22ace0867032b900d8e91674b5d8ea9150db5cae94"}, - {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-win_amd64.whl", hash = "sha256:846fc8336443106fe23f9b6d6b8c14a53d38cef9a375149d61f99d78782ea468"}, - {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0847201b767447fc33b9c235780d3aa90357d20dd6108b92be544427bea197dd"}, - {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:78988ed190206672da0f5d50c61afef8f67daa718d614377dcd5e3ed85ab4a99"}, - {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:210c8fcfeff90514b7133010bf14e3bad652c8efde6b20e00c43854bf94fa5a6"}, - {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-win32.whl", hash = "sha256:a49e0161897901d1ac9c4a79984b8410f450565bbad64dbfcbf76152743a0cdb"}, - {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-win_amd64.whl", hash = "sha256:bf75d28fa071645c529b5474a550a44686821decebdd00e21127ef1fd566eabe"}, - {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a32f8d81ea0c6173ab1b3da956869114cae53ba1e9f72374032e33ba3118c233"}, - {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7f7ecb53ae6848f959db6ae93bdff1740e651809780822270eab111500842a84"}, - {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-manylinux2014_aarch64.whl", hash = 
"sha256:61bc5e5ca632d95925907c569daa559ea194a4d16084ba86084be98ab1cec1c6"}, - {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-win32.whl", hash = "sha256:89221ec6d6026f8ae859c09b9718799fea22c0e8da8b766b0b2c9a9ba2db326b"}, - {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-win_amd64.whl", hash = "sha256:31ea73e564a7b5fbbe8188ab8b334393e06d997914a4e184975348f204790277"}, - {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dc6a613d6c74eef5a14a214d433d06291526145431c3b964f5e16529b1842bed"}, - {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:1866cf2c284a03b9524a5cc00daca56d80057c5ce3cdc86a52020f4c720856f0"}, - {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:1b4139a6ffbca8ef60fdaf9b33dec05143ba746a6f0ae0f9d11d38239211d335"}, - {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-win32.whl", hash = "sha256:3fb9575a5acd13031c57a62cc7823e5d2ff8bc3835ba4d94b921b4e6ee664104"}, - {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-win_amd64.whl", hash = "sha256:825d5fccef6da42f3c8eccd4281af399f21c02b32d98e113dbc631ea6a6ecbc7"}, - {file = "ruamel.yaml.clib-0.2.6.tar.gz", hash = "sha256:4ff604ce439abb20794f05613c374759ce10e3595d1867764dd1ae675b85acbd"}, -] +"ruamel.yaml.clib" = [] s3transfer = [ - {file = "s3transfer-0.5.2-py3-none-any.whl", hash = "sha256:7a6f4c4d1fdb9a2b640244008e142cbc2cd3ae34b386584ef044dd0f27101971"}, - {file = "s3transfer-0.5.2.tar.gz", hash = "sha256:95c58c194ce657a5f4fb0b9e60a84968c808888aed628cd98ab8771fe1db98ed"}, + {file = "s3transfer-0.6.0-py3-none-any.whl", hash = "sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd"}, + {file = "s3transfer-0.6.0.tar.gz", hash = "sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947"}, ] six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] -smmap = [ - {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, - {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, -] -stevedore = [ - {file = "stevedore-3.5.0-py3-none-any.whl", hash = "sha256:a547de73308fd7e90075bb4d301405bebf705292fa90a90fc3bcf9133f58616c"}, - {file = "stevedore-3.5.0.tar.gz", hash = "sha256:f40253887d8712eaa2bb0ea3830374416736dc8ec0e22f5a65092c1174c44335"}, -] +smmap = [] +stevedore = [] tomli = [ - {file = "tomli-1.2.3-py3-none-any.whl", hash = "sha256:e3069e4be3ead9668e21cb9b074cd948f7b3113fd9c8bba083f48247aab8b11c"}, - {file = "tomli-1.2.3.tar.gz", hash = "sha256:05b6166bff487dc068d322585c7ea4ef78deed501cc124060e0f238e89a9231f"}, + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] typed-ast = [ {file = "typed_ast-1.5.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:669dd0c4167f6f2cd9f57041e03c3c2ebf9063d0757dc89f79ba1daa2bfca9d4"}, @@ -2081,6 +1919,10 @@ typed-ast = [ {file = "typed_ast-1.5.4-cp39-cp39-win_amd64.whl", hash = "sha256:0fdbcf2fef0ca421a3f5912555804296f0b0960f0418c440f5d6d3abb549f3e1"}, {file = "typed_ast-1.5.4.tar.gz", hash = "sha256:39e21ceb7388e4bb37f4c679d72707ed46c2fbf2a5609b8b8ebc4b067d977df2"}, ] +typeguard = [ + {file = 
"typeguard-2.13.3-py3-none-any.whl", hash = "sha256:5e3e3be01e887e7eafae5af63d1f36c849aaa94e3a0112097312aabfa16284f1"}, + {file = "typeguard-2.13.3.tar.gz", hash = "sha256:00edaa8da3a133674796cf5ea87d9f4b4c367d77476e185e80251cc13dfbb8c4"}, +] types-requests = [ {file = "types-requests-2.28.9.tar.gz", hash = "sha256:feaf581bd580497a47fe845d506fa3b91b484cf706ff27774e87659837de9962"}, {file = "types_requests-2.28.9-py3-none-any.whl", hash = "sha256:86cb66d3de2f53eac5c09adc42cf6547eefbd0c7e1210beca1ee751c35d96083"}, @@ -2190,10 +2032,7 @@ wrapt = [ {file = "wrapt-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb"}, {file = "wrapt-1.14.1.tar.gz", hash = "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"}, ] -xenon = [ - {file = "xenon-0.9.0-py2.py3-none-any.whl", hash = "sha256:994c80c7f1c6d40596b600b93734d85a5739208f31895ef99f1e4d362caf9e35"}, - {file = "xenon-0.9.0.tar.gz", hash = "sha256:d2b9cb6c6260f771a432c1e588e51fddb17858f88f73ef641e7532f7a5f58fb8"}, -] +xenon = [] zipp = [ {file = "zipp-3.8.1-py3-none-any.whl", hash = "sha256:47c40d7fe183a6f21403a199b3e4192cca5774656965b0a4988ad2f8feb5f009"}, {file = "zipp-3.8.1.tar.gz", hash = "sha256:05b45f1ee8f807d0cc928485ca40a07cb491cf092ff587c0df9cb1fd154848d2"}, diff --git a/pyproject.toml b/pyproject.toml index b25d5f19d4e..6420a49f179 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,7 +9,6 @@ classifiers=[ "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Natural Language :: English", - "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", @@ -20,7 +19,7 @@ keywords = ["aws_lambda_powertools", "aws", "tracing", "logging", "lambda", "pow license = "MIT-0" [tool.poetry.dependencies] -python = "^3.6.2" +python = "^3.7.4" aws-xray-sdk = "^2.8.0" fastjsonschema = "^2.14.5" boto3 = "^1.18" @@ -32,13 +31,14 @@ email-validator = {version = "*", optional = true } # issue #1148 coverage = {extras = ["toml"], version = "^6.2"} pytest = "^7.0.1" -black = "^21.12b0" +black = "^22.6.0" flake8-builtins = "^1.5.3" flake8-comprehensions = "^3.7.0" flake8-debugger = "^4.0.0" flake8-fixme = "^1.1.1" flake8-isort = "^4.1.2" flake8-variables-names = "^0.0.4" +flake8-black = "^0.3.3" isort = "^5.10.1" pytest-cov = "^3.0.0" pytest-mock = "^3.5.1" @@ -54,23 +54,25 @@ mike = "^0.6.0" mypy = "^0.971" retry = "^0.9.2" pytest-xdist = "^2.5.0" -aws-cdk-lib = "^2.23.0" +aws-cdk-lib = "^2.38.1" +"aws-cdk.aws-apigatewayv2-alpha" = "^2.38.1-alpha.0" +"aws-cdk.aws-apigatewayv2-integrations-alpha" = "^2.38.1-alpha.0" pytest-benchmark = "^3.4.1" -mypy-boto3-appconfig = { version = "^1.24.29", python = ">=3.7" } -mypy-boto3-cloudformation = { version = "^1.24.0", python = ">=3.7" } -mypy-boto3-cloudwatch = { version = "^1.24.35", python = ">=3.7" } -mypy-boto3-dynamodb = { version = "^1.24.60", python = ">=3.7" } -mypy-boto3-lambda = { version = "^1.24.0", python = ">=3.7" } -mypy-boto3-logs = { version = "^1.24.0", python = ">=3.7" } -mypy-boto3-secretsmanager = { version = "^1.24.11", python = ">=3.7" } -mypy-boto3-ssm = { version = "^1.24.0", python = ">=3.7" } -mypy-boto3-s3 = { version = "^1.24.0", python = ">=3.7" } -mypy-boto3-xray = { version = "^1.24.0", python = ">=3.7" } +mypy-boto3-appconfig = "^1.24.29" +mypy-boto3-cloudformation = "^1.24.0" +mypy-boto3-cloudwatch = "^1.24.35" +mypy-boto3-dynamodb = "^1.24.60" +mypy-boto3-lambda = "^1.24.0" 
+mypy-boto3-logs = "^1.24.0" +mypy-boto3-secretsmanager = "^1.24.11" +mypy-boto3-ssm = "^1.24.0" +mypy-boto3-s3 = "^1.24.0" +mypy-boto3-xray = "^1.24.0" types-requests = "^2.28.8" -typing-extensions = { version = "^4.3.0", python = ">=3.7" } +typing-extensions = "^4.3.0" python-snappy = "^0.6.1" -mkdocs-material = { version = "^8.4.2", python = ">=3.7" } -filelock = { version = "^3.8.0", python = ">=3.7" } +mkdocs-material = "^8.4.2" +filelock = "^3.8.0" [tool.poetry.extras] pydantic = ["pydantic", "email-validator"] diff --git a/tests/e2e/event_handler/__init__.py b/tests/e2e/event_handler/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/e2e/event_handler/conftest.py b/tests/e2e/event_handler/conftest.py new file mode 100644 index 00000000000..207ec443456 --- /dev/null +++ b/tests/e2e/event_handler/conftest.py @@ -0,0 +1,28 @@ +from pathlib import Path + +import pytest + +from tests.e2e.event_handler.infrastructure import EventHandlerStack + + +@pytest.fixture(autouse=True, scope="module") +def infrastructure(request: pytest.FixtureRequest, lambda_layer_arn: str): + """Setup and teardown logic for E2E test infrastructure + + Parameters + ---------- + request : pytest.FixtureRequest + pytest request fixture to introspect absolute path to test being executed + lambda_layer_arn : str + Lambda Layer ARN + + Yields + ------ + Dict[str, str] + CloudFormation Outputs from deployed infrastructure + """ + stack = EventHandlerStack(handlers_dir=Path(f"{request.path.parent}/handlers"), layer_arn=lambda_layer_arn) + try: + yield stack.deploy() + finally: + stack.delete() diff --git a/tests/e2e/event_handler/handlers/alb_handler.py b/tests/e2e/event_handler/handlers/alb_handler.py new file mode 100644 index 00000000000..4c3f4f9dac3 --- /dev/null +++ b/tests/e2e/event_handler/handlers/alb_handler.py @@ -0,0 +1,18 @@ +from aws_lambda_powertools.event_handler import ALBResolver, Response, content_types + +app = ALBResolver() + + +@app.get("/todos") +def hello(): + return Response( + status_code=200, + content_type=content_types.TEXT_PLAIN, + body="Hello world", + cookies=["CookieMonster", "MonsterCookie"], + headers={"Foo": ["bar", "zbr"]}, + ) + + +def lambda_handler(event, context): + return app.resolve(event, context) diff --git a/tests/e2e/event_handler/handlers/api_gateway_http_handler.py b/tests/e2e/event_handler/handlers/api_gateway_http_handler.py new file mode 100644 index 00000000000..1a20b730285 --- /dev/null +++ b/tests/e2e/event_handler/handlers/api_gateway_http_handler.py @@ -0,0 +1,18 @@ +from aws_lambda_powertools.event_handler import APIGatewayHttpResolver, Response, content_types + +app = APIGatewayHttpResolver() + + +@app.get("/todos") +def hello(): + return Response( + status_code=200, + content_type=content_types.TEXT_PLAIN, + body="Hello world", + cookies=["CookieMonster", "MonsterCookie"], + headers={"Foo": ["bar", "zbr"]}, + ) + + +def lambda_handler(event, context): + return app.resolve(event, context) diff --git a/tests/e2e/event_handler/handlers/api_gateway_rest_handler.py b/tests/e2e/event_handler/handlers/api_gateway_rest_handler.py new file mode 100644 index 00000000000..2f5ad0b94fa --- /dev/null +++ b/tests/e2e/event_handler/handlers/api_gateway_rest_handler.py @@ -0,0 +1,18 @@ +from aws_lambda_powertools.event_handler import APIGatewayRestResolver, Response, content_types + +app = APIGatewayRestResolver() + + +@app.get("/todos") +def hello(): + return Response( + status_code=200, + content_type=content_types.TEXT_PLAIN, + body="Hello 
world", + cookies=["CookieMonster", "MonsterCookie"], + headers={"Foo": ["bar", "zbr"]}, + ) + + +def lambda_handler(event, context): + return app.resolve(event, context) diff --git a/tests/e2e/event_handler/handlers/lambda_function_url_handler.py b/tests/e2e/event_handler/handlers/lambda_function_url_handler.py new file mode 100644 index 00000000000..3fd4b46ea28 --- /dev/null +++ b/tests/e2e/event_handler/handlers/lambda_function_url_handler.py @@ -0,0 +1,18 @@ +from aws_lambda_powertools.event_handler import LambdaFunctionUrlResolver, Response, content_types + +app = LambdaFunctionUrlResolver() + + +@app.get("/todos") +def hello(): + return Response( + status_code=200, + content_type=content_types.TEXT_PLAIN, + body="Hello world", + cookies=["CookieMonster", "MonsterCookie"], + headers={"Foo": ["bar", "zbr"]}, + ) + + +def lambda_handler(event, context): + return app.resolve(event, context) diff --git a/tests/e2e/event_handler/infrastructure.py b/tests/e2e/event_handler/infrastructure.py new file mode 100644 index 00000000000..62421b8aac9 --- /dev/null +++ b/tests/e2e/event_handler/infrastructure.py @@ -0,0 +1,81 @@ +from pathlib import Path +from typing import Dict, Optional + +from aws_cdk import CfnOutput +from aws_cdk import aws_apigateway as apigwv1 +from aws_cdk import aws_apigatewayv2_alpha as apigwv2 +from aws_cdk import aws_apigatewayv2_integrations_alpha as apigwv2integrations +from aws_cdk import aws_ec2 as ec2 +from aws_cdk import aws_elasticloadbalancingv2 as elbv2 +from aws_cdk import aws_elasticloadbalancingv2_targets as targets +from aws_cdk.aws_lambda import Function, FunctionUrlAuthType + +from tests.e2e.utils.infrastructure import BaseInfrastructure + + +class EventHandlerStack(BaseInfrastructure): + FEATURE_NAME = "event-handlers" + + def __init__(self, handlers_dir: Path, feature_name: str = FEATURE_NAME, layer_arn: str = "") -> None: + super().__init__(feature_name, handlers_dir, layer_arn) + + def create_resources(self): + functions = self.create_lambda_functions() + + self._create_alb(function=functions["AlbHandler"]) + self._create_api_gateway_rest(function=functions["ApiGatewayRestHandler"]) + self._create_api_gateway_http(function=functions["ApiGatewayHttpHandler"]) + self._create_lambda_function_url(function=functions["LambdaFunctionUrlHandler"]) + + def _create_alb(self, function: Function): + vpc = ec2.Vpc(self.stack, "EventHandlerVPC", max_azs=2) + + alb = elbv2.ApplicationLoadBalancer(self.stack, "ALB", vpc=vpc, internet_facing=True) + CfnOutput(self.stack, "ALBDnsName", value=alb.load_balancer_dns_name) + + self._create_alb_listener(alb=alb, name="Basic", port=80, function=function) + self._create_alb_listener( + alb=alb, + name="MultiValueHeader", + port=8080, + function=function, + attributes={"lambda.multi_value_headers.enabled": "true"}, + ) + + def _create_alb_listener( + self, + alb: elbv2.ApplicationLoadBalancer, + name: str, + port: int, + function: Function, + attributes: Optional[Dict[str, str]] = None, + ): + listener = alb.add_listener(name, port=port, protocol=elbv2.ApplicationProtocol.HTTP) + target = listener.add_targets(f"ALB{name}Target", targets=[targets.LambdaTarget(function)]) + if attributes is not None: + for key, value in attributes.items(): + target.set_attribute(key, value) + CfnOutput(self.stack, f"ALB{name}ListenerPort", value=str(port)) + + def _create_api_gateway_http(self, function: Function): + apigw = apigwv2.HttpApi(self.stack, "APIGatewayHTTP", create_default_stage=True) + apigw.add_routes( + path="/todos", + 
methods=[apigwv2.HttpMethod.GET], + integration=apigwv2integrations.HttpLambdaIntegration("TodosIntegration", function), + ) + + CfnOutput(self.stack, "APIGatewayHTTPUrl", value=(apigw.url or "")) + + def _create_api_gateway_rest(self, function: Function): + apigw = apigwv1.RestApi(self.stack, "APIGatewayRest", deploy_options=apigwv1.StageOptions(stage_name="dev")) + + todos = apigw.root.add_resource("todos") + todos.add_method("GET", apigwv1.LambdaIntegration(function, proxy=True)) + + CfnOutput(self.stack, "APIGatewayRestUrl", value=apigw.url) + + def _create_lambda_function_url(self, function: Function): + # Maintenance: move auth to IAM when we create sigv4 builders + function_url = function.add_function_url(auth_type=FunctionUrlAuthType.NONE) + CfnOutput(self.stack, "LambdaFunctionUrl", value=function_url.url) diff --git a/tests/e2e/event_handler/test_header_serializer.py b/tests/e2e/event_handler/test_header_serializer.py new file mode 100644 index 00000000000..2b1d51bfb3d --- /dev/null +++ b/tests/e2e/event_handler/test_header_serializer.py @@ -0,0 +1,141 @@ +import pytest +from requests import Request + +from tests.e2e.utils import data_fetcher + + +@pytest.fixture +def alb_basic_listener_endpoint(infrastructure: dict) -> str: + dns_name = infrastructure.get("ALBDnsName") + port = infrastructure.get("ALBBasicListenerPort", "") + return f"http://{dns_name}:{port}" + + +@pytest.fixture +def alb_multi_value_header_listener_endpoint(infrastructure: dict) -> str: + dns_name = infrastructure.get("ALBDnsName") + port = infrastructure.get("ALBMultiValueHeaderListenerPort", "") + return f"http://{dns_name}:{port}" + + +@pytest.fixture +def apigw_rest_endpoint(infrastructure: dict) -> str: + return infrastructure.get("APIGatewayRestUrl", "") + + +@pytest.fixture +def apigw_http_endpoint(infrastructure: dict) -> str: + return infrastructure.get("APIGatewayHTTPUrl", "") + + +@pytest.fixture +def lambda_function_url_endpoint(infrastructure: dict) -> str: + return infrastructure.get("LambdaFunctionUrl", "") + + +def test_alb_headers_serializer(alb_basic_listener_endpoint): + # GIVEN + url = f"{alb_basic_listener_endpoint}/todos" + + # WHEN + response = data_fetcher.get_http_response(Request(method="GET", url=url)) + + # THEN + assert response.status_code == 200 + assert response.content == b"Hello world" + assert response.headers["content-type"] == "text/plain" + + # Only the last header for key "Foo" should be set + assert response.headers["Foo"] == "zbr" + + # Only the last cookie should be set + assert "MonsterCookie" in response.cookies.keys() + assert "CookieMonster" not in response.cookies.keys() + + +def test_alb_multi_value_headers_serializer(alb_multi_value_header_listener_endpoint): + # GIVEN + url = f"{alb_multi_value_header_listener_endpoint}/todos" + + # WHEN + response = data_fetcher.get_http_response(Request(method="GET", url=url)) + + # THEN + assert response.status_code == 200 + assert response.content == b"Hello world" + assert response.headers["content-type"] == "text/plain" + + # Only the last header for key "Foo" should be set + assert "Foo" in response.headers + foo_headers = [x.strip() for x in response.headers["Foo"].split(",")] + assert sorted(foo_headers) == ["bar", "zbr"] + + # Only the last cookie should be set + assert "MonsterCookie" in response.cookies.keys() + assert "CookieMonster" in response.cookies.keys() + + +def test_api_gateway_rest_headers_serializer(apigw_rest_endpoint): + # GIVEN + url = f"{apigw_rest_endpoint}/todos" + + # WHEN + response = 
data_fetcher.get_http_response(Request(method="GET", url=url)) + + # THEN + assert response.status_code == 200 + assert response.content == b"Hello world" + assert response.headers["content-type"] == "text/plain" + + # Only the last header for key "Foo" should be set + assert "Foo" in response.headers + foo_headers = [x.strip() for x in response.headers["Foo"].split(",")] + assert sorted(foo_headers) == ["bar", "zbr"] + + # Only the last cookie should be set + assert "MonsterCookie" in response.cookies.keys() + assert "CookieMonster" in response.cookies.keys() + + +def test_api_gateway_http_headers_serializer(apigw_http_endpoint): + # GIVEN + url = f"{apigw_http_endpoint}/todos" + + # WHEN + response = data_fetcher.get_http_response(Request(method="GET", url=url)) + + # THEN + assert response.status_code == 200 + assert response.content == b"Hello world" + assert response.headers["content-type"] == "text/plain" + + # Only the last header for key "Foo" should be set + assert "Foo" in response.headers + foo_headers = [x.strip() for x in response.headers["Foo"].split(",")] + assert sorted(foo_headers) == ["bar", "zbr"] + + # Only the last cookie should be set + assert "MonsterCookie" in response.cookies.keys() + assert "CookieMonster" in response.cookies.keys() + + +def test_lambda_function_url_headers_serializer(lambda_function_url_endpoint): + # GIVEN + url = f"{lambda_function_url_endpoint}todos" # the function url endpoint already has the trailing / + + # WHEN + response = data_fetcher.get_http_response(Request(method="GET", url=url)) + + # THEN + assert response.status_code == 200 + assert response.content == b"Hello world" + assert response.headers["content-type"] == "text/plain" + + # Only the last header for key "Foo" should be set + assert "Foo" in response.headers + foo_headers = [x.strip() for x in response.headers["Foo"].split(",")] + assert sorted(foo_headers) == ["bar", "zbr"] + + # Only the last cookie should be set + assert "MonsterCookie" in response.cookies.keys() + assert "CookieMonster" in response.cookies.keys() diff --git a/tests/e2e/utils/data_fetcher/__init__.py b/tests/e2e/utils/data_fetcher/__init__.py index 43024f9946f..be6909537e5 100644 --- a/tests/e2e/utils/data_fetcher/__init__.py +++ b/tests/e2e/utils/data_fetcher/__init__.py @@ -1,4 +1,4 @@ -from tests.e2e.utils.data_fetcher.common import get_lambda_response +from tests.e2e.utils.data_fetcher.common import get_http_response, get_lambda_response from tests.e2e.utils.data_fetcher.logs import get_logs from tests.e2e.utils.data_fetcher.metrics import get_metrics from tests.e2e.utils.data_fetcher.traces import get_traces diff --git a/tests/e2e/utils/data_fetcher/common.py b/tests/e2e/utils/data_fetcher/common.py index 2de8838dc74..29f97eab2de 100644 --- a/tests/e2e/utils/data_fetcher/common.py +++ b/tests/e2e/utils/data_fetcher/common.py @@ -2,8 +2,12 @@ from typing import Optional, Tuple import boto3 +import requests as requests from mypy_boto3_lambda import LambdaClient from mypy_boto3_lambda.type_defs import InvocationResponseTypeDef +from requests import Request, Response +from requests.exceptions import RequestException +from retry import retry def get_lambda_response( @@ -13,3 +17,11 @@ def get_lambda_response( payload = payload or "" execution_time = datetime.utcnow() return client.invoke(FunctionName=lambda_arn, InvocationType="RequestResponse", Payload=payload), execution_time + + +@retry(RequestException, delay=2, jitter=1.5, tries=5) +def get_http_response(request: Request) -> Response: + session = 
requests.Session() + result = session.send(request.prepare()) + result.raise_for_status() + return result diff --git a/tests/e2e/utils/infrastructure.py b/tests/e2e/utils/infrastructure.py index cddd6844504..6a1aa0b86ce 100644 --- a/tests/e2e/utils/infrastructure.py +++ b/tests/e2e/utils/infrastructure.py @@ -15,9 +15,11 @@ from filelock import FileLock from mypy_boto3_cloudformation import CloudFormationClient +from aws_lambda_powertools import PACKAGE_PATH from tests.e2e.utils.asset import Assets PYTHON_RUNTIME_VERSION = f"V{''.join(map(str, sys.version_info[:2]))}" +SOURCE_CODE_ROOT_PATH = PACKAGE_PATH.parent logger = logging.getLogger(__name__) @@ -48,7 +50,7 @@ def __init__(self, feature_name: str, handlers_dir: Path, layer_arn: str = "") - # NOTE: Investigate why cdk.Environment in Stack # changes synthesized asset (no object_key in asset manifest) - self.app = App() + self.app = App(outdir=str(SOURCE_CODE_ROOT_PATH / ".cdk")) self.stack = Stack(self.app, self.stack_name) self.session = boto3.Session() self.cfn: CloudFormationClient = self.session.client("cloudformation") @@ -57,7 +59,7 @@ def __init__(self, feature_name: str, handlers_dir: Path, layer_arn: str = "") - self.account_id = self.session.client("sts").get_caller_identity()["Account"] self.region = self.session.region_name - def create_lambda_functions(self, function_props: Optional[Dict] = None): + def create_lambda_functions(self, function_props: Optional[Dict] = None) -> Dict[str, Function]: """Create Lambda functions available under handlers_dir It creates CloudFormation Outputs for every function found in PascalCase. For example, @@ -69,6 +71,11 @@ def create_lambda_functions(self, function_props: Optional[Dict] = None): function_props: Optional[Dict] Dictionary representing CDK Lambda FunctionProps to override defaults + Returns + ------- + output: Dict[str, Function] + A dict with PascalCased function names and the corresponding CDK Function object + Examples -------- @@ -97,6 +104,8 @@ def create_lambda_functions(self, function_props: Optional[Dict] = None): layer = LayerVersion.from_layer_version_arn(self.stack, "layer-arn", layer_version_arn=self.layer_arn) function_settings_override = function_props or {} + output: Dict[str, Function] = {} + for fn in handlers: fn_name = fn.stem fn_name_pascal_case = fn_name.title().replace("_", "") # basic_handler -> BasicHandler @@ -124,6 +133,10 @@ def create_lambda_functions(self, function_props: Optional[Dict] = None): # CFN Outputs only support hyphen hence pascal case self.add_cfn_output(name=fn_name_pascal_case, value=function.function_name, arn=function.function_arn) + output[fn_name_pascal_case] = function + + return output + def deploy(self) -> Dict[str, str]: """Creates CloudFormation Stack and return stack outputs as dict @@ -287,7 +300,7 @@ def _create_layer(self) -> str: layer_version_name="aws-lambda-powertools-e2e-test", compatible_runtimes=[PythonVersion[PYTHON_RUNTIME_VERSION].value["runtime"]], code=Code.from_asset( - path=".", + path=str(SOURCE_CODE_ROOT_PATH), bundling=BundlingOptions( image=DockerImage.from_build( str(Path(__file__).parent), diff --git a/tests/events/albMultiValueHeadersEvent.json b/tests/events/albMultiValueHeadersEvent.json new file mode 100644 index 00000000000..6b34709605c --- /dev/null +++ b/tests/events/albMultiValueHeadersEvent.json @@ -0,0 +1,35 @@ +{ + "requestContext": { + "elb": { + "targetGroupArn": "arn:aws:elasticloadbalancing:eu-central-1:1234567890:targetgroup/alb-c-Targe-11GDXTPQ7663S/804a67588bfdc10f" + } + }, + 
"httpMethod": "GET", + "path": "/todos", + "multiValueQueryStringParameters": {}, + "multiValueHeaders": { + "accept": [ + "*/*" + ], + "host": [ + "alb-c-LoadB-14POFKYCLBNSF-1815800096.eu-central-1.elb.amazonaws.com" + ], + "user-agent": [ + "curl/7.79.1" + ], + "x-amzn-trace-id": [ + "Root=1-62fa9327-21cdd4da4c6db451490a5fb7" + ], + "x-forwarded-for": [ + "123.123.123.123" + ], + "x-forwarded-port": [ + "80" + ], + "x-forwarded-proto": [ + "http" + ] + }, + "body": "", + "isBase64Encoded": false +} diff --git a/tests/functional/event_handler/test_api_gateway.py b/tests/functional/event_handler/test_api_gateway.py index 4b1d7c1ee32..125a0f8c147 100644 --- a/tests/functional/event_handler/test_api_gateway.py +++ b/tests/functional/event_handler/test_api_gateway.py @@ -92,7 +92,26 @@ def get_lambda() -> Response: # THEN process event correctly # AND set the current_event type as APIGatewayProxyEvent assert result["statusCode"] == 200 - assert result["headers"]["Content-Type"] == content_types.APPLICATION_JSON + assert result["multiValueHeaders"]["Content-Type"] == [content_types.APPLICATION_JSON] + + +def test_api_gateway_v1_cookies(): + # GIVEN a Http API V1 proxy type event + app = APIGatewayRestResolver() + cookie = "CookieMonster" + + @app.get("/my/path") + def get_lambda() -> Response: + assert isinstance(app.current_event, APIGatewayProxyEvent) + return Response(200, content_types.TEXT_PLAIN, "Hello world", cookies=[cookie]) + + # WHEN calling the event handler + result = app(LOAD_GW_EVENT, {}) + + # THEN process event correctly + # AND set the current_event type as APIGatewayProxyEvent + assert result["statusCode"] == 200 + assert result["multiValueHeaders"]["Set-Cookie"] == [cookie] def test_api_gateway(): @@ -110,7 +129,7 @@ def get_lambda() -> Response: # THEN process event correctly # AND set the current_event type as APIGatewayProxyEvent assert result["statusCode"] == 200 - assert result["headers"]["Content-Type"] == content_types.TEXT_HTML + assert result["multiValueHeaders"]["Content-Type"] == [content_types.TEXT_HTML] assert result["body"] == "foo" @@ -132,9 +151,30 @@ def my_path() -> Response: # AND set the current_event type as APIGatewayProxyEventV2 assert result["statusCode"] == 200 assert result["headers"]["Content-Type"] == content_types.TEXT_PLAIN + assert "Cookies" not in result["headers"] assert result["body"] == "tom" +def test_api_gateway_v2_cookies(): + # GIVEN a Http API V2 proxy type event + app = APIGatewayHttpResolver() + cookie = "CookieMonster" + + @app.post("/my/path") + def my_path() -> Response: + assert isinstance(app.current_event, APIGatewayProxyEventV2) + return Response(200, content_types.TEXT_PLAIN, "Hello world", cookies=[cookie]) + + # WHEN calling the event handler + result = app(load_event("apiGatewayProxyV2Event.json"), {}) + + # THEN process event correctly + # AND set the current_event type as APIGatewayProxyEventV2 + assert result["statusCode"] == 200 + assert result["headers"]["Content-Type"] == content_types.TEXT_PLAIN + assert result["cookies"] == [cookie] + + def test_include_rule_matching(): # GIVEN app = ApiGatewayResolver() @@ -149,7 +189,7 @@ def get_lambda(my_id: str, name: str) -> Response: # THEN assert result["statusCode"] == 200 - assert result["headers"]["Content-Type"] == content_types.TEXT_HTML + assert result["multiValueHeaders"]["Content-Type"] == [content_types.TEXT_HTML] assert result["body"] == "path" @@ -200,7 +240,7 @@ def handler(event, context): result = handler(LOAD_GW_EVENT, None) assert result["statusCode"] == 
404 # AND cors headers are not returned - assert "Access-Control-Allow-Origin" not in result["headers"] + assert "Access-Control-Allow-Origin" not in result["multiValueHeaders"] def test_cors(): @@ -223,17 +263,17 @@ def handler(event, context): result = handler(LOAD_GW_EVENT, None) # THEN the headers should include cors headers - assert "headers" in result - headers = result["headers"] - assert headers["Content-Type"] == content_types.TEXT_HTML - assert headers["Access-Control-Allow-Origin"] == "*" + assert "multiValueHeaders" in result + headers = result["multiValueHeaders"] + assert headers["Content-Type"] == [content_types.TEXT_HTML] + assert headers["Access-Control-Allow-Origin"] == ["*"] assert "Access-Control-Allow-Credentials" not in headers - assert headers["Access-Control-Allow-Headers"] == ",".join(sorted(CORSConfig._REQUIRED_HEADERS)) + assert headers["Access-Control-Allow-Headers"] == [",".join(sorted(CORSConfig._REQUIRED_HEADERS))] # THEN for routes without cors flag return no cors headers mock_event = {"path": "/my/request", "httpMethod": "GET"} result = handler(mock_event, None) - assert "Access-Control-Allow-Origin" not in result["headers"] + assert "Access-Control-Allow-Origin" not in result["multiValueHeaders"] def test_cors_preflight_body_is_empty_not_null(): @@ -272,8 +312,8 @@ def handler(event, context): assert isinstance(body, str) decompress = zlib.decompress(base64.b64decode(body), wbits=zlib.MAX_WBITS | 16).decode("UTF-8") assert decompress == expected_value - headers = result["headers"] - assert headers["Content-Encoding"] == "gzip" + headers = result["multiValueHeaders"] + assert headers["Content-Encoding"] == ["gzip"] def test_base64_encode(): @@ -292,8 +332,8 @@ def read_image() -> Response: assert result["isBase64Encoded"] is True body = result["body"] assert isinstance(body, str) - headers = result["headers"] - assert headers["Content-Encoding"] == "gzip" + headers = result["multiValueHeaders"] + assert headers["Content-Encoding"] == ["gzip"] def test_compress_no_accept_encoding(): @@ -348,9 +388,9 @@ def handler(event, context): result = handler({"path": "/success", "httpMethod": "GET"}, None) # THEN return the set Cache-Control - headers = result["headers"] - assert headers["Content-Type"] == content_types.TEXT_HTML - assert headers["Cache-Control"] == "max-age=600" + headers = result["multiValueHeaders"] + assert headers["Content-Type"] == [content_types.TEXT_HTML] + assert headers["Cache-Control"] == ["max-age=600"] def test_cache_control_non_200(): @@ -369,9 +409,9 @@ def handler(event, context): result = handler({"path": "/fails", "httpMethod": "DELETE"}, None) # THEN return a Cache-Control of "no-cache" - headers = result["headers"] - assert headers["Content-Type"] == content_types.TEXT_HTML - assert headers["Cache-Control"] == "no-cache" + headers = result["multiValueHeaders"] + assert headers["Content-Type"] == [content_types.TEXT_HTML] + assert headers["Cache-Control"] == ["no-cache"] def test_rest_api(): @@ -388,7 +428,7 @@ def rest_func() -> Dict: # THEN automatically process this as a json rest api response assert result["statusCode"] == 200 - assert result["headers"]["Content-Type"] == content_types.APPLICATION_JSON + assert result["multiValueHeaders"]["Content-Type"] == [content_types.APPLICATION_JSON] expected_str = json.dumps(expected_dict, separators=(",", ":"), indent=None, cls=Encoder) assert result["body"] == expected_str @@ -403,7 +443,7 @@ def rest_func() -> Response: status_code=404, content_type="used-if-not-set-in-header", 
body="Not found", - headers={"Content-Type": "header-content-type-wins", "custom": "value"}, + headers={"Content-Type": ["header-content-type-wins"], "custom": ["value"]}, ) # WHEN calling the event handler @@ -411,8 +451,8 @@ def rest_func() -> Response: # THEN the result can include some additional field control like overriding http headers assert result["statusCode"] == 404 - assert result["headers"]["Content-Type"] == "header-content-type-wins" - assert result["headers"]["custom"] == "value" + assert result["multiValueHeaders"]["Content-Type"] == ["header-content-type-wins"] + assert result["multiValueHeaders"]["custom"] == ["value"] assert result["body"] == "Not found" @@ -441,16 +481,16 @@ def another_one(): result = app(event, None) # THEN routes by default return the custom cors headers - assert "headers" in result - headers = result["headers"] - assert headers["Content-Type"] == content_types.APPLICATION_JSON - assert headers["Access-Control-Allow-Origin"] == cors_config.allow_origin - expected_allows_headers = ",".join(sorted(set(allow_header + cors_config._REQUIRED_HEADERS))) + assert "multiValueHeaders" in result + headers = result["multiValueHeaders"] + assert headers["Content-Type"] == [content_types.APPLICATION_JSON] + assert headers["Access-Control-Allow-Origin"] == [cors_config.allow_origin] + expected_allows_headers = [",".join(sorted(set(allow_header + cors_config._REQUIRED_HEADERS)))] assert headers["Access-Control-Allow-Headers"] == expected_allows_headers - assert headers["Access-Control-Expose-Headers"] == ",".join(cors_config.expose_headers) - assert headers["Access-Control-Max-Age"] == str(cors_config.max_age) + assert headers["Access-Control-Expose-Headers"] == [",".join(cors_config.expose_headers)] + assert headers["Access-Control-Max-Age"] == [str(cors_config.max_age)] assert "Access-Control-Allow-Credentials" in headers - assert headers["Access-Control-Allow-Credentials"] == "true" + assert headers["Access-Control-Allow-Credentials"] == ["true"] # AND custom cors was set on the app assert isinstance(app._cors, CORSConfig) @@ -459,7 +499,7 @@ def another_one(): # AND routes without cors don't include "Access-Control" headers event = {"path": "/another-one", "httpMethod": "GET"} result = app(event, None) - headers = result["headers"] + headers = result["multiValueHeaders"] assert "Access-Control-Allow-Origin" not in headers @@ -474,7 +514,7 @@ def test_no_content_response(): # THEN return an None body and no Content-Type header assert result["statusCode"] == response.status_code assert result["body"] is None - headers = result["headers"] + headers = result["multiValueHeaders"] assert "Content-Type" not in headers @@ -489,7 +529,7 @@ def test_no_matches_with_cors(): # THEN return a 404 # AND cors headers are returned assert result["statusCode"] == 404 - assert "Access-Control-Allow-Origin" in result["headers"] + assert "Access-Control-Allow-Origin" in result["multiValueHeaders"] assert "Not found" in result["body"] @@ -517,10 +557,10 @@ def post_no_cors(): # AND include Access-Control-Allow-Methods of the cors methods used assert result["statusCode"] == 204 assert result["body"] == "" - headers = result["headers"] + headers = result["multiValueHeaders"] assert "Content-Type" not in headers - assert "Access-Control-Allow-Origin" in result["headers"] - assert headers["Access-Control-Allow-Methods"] == "DELETE,GET,OPTIONS" + assert "Access-Control-Allow-Origin" in result["multiValueHeaders"] + assert headers["Access-Control-Allow-Methods"] == 
[",".join(sorted(["DELETE", "GET", "OPTIONS"]))] def test_custom_preflight_response(): @@ -535,7 +575,7 @@ def custom_preflight(): status_code=200, content_type=content_types.TEXT_HTML, body="Foo", - headers={"Access-Control-Allow-Methods": "CUSTOM"}, + headers={"Access-Control-Allow-Methods": ["CUSTOM"]}, ) @app.route(method="CUSTOM", rule="/some-call", cors=True) @@ -548,10 +588,10 @@ def custom_method(): # THEN return the custom preflight response assert result["statusCode"] == 200 assert result["body"] == "Foo" - headers = result["headers"] - assert headers["Content-Type"] == content_types.TEXT_HTML - assert "Access-Control-Allow-Origin" in result["headers"] - assert headers["Access-Control-Allow-Methods"] == "CUSTOM" + headers = result["multiValueHeaders"] + assert headers["Content-Type"] == [content_types.TEXT_HTML] + assert "Access-Control-Allow-Origin" in result["multiValueHeaders"] + assert headers["Access-Control-Allow-Methods"] == ["CUSTOM"] def test_service_error_responses(json_dump): @@ -569,7 +609,7 @@ def bad_request_error(): # THEN return the bad request error response # AND status code equals 400 assert result["statusCode"] == 400 - assert result["headers"]["Content-Type"] == content_types.APPLICATION_JSON + assert result["multiValueHeaders"]["Content-Type"] == [content_types.APPLICATION_JSON] expected = {"statusCode": 400, "message": "Missing required parameter"} assert result["body"] == json_dump(expected) @@ -584,7 +624,7 @@ def unauthorized_error(): # THEN return the unauthorized error response # AND status code equals 401 assert result["statusCode"] == 401 - assert result["headers"]["Content-Type"] == content_types.APPLICATION_JSON + assert result["multiValueHeaders"]["Content-Type"] == [content_types.APPLICATION_JSON] expected = {"statusCode": 401, "message": "Unauthorized"} assert result["body"] == json_dump(expected) @@ -599,7 +639,7 @@ def not_found_error(): # THEN return the not found error response # AND status code equals 404 assert result["statusCode"] == 404 - assert result["headers"]["Content-Type"] == content_types.APPLICATION_JSON + assert result["multiValueHeaders"]["Content-Type"] == [content_types.APPLICATION_JSON] expected = {"statusCode": 404, "message": "Not found"} assert result["body"] == json_dump(expected) @@ -614,7 +654,7 @@ def internal_server_error(): # THEN return the internal server error response # AND status code equals 500 assert result["statusCode"] == 500 - assert result["headers"]["Content-Type"] == content_types.APPLICATION_JSON + assert result["multiValueHeaders"]["Content-Type"] == [content_types.APPLICATION_JSON] expected = {"statusCode": 500, "message": "Internal server error"} assert result["body"] == json_dump(expected) @@ -629,8 +669,8 @@ def service_error(): # THEN return the service error response # AND status code equals 502 assert result["statusCode"] == 502 - assert result["headers"]["Content-Type"] == content_types.APPLICATION_JSON - assert "Access-Control-Allow-Origin" in result["headers"] + assert result["multiValueHeaders"]["Content-Type"] == [content_types.APPLICATION_JSON] + assert "Access-Control-Allow-Origin" in result["multiValueHeaders"] expected = {"statusCode": 502, "message": "Something went wrong!"} assert result["body"] == json_dump(expected) @@ -653,8 +693,8 @@ def raises_error(): # AND include the exception traceback in the response assert result["statusCode"] == 500 assert "Traceback (most recent call last)" in result["body"] - headers = result["headers"] - assert headers["Content-Type"] == 
content_types.TEXT_PLAIN + headers = result["multiValueHeaders"] + assert headers["Content-Type"] == [content_types.TEXT_PLAIN] def test_debug_unhandled_exceptions_debug_off(): @@ -941,7 +981,7 @@ def base(): # THEN process event correctly assert result["statusCode"] == 200 - assert result["headers"]["Content-Type"] == content_types.APPLICATION_JSON + assert result["multiValueHeaders"]["Content-Type"] == [content_types.APPLICATION_JSON] def test_api_gateway_app_router(): @@ -959,7 +999,7 @@ def foo(): # THEN process event correctly assert result["statusCode"] == 200 - assert result["headers"]["Content-Type"] == content_types.APPLICATION_JSON + assert result["multiValueHeaders"]["Content-Type"] == [content_types.APPLICATION_JSON] def test_api_gateway_app_router_with_params(): @@ -985,7 +1025,7 @@ def foo(account_id): # THEN process event correctly assert result["statusCode"] == 200 - assert result["headers"]["Content-Type"] == content_types.APPLICATION_JSON + assert result["multiValueHeaders"]["Content-Type"] == [content_types.APPLICATION_JSON] def test_api_gateway_app_router_with_prefix(): @@ -1004,7 +1044,7 @@ def foo(): # THEN process event correctly assert result["statusCode"] == 200 - assert result["headers"]["Content-Type"] == content_types.APPLICATION_JSON + assert result["multiValueHeaders"]["Content-Type"] == [content_types.APPLICATION_JSON] def test_api_gateway_app_router_with_prefix_equals_path(): @@ -1024,7 +1064,7 @@ def foo(): # THEN process event correctly assert result["statusCode"] == 200 - assert result["headers"]["Content-Type"] == content_types.APPLICATION_JSON + assert result["multiValueHeaders"]["Content-Type"] == [content_types.APPLICATION_JSON] def test_api_gateway_app_router_with_different_methods(): @@ -1074,7 +1114,7 @@ def patch_func(): result = app(LOAD_GW_EVENT, None) assert result["statusCode"] == 404 # AND cors headers are not returned - assert "Access-Control-Allow-Origin" not in result["headers"] + assert "Access-Control-Allow-Origin" not in result["multiValueHeaders"] def test_duplicate_routes(): @@ -1133,11 +1173,11 @@ def foo(account_id): # THEN events are processed correctly assert get_result["statusCode"] == 200 - assert get_result["headers"]["Content-Type"] == content_types.APPLICATION_JSON + assert get_result["multiValueHeaders"]["Content-Type"] == [content_types.APPLICATION_JSON] assert post_result["statusCode"] == 200 - assert post_result["headers"]["Content-Type"] == content_types.APPLICATION_JSON + assert post_result["multiValueHeaders"]["Content-Type"] == [content_types.APPLICATION_JSON] assert put_result["statusCode"] == 404 - assert put_result["headers"]["Content-Type"] == content_types.APPLICATION_JSON + assert put_result["multiValueHeaders"]["Content-Type"] == [content_types.APPLICATION_JSON] def test_api_gateway_app_router_access_to_resolver(): @@ -1156,7 +1196,7 @@ def foo(): result = app(LOAD_GW_EVENT, {}) assert result["statusCode"] == 200 - assert result["headers"]["Content-Type"] == content_types.APPLICATION_JSON + assert result["multiValueHeaders"]["Content-Type"] == [content_types.APPLICATION_JSON] def test_exception_handler(): @@ -1182,7 +1222,7 @@ def get_lambda() -> Response: # THEN call the exception_handler assert result["statusCode"] == 418 - assert result["headers"]["Content-Type"] == content_types.TEXT_HTML + assert result["multiValueHeaders"]["Content-Type"] == [content_types.TEXT_HTML] assert result["body"] == "Foo!" 
@@ -1209,7 +1249,7 @@ def get_lambda() -> Response: # THEN call the exception_handler assert result["statusCode"] == 500 - assert result["headers"]["Content-Type"] == content_types.APPLICATION_JSON + assert result["multiValueHeaders"]["Content-Type"] == [content_types.APPLICATION_JSON] assert result["body"] == "CUSTOM ERROR FORMAT" @@ -1228,7 +1268,7 @@ def handle_not_found(exc: NotFoundError) -> Response: # THEN call the exception_handler assert result["statusCode"] == 404 - assert result["headers"]["Content-Type"] == content_types.TEXT_PLAIN + assert result["multiValueHeaders"]["Content-Type"] == [content_types.TEXT_PLAIN] assert result["body"] == "I am a teapot!" @@ -1266,7 +1306,7 @@ def get_lambda() -> Response: # THEN call the exception_handler assert result["statusCode"] == 400 - assert result["headers"]["Content-Type"] == content_types.APPLICATION_JSON + assert result["multiValueHeaders"]["Content-Type"] == [content_types.APPLICATION_JSON] expected = {"statusCode": 400, "message": "Bad request"} assert result["body"] == json_dump(expected) diff --git a/tests/functional/event_handler/test_lambda_function_url.py b/tests/functional/event_handler/test_lambda_function_url.py index dc00c535580..ae0a231d46b 100644 --- a/tests/functional/event_handler/test_lambda_function_url.py +++ b/tests/functional/event_handler/test_lambda_function_url.py @@ -21,9 +21,30 @@ def foo(): # AND set the current_event type as LambdaFunctionUrlEvent assert result["statusCode"] == 200 assert result["headers"]["Content-Type"] == content_types.TEXT_HTML + assert "Cookies" not in result["headers"] assert result["body"] == "foo" +def test_lambda_function_url_event_with_cookies(): + # GIVEN a Lambda Function Url type event + app = LambdaFunctionUrlResolver() + cookie = "CookieMonster" + + @app.get("/") + def foo(): + assert isinstance(app.current_event, LambdaFunctionUrlEvent) + assert app.lambda_context == {} + return Response(200, content_types.TEXT_PLAIN, "foo", cookies=[cookie]) + + # WHEN calling the event handler + result = app(load_event("lambdaFunctionUrlEvent.json"), {}) + + # THEN process event correctly + # AND set the current_event type as LambdaFunctionUrlEvent + assert result["statusCode"] == 200 + assert result["cookies"] == [cookie] + + def test_lambda_function_url_no_matches(): # GIVEN a Lambda Function Url type event app = LambdaFunctionUrlResolver() diff --git a/tests/functional/test_headers_serializer.py b/tests/functional/test_headers_serializer.py new file mode 100644 index 00000000000..8a27ce8baa8 --- /dev/null +++ b/tests/functional/test_headers_serializer.py @@ -0,0 +1,147 @@ +from collections import defaultdict + +import pytest + +from aws_lambda_powertools.shared.headers_serializer import ( + HttpApiHeadersSerializer, + MultiValueHeadersSerializer, + SingleValueHeadersSerializer, +) + + +def test_http_api_headers_serializer(): + cookies = ["UUID=12345", "SSID=0xdeadbeef"] + header_values = ["bar", "zbr"] + headers = {"Foo": header_values} + + serializer = HttpApiHeadersSerializer() + payload = serializer.serialize(headers=headers, cookies=cookies) + + assert payload["cookies"] == cookies + assert payload["headers"]["Foo"] == ", ".join(header_values) + + +def test_http_api_headers_serializer_with_empty_values(): + serializer = HttpApiHeadersSerializer() + payload = serializer.serialize(headers={}, cookies=[]) + assert payload == {"headers": {}, "cookies": []} + + +def test_http_api_headers_serializer_with_headers_only(): + content_type = "text/html" + serializer = 
HttpApiHeadersSerializer() + payload = serializer.serialize(headers={"Content-Type": [content_type]}, cookies=[]) + assert payload["headers"]["Content-Type"] == content_type + + +def test_http_api_headers_serializer_with_single_headers_only(): + content_type = "text/html" + serializer = HttpApiHeadersSerializer() + payload = serializer.serialize(headers={"Content-Type": content_type}, cookies=[]) + assert payload["headers"]["Content-Type"] == content_type + + +def test_http_api_headers_serializer_with_cookies_only(): + cookies = ["UUID=12345", "SSID=0xdeadbeef"] + serializer = HttpApiHeadersSerializer() + payload = serializer.serialize(headers={}, cookies=cookies) + assert payload["cookies"] == cookies + + +def test_multi_value_headers_serializer(): + cookies = ["UUID=12345", "SSID=0xdeadbeef"] + header_values = ["bar", "zbr"] + headers = {"Foo": header_values} + + serializer = MultiValueHeadersSerializer() + payload = serializer.serialize(headers=headers, cookies=cookies) + + assert payload["multiValueHeaders"]["Set-Cookie"] == cookies + assert payload["multiValueHeaders"]["Foo"] == header_values + + +def test_multi_value_headers_serializer_with_headers_only(): + content_type = "text/html" + serializer = MultiValueHeadersSerializer() + payload = serializer.serialize(headers={"Content-Type": [content_type]}, cookies=[]) + assert payload["multiValueHeaders"]["Content-Type"] == [content_type] + + +def test_multi_value_headers_serializer_with_single_headers_only(): + content_type = "text/html" + serializer = MultiValueHeadersSerializer() + payload = serializer.serialize(headers={"Content-Type": content_type}, cookies=[]) + assert payload["multiValueHeaders"]["Content-Type"] == [content_type] + + +def test_multi_value_headers_serializer_with_cookies_only(): + cookie = "UUID=12345" + serializer = MultiValueHeadersSerializer() + payload = serializer.serialize(headers={}, cookies=[cookie]) + assert payload["multiValueHeaders"]["Set-Cookie"] == [cookie] + + +def test_multi_value_headers_serializer_with_empty_values(): + serializer = MultiValueHeadersSerializer() + payload = serializer.serialize(headers={}, cookies=[]) + assert payload["multiValueHeaders"] == defaultdict(list) + + +def test_single_value_headers_serializer(): + cookie = "UUID=12345" + content_type = "text/html" + headers = {"Content-Type": [content_type]} + + serializer = SingleValueHeadersSerializer() + payload = serializer.serialize(headers=headers, cookies=[cookie]) + assert payload["headers"]["Content-Type"] == content_type + assert payload["headers"]["Set-Cookie"] == cookie + + +def test_single_value_headers_serializer_with_headers_only(): + content_type = "text/html" + serializer = SingleValueHeadersSerializer() + payload = serializer.serialize(headers={"Content-Type": [content_type]}, cookies=[]) + assert payload["headers"]["Content-Type"] == content_type + + +def test_single_value_headers_serializer_with_single_headers_only(): + content_type = "text/html" + serializer = SingleValueHeadersSerializer() + payload = serializer.serialize(headers={"Content-Type": content_type}, cookies=[]) + assert payload["headers"]["Content-Type"] == content_type + + +def test_single_value_headers_serializer_with_cookies_only(): + cookie = "UUID=12345" + serializer = SingleValueHeadersSerializer() + payload = serializer.serialize(headers={}, cookies=[cookie]) + assert payload["headers"] == {"Set-Cookie": cookie} + + +def test_single_value_headers_serializer_with_empty_values(): + serializer = SingleValueHeadersSerializer() + payload = 
serializer.serialize(headers={}, cookies=[]) + assert payload["headers"] == {} + + +def test_single_value_headers_with_multiple_cookies_warning(): + cookies = ["UUID=12345", "SSID=0xdeadbeef"] + warning_message = "Can't encode more than one cookie in the response. Sending the last cookie only." + serializer = SingleValueHeadersSerializer() + + with pytest.warns(match=warning_message): + payload = serializer.serialize(cookies=cookies, headers={}) + + assert payload["headers"]["Set-Cookie"] == cookies[-1] + + +def test_single_value_headers_with_multiple_header_values_warning(): + headers = {"Foo": ["bar", "zbr"]} + warning_message = "Can't encode more than one header value for the same key." + serializer = SingleValueHeadersSerializer() + + with pytest.warns(match=warning_message): + payload = serializer.serialize(cookies=[], headers=headers) + + assert payload["headers"]["Foo"] == headers["Foo"][-1] From e62b6cc273c71d3a4eef56dcc0c4f92f60a025f3 Mon Sep 17 00:00:00 2001 From: heitorlessa Date: Mon, 29 Aug 2022 18:00:47 +0200 Subject: [PATCH 35/49] docs(homepage): note about v2 version --- docs/index.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/index.md b/docs/index.md index 78b76c7bed6..1c6c4e80995 100644 --- a/docs/index.md +++ b/docs/index.md @@ -5,6 +5,9 @@ description: AWS Lambda Powertools for Python +???+ danger + This documentation is for v2 that is not yet released. + A suite of utilities for AWS Lambda functions to ease adopting best practices such as tracing, structured logging, custom metrics, idempotency, batching, and more. ???+ note From db4a594463d0dff06d3a06df4bf9dd6ca55ff83e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=BAben=20Fonseca?= Date: Wed, 31 Aug 2022 10:26:49 +0200 Subject: [PATCH 36/49] fix(event_handler): fix bug with previous array implementation --- aws_lambda_powertools/event_handler/api_gateway.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aws_lambda_powertools/event_handler/api_gateway.py b/aws_lambda_powertools/event_handler/api_gateway.py index 11adcfc2ed6..2d315fcc434 100644 --- a/aws_lambda_powertools/event_handler/api_gateway.py +++ b/aws_lambda_powertools/event_handler/api_gateway.py @@ -206,7 +206,7 @@ def _add_cache_control(self, cache_control: str): def _compress(self): """Compress the response body, but only if `Accept-Encoding` headers includes gzip.""" - self.response.headers["Content-Encoding"].append("gzip") + self.response.headers["Content-Encoding"] = "gzip" if isinstance(self.response.body, str): logger.debug("Converting string response to bytes before compressing it") self.response.body = bytes(self.response.body, "utf-8") From 0678af2171ce4396c93ba44825764acf21026616 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=BAben=20Fonseca?= Date: Wed, 31 Aug 2022 10:55:09 +0200 Subject: [PATCH 37/49] chore(bandit): update baseline --- bandit.baseline | 1871 ++++++++++++++++++++++++++++++++++++++++++----- 1 file changed, 1694 insertions(+), 177 deletions(-) diff --git a/bandit.baseline b/bandit.baseline index 2421c4f1e24..144682ffccb 100644 --- a/bandit.baseline +++ b/bandit.baseline @@ -1,226 +1,1743 @@ { "errors": [], - "generated_at": "2020-05-12T08:59:59Z", + "generated_at": "2022-08-31T08:51:10Z", "metrics": { "_totals": { - "CONFIDENCE.HIGH": 1.0, - "CONFIDENCE.LOW": 0.0, - "CONFIDENCE.MEDIUM": 0.0, - "CONFIDENCE.UNDEFINED": 0.0, - "SEVERITY.HIGH": 0.0, - "SEVERITY.LOW": 1.0, - "SEVERITY.MEDIUM": 0.0, - "SEVERITY.UNDEFINED": 0.0, - "loc": 1375, - "nosec": 0 + "CONFIDENCE.HIGH": 1, + "CONFIDENCE.LOW": 0, + 
"CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 1, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 11903, + "nosec": 0, + "skipped_tests": 0 }, "aws_lambda_powertools/__init__.py": { - "CONFIDENCE.HIGH": 0.0, - "CONFIDENCE.LOW": 0.0, - "CONFIDENCE.MEDIUM": 0.0, - "CONFIDENCE.UNDEFINED": 0.0, - "SEVERITY.HIGH": 0.0, - "SEVERITY.LOW": 0.0, - "SEVERITY.MEDIUM": 0.0, - "SEVERITY.UNDEFINED": 0.0, - "loc": 6, - "nosec": 0 - }, - "aws_lambda_powertools/helper/__init__.py": { - "CONFIDENCE.HIGH": 0.0, - "CONFIDENCE.LOW": 0.0, - "CONFIDENCE.MEDIUM": 0.0, - "CONFIDENCE.UNDEFINED": 0.0, - "SEVERITY.HIGH": 0.0, - "SEVERITY.LOW": 0.0, - "SEVERITY.MEDIUM": 0.0, - "SEVERITY.UNDEFINED": 0.0, - "loc": 2, - "nosec": 0 - }, - "aws_lambda_powertools/helper/models.py": { - "CONFIDENCE.HIGH": 0.0, - "CONFIDENCE.LOW": 0.0, - "CONFIDENCE.MEDIUM": 0.0, - "CONFIDENCE.UNDEFINED": 0.0, - "SEVERITY.HIGH": 0.0, - "SEVERITY.LOW": 0.0, - "SEVERITY.MEDIUM": 0.0, - "SEVERITY.UNDEFINED": 0.0, - "loc": 108, - "nosec": 0 + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 9, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/event_handler/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 23, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/event_handler/api_gateway.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 645, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/event_handler/appsync.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 140, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/event_handler/content_types.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 3, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/event_handler/cookies.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 58, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/event_handler/exceptions.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 30, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/event_handler/lambda_function_url.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 40, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/exceptions/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + 
"CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 3, + "nosec": 0, + "skipped_tests": 0 }, "aws_lambda_powertools/logging/__init__.py": { - "CONFIDENCE.HIGH": 0.0, - "CONFIDENCE.LOW": 0.0, - "CONFIDENCE.MEDIUM": 0.0, - "CONFIDENCE.UNDEFINED": 0.0, - "SEVERITY.HIGH": 0.0, - "SEVERITY.LOW": 0.0, - "SEVERITY.MEDIUM": 0.0, - "SEVERITY.UNDEFINED": 0.0, - "loc": 5, - "nosec": 0 + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 4, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/logging/correlation_paths.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 9, + "nosec": 0, + "skipped_tests": 0 }, "aws_lambda_powertools/logging/exceptions.py": { - "CONFIDENCE.HIGH": 0.0, - "CONFIDENCE.LOW": 0.0, - "CONFIDENCE.MEDIUM": 0.0, - "CONFIDENCE.UNDEFINED": 0.0, - "SEVERITY.HIGH": 0.0, - "SEVERITY.LOW": 0.0, - "SEVERITY.MEDIUM": 0.0, - "SEVERITY.UNDEFINED": 0.0, + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, "loc": 2, - "nosec": 0 + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/logging/filters.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 12, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/logging/formatter.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 220, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/logging/lambda_context.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 47, + "nosec": 0, + "skipped_tests": 0 }, "aws_lambda_powertools/logging/logger.py": { - "CONFIDENCE.HIGH": 1.0, - "CONFIDENCE.LOW": 0.0, - "CONFIDENCE.MEDIUM": 0.0, - "CONFIDENCE.UNDEFINED": 0.0, - "SEVERITY.HIGH": 0.0, - "SEVERITY.LOW": 1.0, - "SEVERITY.MEDIUM": 0.0, - "SEVERITY.UNDEFINED": 0.0, - "loc": 375, - "nosec": 0 + "CONFIDENCE.HIGH": 1, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 1, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 366, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/logging/utils.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 58, + "nosec": 0, + "skipped_tests": 0 }, "aws_lambda_powertools/metrics/__init__.py": { - "CONFIDENCE.HIGH": 0.0, - "CONFIDENCE.LOW": 0.0, - "CONFIDENCE.MEDIUM": 0.0, - "CONFIDENCE.UNDEFINED": 0.0, - "SEVERITY.HIGH": 0.0, - "SEVERITY.LOW": 0.0, - "SEVERITY.MEDIUM": 0.0, - "SEVERITY.UNDEFINED": 0.0, - "loc": 15, - 
"nosec": 0 + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 14, + "nosec": 0, + "skipped_tests": 0 }, "aws_lambda_powertools/metrics/base.py": { - "CONFIDENCE.HIGH": 0.0, - "CONFIDENCE.LOW": 0.0, - "CONFIDENCE.MEDIUM": 0.0, - "CONFIDENCE.UNDEFINED": 0.0, - "SEVERITY.HIGH": 0.0, - "SEVERITY.LOW": 0.0, - "SEVERITY.MEDIUM": 0.0, - "SEVERITY.UNDEFINED": 0.0, - "loc": 162, - "nosec": 0 + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 241, + "nosec": 0, + "skipped_tests": 0 }, "aws_lambda_powertools/metrics/exceptions.py": { - "CONFIDENCE.HIGH": 0.0, - "CONFIDENCE.LOW": 0.0, - "CONFIDENCE.MEDIUM": 0.0, - "CONFIDENCE.UNDEFINED": 0.0, - "SEVERITY.HIGH": 0.0, - "SEVERITY.LOW": 0.0, - "SEVERITY.MEDIUM": 0.0, - "SEVERITY.UNDEFINED": 0.0, - "loc": 12, - "nosec": 0 + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 9, + "nosec": 0, + "skipped_tests": 0 }, "aws_lambda_powertools/metrics/metric.py": { - "CONFIDENCE.HIGH": 0.0, - "CONFIDENCE.LOW": 0.0, - "CONFIDENCE.MEDIUM": 0.0, - "CONFIDENCE.UNDEFINED": 0.0, - "SEVERITY.HIGH": 0.0, - "SEVERITY.LOW": 0.0, - "SEVERITY.MEDIUM": 0.0, - "SEVERITY.UNDEFINED": 0.0, - "loc": 90, - "nosec": 0 + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 95, + "nosec": 0, + "skipped_tests": 0 }, "aws_lambda_powertools/metrics/metrics.py": { - "CONFIDENCE.HIGH": 0.0, - "CONFIDENCE.LOW": 0.0, - "CONFIDENCE.MEDIUM": 0.0, - "CONFIDENCE.UNDEFINED": 0.0, - "SEVERITY.HIGH": 0.0, - "SEVERITY.LOW": 0.0, - "SEVERITY.MEDIUM": 0.0, - "SEVERITY.UNDEFINED": 0.0, - "loc": 82, - "nosec": 0 + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 170, + "nosec": 0, + "skipped_tests": 0 }, "aws_lambda_powertools/middleware_factory/__init__.py": { - "CONFIDENCE.HIGH": 0.0, - "CONFIDENCE.LOW": 0.0, - "CONFIDENCE.MEDIUM": 0.0, - "CONFIDENCE.UNDEFINED": 0.0, - "SEVERITY.HIGH": 0.0, - "SEVERITY.LOW": 0.0, - "SEVERITY.MEDIUM": 0.0, - "SEVERITY.UNDEFINED": 0.0, + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, "loc": 3, - "nosec": 0 + "nosec": 0, + "skipped_tests": 0 }, "aws_lambda_powertools/middleware_factory/exceptions.py": { - "CONFIDENCE.HIGH": 0.0, - "CONFIDENCE.LOW": 0.0, - "CONFIDENCE.MEDIUM": 0.0, - "CONFIDENCE.UNDEFINED": 0.0, - "SEVERITY.HIGH": 0.0, - "SEVERITY.LOW": 0.0, - "SEVERITY.MEDIUM": 0.0, - "SEVERITY.UNDEFINED": 0.0, - "loc": 3, - "nosec": 0 + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 2, + "nosec": 0, + "skipped_tests": 0 }, "aws_lambda_powertools/middleware_factory/factory.py": { - "CONFIDENCE.HIGH": 0.0, - 
"CONFIDENCE.LOW": 0.0, - "CONFIDENCE.MEDIUM": 0.0, - "CONFIDENCE.UNDEFINED": 0.0, - "SEVERITY.HIGH": 0.0, - "SEVERITY.LOW": 0.0, - "SEVERITY.MEDIUM": 0.0, - "SEVERITY.UNDEFINED": 0.0, - "loc": 103, - "nosec": 0 + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 106, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/package_logger.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 5, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/shared/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 0, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/shared/cache_dict.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 25, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/shared/constants.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 24, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/shared/functions.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 46, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/shared/headers_serializer.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 85, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/shared/json_encoder.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 13, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/shared/lazy_import.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 24, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/shared/types.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 3, + "nosec": 0, + "skipped_tests": 0 }, "aws_lambda_powertools/tracing/__init__.py": { - "CONFIDENCE.HIGH": 0.0, - "CONFIDENCE.LOW": 0.0, - "CONFIDENCE.MEDIUM": 0.0, - "CONFIDENCE.UNDEFINED": 0.0, - "SEVERITY.HIGH": 0.0, - "SEVERITY.LOW": 0.0, - "SEVERITY.MEDIUM": 0.0, - "SEVERITY.UNDEFINED": 0.0, - "loc": 6, - "nosec": 0 + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, 
+ "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 5, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/tracing/base.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 117, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/tracing/extensions.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 11, + "nosec": 0, + "skipped_tests": 0 }, "aws_lambda_powertools/tracing/tracer.py": { - "CONFIDENCE.HIGH": 0.0, - "CONFIDENCE.LOW": 0.0, - "CONFIDENCE.MEDIUM": 0.0, - "CONFIDENCE.UNDEFINED": 0.0, - "SEVERITY.HIGH": 0.0, - "SEVERITY.LOW": 0.0, - "SEVERITY.MEDIUM": 0.0, - "SEVERITY.UNDEFINED": 0.0, + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 641, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 1, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/batch/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 24, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/batch/base.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 333, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/batch/exceptions.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 33, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/batch/sqs.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 204, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 36, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/active_mq_event.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 100, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/alb_event.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + 
"SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 32, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/api_gateway_authorizer_event.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 439, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/api_gateway_proxy_event.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 203, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/appsync/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 0, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/appsync/scalar_types_utils.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 73, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/appsync_authorizer_event.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 89, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/appsync_resolver_event.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 178, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/cloud_watch_logs_event.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 77, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/code_pipeline_job_event.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 185, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/cognito_user_pool_event.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 624, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/common.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 316, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/connect_contact_flow_event.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + 
"CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 128, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/dynamo_db_stream_event.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 237, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/event_bridge_event.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 53, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/event_source.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 31, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/kinesis_stream_event.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 77, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/lambda_function_url_event.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 12, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/rabbit_mq_event.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 93, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/s3_event.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 143, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/s3_object_event.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 256, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/ses_event.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 202, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/sns_event.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 91, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/data_classes/sqs_event.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + 
"CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 111, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/feature_flags/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 14, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/feature_flags/appconfig.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 92, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/feature_flags/base.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 50, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/feature_flags/exceptions.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 8, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/feature_flags/feature_flags.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 247, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/feature_flags/schema.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 216, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/idempotency/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 7, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/idempotency/base.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 177, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/idempotency/config.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 48, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/idempotency/exceptions.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 35, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/idempotency/idempotency.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + 
"SEVERITY.UNDEFINED": 0, + "loc": 128, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/idempotency/persistence/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 0, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/idempotency/persistence/base.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, "loc": 401, - "nosec": 0 + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/idempotency/persistence/dynamodb.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 201, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/jmespath_utils/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 56, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/jmespath_utils/envelopes.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 8, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parameters/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 23, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parameters/appconfig.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 151, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parameters/base.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 272, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parameters/dynamodb.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 178, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parameters/exceptions.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 7, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parameters/secrets.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 129, + "nosec": 0, + "skipped_tests": 0 + 
}, + "aws_lambda_powertools/utilities/parameters/ssm.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 277, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 17, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/envelopes/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 23, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/envelopes/apigw.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 25, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/envelopes/apigwv2.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 25, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/envelopes/base.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 42, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/envelopes/cloudwatch.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 32, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/envelopes/dynamodb.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 34, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/envelopes/event_bridge.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 25, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/envelopes/kinesis.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 35, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/envelopes/lambda_function_url.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 25, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/envelopes/sns.py": { + 
"CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 59, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/envelopes/sqs.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 30, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/exceptions.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 4, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/models/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 100, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/models/alb.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 14, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/models/apigw.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 77, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/models/apigwv2.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 55, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/models/cloudwatch.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 33, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/models/dynamodb.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 26, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/models/event_bridge.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 14, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/models/kinesis.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 31, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/models/lambda_function_url.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, 
+ "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 12, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/models/s3.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 49, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/models/s3_object_event.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 40, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/models/ses.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 52, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/models/sns.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 36, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/models/sqs.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 32, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/parser.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 128, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/pydantic.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 1, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/parser/types.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 11, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/typing/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 5, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/typing/lambda_client_context.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 18, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/typing/lambda_client_context_mobile_client.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 22, + "nosec": 0, + 
"skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/typing/lambda_cognito_identity.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 12, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/typing/lambda_context.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 64, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/validation/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 12, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/validation/base.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 39, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/validation/envelopes.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 9, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/validation/exceptions.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 53, + "nosec": 0, + "skipped_tests": 0 + }, + "aws_lambda_powertools/utilities/validation/validator.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 168, + "nosec": 0, + "skipped_tests": 0 } }, "results": [ { - "code": "369 try:\n370 if self.sampling_rate and random.random() <= float(self.sampling_rate):\n371 logger.debug(\"Setting log level to Debug due to sampling rate\")\n", + "code": "259 try:\n260 if self.sampling_rate and random.random() <= float(self.sampling_rate):\n261 logger.debug(\"Setting log level to Debug due to sampling rate\")\n", + "col_offset": 38, "filename": "aws_lambda_powertools/logging/logger.py", "issue_confidence": "HIGH", + "issue_cwe": { + "id": 330, + "link": "https://cwe.mitre.org/data/definitions/330.html" + }, "issue_severity": "LOW", "issue_text": "Standard pseudo-random generators are not suitable for security/cryptographic purposes.", - "line_number": 370, + "line_number": 260, "line_range": [ - 370 + 260 ], - "more_info": "https://bandit.readthedocs.io/en/latest/blacklists/blacklist_calls.html#b311-random", + "more_info": "https://bandit.readthedocs.io/en/1.7.4/blacklists/blacklist_calls.html#b311-random", "test_id": "B311", "test_name": "blacklist" } ] -} +} \ No newline at end of file From b5f4ad9173ca694e495d9e78d9b452b2a06d0559 Mon Sep 17 00:00:00 2001 From: Release bot Date: Wed, 31 Aug 2022 09:49:53 +0000 Subject: [PATCH 38/49] update changelog with latest changes --- CHANGELOG.md | 21 +++++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) diff --git 
a/CHANGELOG.md b/CHANGELOG.md index 335e1cc733c..ef29bb20d79 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,30 +6,47 @@ ## Bug Fixes +* **ci:** pass core fns to large pr workflow script +* **ci:** on_label permissioning model & workflow execution * **ci:** ensure PR_AUTHOR is present for large_pr_split workflow * **ci:** gracefully and successful exit changelog upon no changes * **ci:** event resolution for on_label_added workflow +* **event_handler:** fix bug with previous array implementation ## Documentation +* **homepage:** note about v2 version * **layer:** upgrade to 1.28.0 (v33) ## Features * **ci:** add actionlint in pre-commit hook +* **event_handler:** improved support for headers and cookies in v2 ([#1455](https://github.com/awslabs/aws-lambda-powertools-python/issues/1455)) * **event_sources:** add CloudWatch dashboard custom widget event ([#1474](https://github.com/awslabs/aws-lambda-powertools-python/issues/1474)) ## Maintenance +* **bandit:** update baseline * **ci:** add missing description fields +* **ci:** limit E2E workflow run for source code change +* **ci:** create adhoc docs workflow for v2 +* **ci:** create adhoc docs workflow for v2 +* **ci:** create docs workflow for v2 +* **ci:** create reusable docs publishing workflow ([#1482](https://github.com/awslabs/aws-lambda-powertools-python/issues/1482)) +* **ci:** format comment on comment_large_pr script +* **ci:** add note for state persistence on comment_large_pr +* **ci:** destructure assignment on comment_large_pr +* **ci:** record pr details upon labeling +* **ci:** sync package version with pypi +* **ci:** remove unused and undeclared OS matrix env * **ci:** enable ci checks for v2 * **ci:** add workflow to suggest splitting large PRs ([#1480](https://github.com/awslabs/aws-lambda-powertools-python/issues/1480)) * **ci:** add linter for GitHub Actions as pre-commit hook ([#1479](https://github.com/awslabs/aws-lambda-powertools-python/issues/1479)) * **ci:** remove dangling debug step * **ci:** fix invalid dependency leftover -* **ci:** remove unused and undeclared OS matrix env -* **ci:** limit E2E workflow run for source code change +* **deps-dev:** bump mypy-boto3-dynamodb from 1.24.55.post1 to 1.24.60 ([#306](https://github.com/awslabs/aws-lambda-powertools-python/issues/306)) * **deps-dev:** bump mypy-boto3-dynamodb from 1.24.55.post1 to 1.24.60 ([#1481](https://github.com/awslabs/aws-lambda-powertools-python/issues/1481)) +* **deps-dev:** bump mkdocs-material from 8.4.1 to 8.4.2 ([#1483](https://github.com/awslabs/aws-lambda-powertools-python/issues/1483)) * **maintainers:** update release workflow link From 8e00fd9bca1e7dae878dd1db243288763f7e3ae1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 1 Sep 2022 06:53:08 +0200 Subject: [PATCH 39/49] chore(deps-dev): bump flake8-variables-names from 0.0.4 to 0.0.5 (#1490) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 281 ++++++++++++++++++++++++++++++++++++++++--------- pyproject.toml | 2 +- 2 files changed, 233 insertions(+), 50 deletions(-) diff --git a/poetry.lock b/poetry.lock index 8c8ff2f1821..aae9d7357d3 100644 --- a/poetry.lock +++ b/poetry.lock @@ -437,11 +437,11 @@ test = ["pytest-cov"] [[package]] name = "flake8-variables-names" -version = "0.0.4" +version = "0.0.5" description = "A flake8 extension that helps to make more readable variables names" category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.7" 
[[package]] name = "future" @@ -1387,10 +1387,12 @@ pydantic = ["pydantic", "email-validator"] [metadata] lock-version = "1.1" python-versions = "^3.7.4" -content-hash = "81514fdb005bee75315860470f16a71590648bbfac05872150530f3ee720181d" +content-hash = "e3d80709960638644c1d61cdcf0715bc684215ed48e1321eb29a32545cd32d11" [metadata.files] -atomicwrites = [] +atomicwrites = [ + {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, +] attrs = [ {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, @@ -1407,7 +1409,10 @@ aws-cdk-lib = [ {file = "aws-cdk.aws-apigatewayv2-integrations-alpha-2.39.1a0.tar.gz", hash = "sha256:67f7e38214466bd15438301828c0b210b08fc16ecf35781210cdda4eae3151e2"}, {file = "aws_cdk.aws_apigatewayv2_integrations_alpha-2.39.1a0-py3-none-any.whl", hash = "sha256:29a46bad1fe1fd8d9c2356686636a0e83db9e4b6b24d8765f5024fc2988f8661"}, ] -aws-xray-sdk = [] +aws-xray-sdk = [ + {file = "aws-xray-sdk-2.10.0.tar.gz", hash = "sha256:9b14924fd0628cf92936055864655354003f0b1acc3e1c3ffde6403d0799dd7a"}, + {file = "aws_xray_sdk-2.10.0-py2.py3-none-any.whl", hash = "sha256:7551e81a796e1a5471ebe84844c40e8edf7c218db33506d046fec61f7495eda4"}, +] bandit = [ {file = "bandit-1.7.4-py3-none-any.whl", hash = "sha256:412d3f259dab4077d0e7f0c11f50f650cc7d10db905d98f6520a95a18049658a"}, {file = "bandit-1.7.4.tar.gz", hash = "sha256:2d63a8c573417bae338962d4b9b06fbc6080f74ecd955a092849e1e65c717bd2"}, @@ -1445,8 +1450,14 @@ botocore = [ {file = "botocore-1.27.63-py3-none-any.whl", hash = "sha256:8567dee549430a53210c6b898dea3a8fc8ee9d7934ec1df7545c547cacbb2b8f"}, {file = "botocore-1.27.63.tar.gz", hash = "sha256:b97e17c930a7f45b50f94956a4474c1cd7b828e3dcd8a84dd0e3306ca6189335"}, ] -cattrs = [] -certifi = [] +cattrs = [ + {file = "cattrs-22.1.0-py3-none-any.whl", hash = "sha256:d55c477b4672f93606e992049f15d526dc7867e6c756cd6256d4af92e2b1e364"}, + {file = "cattrs-22.1.0.tar.gz", hash = "sha256:94b67b64cf92c994f8784c40c082177dc916e0489a73a9a36b24eb18a9db40c6"}, +] +certifi = [ + {file = "certifi-2022.6.15-py3-none-any.whl", hash = "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"}, + {file = "certifi-2022.6.15.tar.gz", hash = "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d"}, +] charset-normalizer = [ {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, @@ -1455,7 +1466,10 @@ click = [ {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, ] -colorama = [] +colorama = [ + {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"}, + {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, +] constructs = [ {file = "constructs-10.1.92-py3-none-any.whl", hash = "sha256:297f1194754e2698eeb0ac69d2fa88558b8ec2a19cf4d4c6b999f3f62d9e5c7c"}, {file = "constructs-10.1.92.tar.gz", hash = 
"sha256:1f3a63c65423e551339f50633400551a2f4d8a0eafa1418da78617e060a80cd3"}, @@ -1512,12 +1526,18 @@ coverage = [ {file = "coverage-6.4.4-pp36.pp37.pp38-none-any.whl", hash = "sha256:f67cf9f406cf0d2f08a3515ce2db5b82625a7257f88aad87904674def6ddaec1"}, {file = "coverage-6.4.4.tar.gz", hash = "sha256:e16c45b726acb780e1e6f88b286d3c10b3914ab03438f32117c4aa52d7f30d58"}, ] -decorator = [] +decorator = [ + {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, +] dnspython = [ {file = "dnspython-2.2.1-py3-none-any.whl", hash = "sha256:a851e51367fb93e9e1361732c1d60dab63eff98712e503ea7d92e6eccb109b4f"}, {file = "dnspython-2.2.1.tar.gz", hash = "sha256:0f7569a4a6ff151958b64304071d370daa3243d15941a7beedf0c9fe5105603e"}, ] -email-validator = [] +email-validator = [ + {file = "email_validator-1.2.1-py2.py3-none-any.whl", hash = "sha256:c8589e691cf73eb99eed8d10ce0e9cbb05a0886ba920c8bcb7c82873f4c5789c"}, + {file = "email_validator-1.2.1.tar.gz", hash = "sha256:6757aea012d40516357c0ac2b1a4c31219ab2f899d26831334c5d069e8b6c3d8"}, +] eradicate = [ {file = "eradicate-2.1.0-py3-none-any.whl", hash = "sha256:8bfaca181db9227dc88bdbce4d051a9627604c2243e7d85324f6d6ce0fd08bb2"}, {file = "eradicate-2.1.0.tar.gz", hash = "sha256:aac7384ab25b1bf21c4c012de9b4bf8398945a14c98c911545b2ea50ab558014"}, @@ -1526,8 +1546,14 @@ exceptiongroup = [ {file = "exceptiongroup-1.0.0rc9-py3-none-any.whl", hash = "sha256:2e3c3fc1538a094aab74fad52d6c33fc94de3dfee3ee01f187c0e0c72aec5337"}, {file = "exceptiongroup-1.0.0rc9.tar.gz", hash = "sha256:9086a4a21ef9b31c72181c77c040a074ba0889ee56a7b289ff0afb0d97655f96"}, ] -execnet = [] -fastjsonschema = [] +execnet = [ + {file = "execnet-1.9.0-py2.py3-none-any.whl", hash = "sha256:a295f7cc774947aac58dde7fdc85f4aa00c42adf5d8f5468fc630c1acf30a142"}, + {file = "execnet-1.9.0.tar.gz", hash = "sha256:8f694f3ba9cc92cab508b152dcfe322153975c29bda272e2fd7f3f00f36e47c5"}, +] +fastjsonschema = [ + {file = "fastjsonschema-2.16.1-py3-none-any.whl", hash = "sha256:2f7158c4de792555753d6c2277d6a2af2d406dfd97aeca21d17173561ede4fe6"}, + {file = "fastjsonschema-2.16.1.tar.gz", hash = "sha256:d6fa3ffbe719768d70e298b9fb847484e2bdfdb7241ed052b8d57a9294a8c334"}, +] filelock = [ {file = "filelock-3.8.0-py3-none-any.whl", hash = "sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4"}, {file = "filelock-3.8.0.tar.gz", hash = "sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc"}, @@ -1544,7 +1570,10 @@ flake8-bugbear = [ {file = "flake8-bugbear-22.8.23.tar.gz", hash = "sha256:de0717d11124a082118dd08387b34fd86b2721642ec2d8e92be66cfa5ea7c445"}, {file = "flake8_bugbear-22.8.23-py3-none-any.whl", hash = "sha256:1b0ebe0873d1cd55bf9f1588bfcb930db339018ef44a3981a26532daa9fd14a8"}, ] -flake8-builtins = [] +flake8-builtins = [ + {file = "flake8-builtins-1.5.3.tar.gz", hash = "sha256:09998853b2405e98e61d2ff3027c47033adbdc17f9fe44ca58443d876eb00f3b"}, + {file = "flake8_builtins-1.5.3-py2.py3-none-any.whl", hash = "sha256:7706babee43879320376861897e5d1468e396a40b8918ed7bccf70e5f90b8687"}, +] flake8-comprehensions = [ {file = "flake8-comprehensions-3.10.0.tar.gz", hash = "sha256:181158f7e7aa26a63a0a38e6017cef28c6adee71278ce56ce11f6ec9c4905058"}, {file = "flake8_comprehensions-3.10.0-py3-none-any.whl", hash = "sha256:dad454fd3d525039121e98fa1dd90c46bc138708196a4ebbc949ad3c859adedb"}, @@ -1557,18 +1586,29 @@ 
flake8-eradicate = [ {file = "flake8-eradicate-1.3.0.tar.gz", hash = "sha256:e4c98f00d17dc8653e3388cac2624cd81e9735de2fd4a8dcf99029633ebd7a63"}, {file = "flake8_eradicate-1.3.0-py3-none-any.whl", hash = "sha256:85a71e0c5f4e07f7c6c5fec520483561fd6bd295417d622855bdeade99242e3d"}, ] -flake8-fixme = [] +flake8-fixme = [ + {file = "flake8-fixme-1.1.1.tar.gz", hash = "sha256:50cade07d27a4c30d4f12351478df87339e67640c83041b664724bda6d16f33a"}, + {file = "flake8_fixme-1.1.1-py2.py3-none-any.whl", hash = "sha256:226a6f2ef916730899f29ac140bed5d4a17e5aba79f00a0e3ae1eff1997cb1ac"}, +] flake8-isort = [ {file = "flake8-isort-4.2.0.tar.gz", hash = "sha256:26571500cd54976bbc0cf1006ffbcd1a68dd102f816b7a1051b219616ba9fee0"}, {file = "flake8_isort-4.2.0-py3-none-any.whl", hash = "sha256:5b87630fb3719bf4c1833fd11e0d9534f43efdeba524863e15d8f14a7ef6adbf"}, ] -flake8-variables-names = [] -future = [] +flake8-variables-names = [ + {file = "flake8_variables_names-0.0.5-py3-none-any.whl", hash = "sha256:e3277031696bbe10b5132b49938cde1d70fcae9561533b7bd7ab8e69cb27addb"}, + {file = "flake8_variables_names-0.0.5.tar.gz", hash = "sha256:30133e14ee2300e13a60393a00f74d98110c76070ac67d1ab91606f02824a7e1"}, +] +future = [ + {file = "future-0.18.2.tar.gz", hash = "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"}, +] ghp-import = [ {file = "ghp-import-2.1.0.tar.gz", hash = "sha256:9c535c4c61193c2df8871222567d7fd7e5014d835f97dc7b7439069e2413d343"}, {file = "ghp_import-2.1.0-py3-none-any.whl", hash = "sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619"}, ] -gitdb = [] +gitdb = [ + {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"}, + {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, +] gitpython = [ {file = "GitPython-3.1.27-py3-none-any.whl", hash = "sha256:5b68b000463593e05ff2b261acff0ff0972df8ab1b70d3cdbd41b546c8b8fc3d"}, {file = "GitPython-3.1.27.tar.gz", hash = "sha256:1c885ce809e8ba2d88a29befeb385fcea06338d3640712b59ca623c220bb5704"}, @@ -1581,8 +1621,14 @@ importlib-metadata = [ {file = "importlib_metadata-4.12.0-py3-none-any.whl", hash = "sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23"}, {file = "importlib_metadata-4.12.0.tar.gz", hash = "sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670"}, ] -iniconfig = [] -isort = [] +iniconfig = [ + {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, + {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, +] +isort = [ + {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, + {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, +] jinja2 = [ {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, @@ -1599,7 +1645,10 @@ mako = [ {file = "Mako-1.2.2-py3-none-any.whl", hash = "sha256:8efcb8004681b5f71d09c983ad5a9e6f5c40601a6ec469148753292abc0da534"}, {file = "Mako-1.2.2.tar.gz", hash = "sha256:3724869b363ba630a272a5f89f68c070352137b8fd1757650017b7e06fda163f"}, ] -mando = [] +mando = [ + {file = 
"mando-0.6.4-py2.py3-none-any.whl", hash = "sha256:4ce09faec7e5192ffc3c57830e26acba0fd6cd11e1ee81af0d4df0657463bd1c"}, + {file = "mando-0.6.4.tar.gz", hash = "sha256:79feb19dc0f097daa64a1243db578e7674909b75f88ac2220f1c065c10a0d960"}, +] markdown = [ {file = "Markdown-3.3.7-py3-none-any.whl", hash = "sha256:f5da449a6e1c989a4cea2631aa8ee67caa5a2ef855d551c88f9e309f4634c621"}, {file = "Markdown-3.3.7.tar.gz", hash = "sha256:cbb516f16218e643d8e0a95b309f77eb118cb138d39a4f27851e6a63581db874"}, @@ -1646,20 +1695,58 @@ markupsafe = [ {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, ] -mccabe = [] -mergedeep = [] -mike = [] +mccabe = [ + {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, + {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, +] +mergedeep = [ + {file = "mergedeep-1.3.4-py3-none-any.whl", hash = "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307"}, + {file = "mergedeep-1.3.4.tar.gz", hash = "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8"}, +] +mike = [ + {file = "mike-0.6.0-py3-none-any.whl", hash = "sha256:cef9b9c803ff5c3fbb410f51f5ceb00902a9fe16d9fabd93b69c65cf481ab5a1"}, + {file = "mike-0.6.0.tar.gz", hash = "sha256:6d6239de2a60d733da2f34617e9b9a14c4b5437423b47e524f14dc96d6ce5f2f"}, +] mkdocs = [ {file = "mkdocs-1.3.1-py3-none-any.whl", hash = "sha256:fda92466393127d2da830bc6edc3a625a14b436316d1caf347690648e774c4f0"}, {file = "mkdocs-1.3.1.tar.gz", hash = "sha256:a41a2ff25ce3bbacc953f9844ba07d106233cd76c88bac1f59cb1564ac0d87ed"}, ] -mkdocs-git-revision-date-plugin = [] +mkdocs-git-revision-date-plugin = [ + {file = "mkdocs_git_revision_date_plugin-0.3.2-py3-none-any.whl", hash = "sha256:2e67956cb01823dd2418e2833f3623dee8604cdf223bddd005fe36226a56f6ef"}, +] mkdocs-material = [ {file = "mkdocs-material-8.4.2.tar.gz", hash = "sha256:704c64c3fff126a3923c2961d95f26b19be621342a6a4e49ed039f0bb7a5c540"}, {file = "mkdocs_material-8.4.2-py2.py3-none-any.whl", hash = "sha256:166287bb0e4197804906bf0389a852d5ced43182c30127ac8b48a4e497ecd7e5"}, ] -mkdocs-material-extensions = [] -mypy = [] +mkdocs-material-extensions = [ + {file = "mkdocs-material-extensions-1.0.3.tar.gz", hash = "sha256:bfd24dfdef7b41c312ede42648f9eb83476ea168ec163b613f9abd12bbfddba2"}, + {file = "mkdocs_material_extensions-1.0.3-py3-none-any.whl", hash = "sha256:a82b70e533ce060b2a5d9eb2bc2e1be201cf61f901f93704b4acf6e3d5983a44"}, +] +mypy = [ + {file = "mypy-0.971-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f2899a3cbd394da157194f913a931edfd4be5f274a88041c9dc2d9cdcb1c315c"}, + {file = "mypy-0.971-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:98e02d56ebe93981c41211c05adb630d1d26c14195d04d95e49cd97dbc046dc5"}, + {file = "mypy-0.971-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:19830b7dba7d5356d3e26e2427a2ec91c994cd92d983142cbd025ebe81d69cf3"}, + {file = "mypy-0.971-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:02ef476f6dcb86e6f502ae39a16b93285fef97e7f1ff22932b657d1ef1f28655"}, + {file = "mypy-0.971-cp310-cp310-win_amd64.whl", hash = "sha256:25c5750ba5609a0c7550b73a33deb314ecfb559c350bb050b655505e8aed4103"}, + {file = 
"mypy-0.971-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d3348e7eb2eea2472db611486846742d5d52d1290576de99d59edeb7cd4a42ca"}, + {file = "mypy-0.971-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3fa7a477b9900be9b7dd4bab30a12759e5abe9586574ceb944bc29cddf8f0417"}, + {file = "mypy-0.971-cp36-cp36m-win_amd64.whl", hash = "sha256:2ad53cf9c3adc43cf3bea0a7d01a2f2e86db9fe7596dfecb4496a5dda63cbb09"}, + {file = "mypy-0.971-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:855048b6feb6dfe09d3353466004490b1872887150c5bb5caad7838b57328cc8"}, + {file = "mypy-0.971-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:23488a14a83bca6e54402c2e6435467a4138785df93ec85aeff64c6170077fb0"}, + {file = "mypy-0.971-cp37-cp37m-win_amd64.whl", hash = "sha256:4b21e5b1a70dfb972490035128f305c39bc4bc253f34e96a4adf9127cf943eb2"}, + {file = "mypy-0.971-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9796a2ba7b4b538649caa5cecd398d873f4022ed2333ffde58eaf604c4d2cb27"}, + {file = "mypy-0.971-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5a361d92635ad4ada1b1b2d3630fc2f53f2127d51cf2def9db83cba32e47c856"}, + {file = "mypy-0.971-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b793b899f7cf563b1e7044a5c97361196b938e92f0a4343a5d27966a53d2ec71"}, + {file = "mypy-0.971-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d1ea5d12c8e2d266b5fb8c7a5d2e9c0219fedfeb493b7ed60cd350322384ac27"}, + {file = "mypy-0.971-cp38-cp38-win_amd64.whl", hash = "sha256:23c7ff43fff4b0df93a186581885c8512bc50fc4d4910e0f838e35d6bb6b5e58"}, + {file = "mypy-0.971-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1f7656b69974a6933e987ee8ffb951d836272d6c0f81d727f1d0e2696074d9e6"}, + {file = "mypy-0.971-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d2022bfadb7a5c2ef410d6a7c9763188afdb7f3533f22a0a32be10d571ee4bbe"}, + {file = "mypy-0.971-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef943c72a786b0f8d90fd76e9b39ce81fb7171172daf84bf43eaf937e9f220a9"}, + {file = "mypy-0.971-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d744f72eb39f69312bc6c2abf8ff6656973120e2eb3f3ec4f758ed47e414a4bf"}, + {file = "mypy-0.971-cp39-cp39-win_amd64.whl", hash = "sha256:77a514ea15d3007d33a9e2157b0ba9c267496acf12a7f2b9b9f8446337aac5b0"}, + {file = "mypy-0.971-py3-none-any.whl", hash = "sha256:0d054ef16b071149917085f51f89555a576e2618d5d9dd70bd6eea6410af3ac9"}, + {file = "mypy-0.971.tar.gz", hash = "sha256:40b0f21484238269ae6a57200c807d80debc6459d444c0489a102d7c6a75fa56"}, +] mypy-boto3-appconfig = [ {file = "mypy-boto3-appconfig-1.24.36.post1.tar.gz", hash = "sha256:e1916b3754915cb411ef977083500e1f30f81f7b3aea6ff5eed1cec91944dea6"}, {file = "mypy_boto3_appconfig-1.24.36.post1-py3-none-any.whl", hash = "sha256:a5dbe549dbebf4bc7a6cfcbfa9dff89ceb4983c042b785763ee656504bdb49f6"}, @@ -1700,8 +1787,14 @@ mypy-boto3-xray = [ {file = "mypy-boto3-xray-1.24.36.post1.tar.gz", hash = "sha256:104f1ecf7f1f6278c582201e71a7ab64843d3a3fdc8f23295cf68788cc77e9bb"}, {file = "mypy_boto3_xray-1.24.36.post1-py3-none-any.whl", hash = "sha256:97b9f0686c717c8be99ac06cb52febaf71712b4e4cd0b61ed2eb5ed012a9b5fd"}, ] -mypy-extensions = [] -packaging = [] +mypy-extensions = [ + {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, + {file = 
"mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, +] +packaging = [ + {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, + {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, +] pathspec = [ {file = "pathspec-0.10.0-py3-none-any.whl", hash = "sha256:aefa80ac32d5bf1f96139dca67cefb69a431beff4e6bf1168468f37d7ab87015"}, {file = "pathspec-0.10.0.tar.gz", hash = "sha256:01eecd304ba0e6eeed188ae5fa568e99ef10265af7fd9ab737d6412b4ee0ab85"}, @@ -1710,15 +1803,29 @@ pbr = [ {file = "pbr-5.10.0-py2.py3-none-any.whl", hash = "sha256:da3e18aac0a3c003e9eea1a81bd23e5a3a75d745670dcf736317b7d966887fdf"}, {file = "pbr-5.10.0.tar.gz", hash = "sha256:cfcc4ff8e698256fc17ea3ff796478b050852585aa5bae79ecd05b2ab7b39b9a"}, ] -pdoc3 = [] +pdoc3 = [ + {file = "pdoc3-0.10.0-py3-none-any.whl", hash = "sha256:ba45d1ada1bd987427d2bf5cdec30b2631a3ff5fb01f6d0e77648a572ce6028b"}, + {file = "pdoc3-0.10.0.tar.gz", hash = "sha256:5f22e7bcb969006738e1aa4219c75a32f34c2d62d46dc9d2fb2d3e0b0287e4b7"}, +] platformdirs = [ {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, ] -pluggy = [] -publication = [] -py = [] -py-cpuinfo = [] +pluggy = [ + {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, + {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, +] +publication = [ + {file = "publication-0.0.3-py2.py3-none-any.whl", hash = "sha256:0248885351febc11d8a1098d5c8e3ab2dabcf3e8c0c96db1e17ecd12b53afbe6"}, + {file = "publication-0.0.3.tar.gz", hash = "sha256:68416a0de76dddcdd2930d1c8ef853a743cc96c82416c4e4d3b5d901c6276dc4"}, +] +py = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] +py-cpuinfo = [ + {file = "py-cpuinfo-8.0.0.tar.gz", hash = "sha256:5f269be0e08e33fd959de96b34cd4aeeeacac014dd8305f70eb28d06de2345c5"}, +] pycodestyle = [ {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"}, {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, @@ -1769,22 +1876,46 @@ pygments = [ {file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"}, {file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"}, ] -pymdown-extensions = [] -pyparsing = [] +pymdown-extensions = [ + {file = "pymdown_extensions-9.5-py3-none-any.whl", hash = "sha256:ec141c0f4983755349f0c8710416348d1a13753976c028186ed14f190c8061c4"}, + {file = "pymdown_extensions-9.5.tar.gz", hash = "sha256:3ef2d998c0d5fa7eb09291926d90d69391283561cf6306f85cd588a5eb5befa0"}, +] +pyparsing = [ + {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, + {file = "pyparsing-3.0.9.tar.gz", hash = 
"sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, +] pytest = [ {file = "pytest-7.1.2-py3-none-any.whl", hash = "sha256:13d0e3ccfc2b6e26be000cb6568c832ba67ba32e719443bfe725814d3c42433c"}, {file = "pytest-7.1.2.tar.gz", hash = "sha256:a06a0425453864a270bc45e71f783330a7428defb4230fb5e6a731fde06ecd45"}, ] -pytest-asyncio = [] -pytest-benchmark = [] -pytest-cov = [] -pytest-forked = [] +pytest-asyncio = [ + {file = "pytest-asyncio-0.16.0.tar.gz", hash = "sha256:7496c5977ce88c34379df64a66459fe395cd05543f0a2f837016e7144391fcfb"}, + {file = "pytest_asyncio-0.16.0-py3-none-any.whl", hash = "sha256:5f2a21273c47b331ae6aa5b36087047b4899e40f03f18397c0e65fa5cca54e9b"}, +] +pytest-benchmark = [ + {file = "pytest-benchmark-3.4.1.tar.gz", hash = "sha256:40e263f912de5a81d891619032983557d62a3d85843f9a9f30b98baea0cd7b47"}, + {file = "pytest_benchmark-3.4.1-py2.py3-none-any.whl", hash = "sha256:36d2b08c4882f6f997fd3126a3d6dfd70f3249cde178ed8bbc0b73db7c20f809"}, +] +pytest-cov = [ + {file = "pytest-cov-3.0.0.tar.gz", hash = "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470"}, + {file = "pytest_cov-3.0.0-py3-none-any.whl", hash = "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6"}, +] +pytest-forked = [ + {file = "pytest-forked-1.4.0.tar.gz", hash = "sha256:8b67587c8f98cbbadfdd804539ed5455b6ed03802203485dd2f53c1422d7440e"}, + {file = "pytest_forked-1.4.0-py3-none-any.whl", hash = "sha256:bbbb6717efc886b9d64537b41fb1497cfaf3c9601276be8da2cccfea5a3c8ad8"}, +] pytest-mock = [ {file = "pytest-mock-3.8.2.tar.gz", hash = "sha256:77f03f4554392558700295e05aed0b1096a20d4a60a4f3ddcde58b0c31c8fca2"}, {file = "pytest_mock-3.8.2-py3-none-any.whl", hash = "sha256:8a9e226d6c0ef09fcf20c94eb3405c388af438a90f3e39687f84166da82d5948"}, ] -pytest-xdist = [] -python-dateutil = [] +pytest-xdist = [ + {file = "pytest-xdist-2.5.0.tar.gz", hash = "sha256:4580deca3ff04ddb2ac53eba39d76cb5dd5edeac050cb6fbc768b0dd712b4edf"}, + {file = "pytest_xdist-2.5.0-py3-none-any.whl", hash = "sha256:6fe5c74fec98906deb8f2d2b616b5c782022744978e7bd4695d39c8f42d0ce65"}, +] +python-dateutil = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] python-snappy = [ {file = "python-snappy-0.6.1.tar.gz", hash = "sha256:b6a107ab06206acc5359d4c5632bd9b22d448702a79b3169b0c62e0fb808bb2a"}, {file = "python_snappy-0.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b7f920eaf46ebf41bd26f9df51c160d40f9e00b7b48471c3438cb8d027f7fb9b"}, @@ -1870,15 +2001,58 @@ pyyaml = [ {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, ] -pyyaml-env-tag = [] -radon = [] -requests = [] -retry = [] +pyyaml-env-tag = [ + {file = "pyyaml_env_tag-0.1-py3-none-any.whl", hash = "sha256:af31106dec8a4d68c60207c1886031cbf839b68aa7abccdb19868200532c2069"}, + {file = "pyyaml_env_tag-0.1.tar.gz", hash = "sha256:70092675bda14fdec33b31ba77e7543de9ddc88f2e5b99160396572d11525bdb"}, +] +radon = [ + {file = "radon-5.1.0-py2.py3-none-any.whl", hash = "sha256:fa74e018197f1fcb54578af0f675d8b8e2342bd8e0b72bef8197bc4c9e645f36"}, + {file = "radon-5.1.0.tar.gz", hash = 
"sha256:cb1d8752e5f862fb9e20d82b5f758cbc4fb1237c92c9a66450ea0ea7bf29aeee"}, +] +requests = [ + {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, + {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, +] +retry = [ + {file = "retry-0.9.2-py2.py3-none-any.whl", hash = "sha256:ccddf89761fa2c726ab29391837d4327f819ea14d244c232a1d24c67a2f98606"}, + {file = "retry-0.9.2.tar.gz", hash = "sha256:f8bfa8b99b69c4506d6f5bd3b0aabf77f98cdb17f3c9fc3f5ca820033336fba4"}, +] "ruamel.yaml" = [ {file = "ruamel.yaml-0.17.21-py3-none-any.whl", hash = "sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7"}, {file = "ruamel.yaml-0.17.21.tar.gz", hash = "sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af"}, ] -"ruamel.yaml.clib" = [] +"ruamel.yaml.clib" = [ + {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6e7be2c5bcb297f5b82fee9c665eb2eb7001d1050deaba8471842979293a80b0"}, + {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:066f886bc90cc2ce44df8b5f7acfc6a7e2b2e672713f027136464492b0c34d7c"}, + {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:221eca6f35076c6ae472a531afa1c223b9c29377e62936f61bc8e6e8bdc5f9e7"}, + {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-win32.whl", hash = "sha256:1070ba9dd7f9370d0513d649420c3b362ac2d687fe78c6e888f5b12bf8bc7bee"}, + {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-win_amd64.whl", hash = "sha256:77df077d32921ad46f34816a9a16e6356d8100374579bc35e15bab5d4e9377de"}, + {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:cfdb9389d888c5b74af297e51ce357b800dd844898af9d4a547ffc143fa56751"}, + {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:7b2927e92feb51d830f531de4ccb11b320255ee95e791022555971c466af4527"}, + {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-win32.whl", hash = "sha256:ada3f400d9923a190ea8b59c8f60680c4ef8a4b0dfae134d2f2ff68429adfab5"}, + {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-win_amd64.whl", hash = "sha256:de9c6b8a1ba52919ae919f3ae96abb72b994dd0350226e28f3686cb4f142165c"}, + {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d67f273097c368265a7b81e152e07fb90ed395df6e552b9fa858c6d2c9f42502"}, + {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:72a2b8b2ff0a627496aad76f37a652bcef400fd861721744201ef1b45199ab78"}, + {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:d3c620a54748a3d4cf0bcfe623e388407c8e85a4b06b8188e126302bcab93ea8"}, + {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-win32.whl", hash = "sha256:9efef4aab5353387b07f6b22ace0867032b900d8e91674b5d8ea9150db5cae94"}, + {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-win_amd64.whl", hash = "sha256:846fc8336443106fe23f9b6d6b8c14a53d38cef9a375149d61f99d78782ea468"}, + {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0847201b767447fc33b9c235780d3aa90357d20dd6108b92be544427bea197dd"}, + {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:78988ed190206672da0f5d50c61afef8f67daa718d614377dcd5e3ed85ab4a99"}, + {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:210c8fcfeff90514b7133010bf14e3bad652c8efde6b20e00c43854bf94fa5a6"}, + {file = 
"ruamel.yaml.clib-0.2.6-cp37-cp37m-win32.whl", hash = "sha256:a49e0161897901d1ac9c4a79984b8410f450565bbad64dbfcbf76152743a0cdb"}, + {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-win_amd64.whl", hash = "sha256:bf75d28fa071645c529b5474a550a44686821decebdd00e21127ef1fd566eabe"}, + {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a32f8d81ea0c6173ab1b3da956869114cae53ba1e9f72374032e33ba3118c233"}, + {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7f7ecb53ae6848f959db6ae93bdff1740e651809780822270eab111500842a84"}, + {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:61bc5e5ca632d95925907c569daa559ea194a4d16084ba86084be98ab1cec1c6"}, + {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-win32.whl", hash = "sha256:89221ec6d6026f8ae859c09b9718799fea22c0e8da8b766b0b2c9a9ba2db326b"}, + {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-win_amd64.whl", hash = "sha256:31ea73e564a7b5fbbe8188ab8b334393e06d997914a4e184975348f204790277"}, + {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dc6a613d6c74eef5a14a214d433d06291526145431c3b964f5e16529b1842bed"}, + {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:1866cf2c284a03b9524a5cc00daca56d80057c5ce3cdc86a52020f4c720856f0"}, + {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:1b4139a6ffbca8ef60fdaf9b33dec05143ba746a6f0ae0f9d11d38239211d335"}, + {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-win32.whl", hash = "sha256:3fb9575a5acd13031c57a62cc7823e5d2ff8bc3835ba4d94b921b4e6ee664104"}, + {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-win_amd64.whl", hash = "sha256:825d5fccef6da42f3c8eccd4281af399f21c02b32d98e113dbc631ea6a6ecbc7"}, + {file = "ruamel.yaml.clib-0.2.6.tar.gz", hash = "sha256:4ff604ce439abb20794f05613c374759ce10e3595d1867764dd1ae675b85acbd"}, +] s3transfer = [ {file = "s3transfer-0.6.0-py3-none-any.whl", hash = "sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd"}, {file = "s3transfer-0.6.0.tar.gz", hash = "sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947"}, @@ -1887,8 +2061,14 @@ six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] -smmap = [] -stevedore = [] +smmap = [ + {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, + {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, +] +stevedore = [ + {file = "stevedore-3.5.0-py3-none-any.whl", hash = "sha256:a547de73308fd7e90075bb4d301405bebf705292fa90a90fc3bcf9133f58616c"}, + {file = "stevedore-3.5.0.tar.gz", hash = "sha256:f40253887d8712eaa2bb0ea3830374416736dc8ec0e22f5a65092c1174c44335"}, +] tomli = [ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, @@ -2032,7 +2212,10 @@ wrapt = [ {file = "wrapt-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb"}, {file = "wrapt-1.14.1.tar.gz", hash = "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"}, ] -xenon = [] +xenon = [ + {file = 
"xenon-0.9.0-py2.py3-none-any.whl", hash = "sha256:994c80c7f1c6d40596b600b93734d85a5739208f31895ef99f1e4d362caf9e35"}, + {file = "xenon-0.9.0.tar.gz", hash = "sha256:d2b9cb6c6260f771a432c1e588e51fddb17858f88f73ef641e7532f7a5f58fb8"}, +] zipp = [ {file = "zipp-3.8.1-py3-none-any.whl", hash = "sha256:47c40d7fe183a6f21403a199b3e4192cca5774656965b0a4988ad2f8feb5f009"}, {file = "zipp-3.8.1.tar.gz", hash = "sha256:05b45f1ee8f807d0cc928485ca40a07cb491cf092ff587c0df9cb1fd154848d2"}, diff --git a/pyproject.toml b/pyproject.toml index 6420a49f179..f6ec1fd902e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -37,7 +37,7 @@ flake8-comprehensions = "^3.7.0" flake8-debugger = "^4.0.0" flake8-fixme = "^1.1.1" flake8-isort = "^4.1.2" -flake8-variables-names = "^0.0.4" +flake8-variables-names = "^0.0.5" flake8-black = "^0.3.3" isort = "^5.10.1" pytest-cov = "^3.0.0" From 9eed7829692bf537c5f75bae07158a520cdcb111 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 1 Sep 2022 06:59:36 +0200 Subject: [PATCH 40/49] chore(deps): bump pydantic from 1.10.0 to 1.10.1 (#1491) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 74 ++++++++++++++++++++++++++--------------------------- 1 file changed, 37 insertions(+), 37 deletions(-) diff --git a/poetry.lock b/poetry.lock index aae9d7357d3..68582deb29f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -957,7 +957,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pydantic" -version = "1.10.0" +version = "1.10.1" description = "Data validation and settings management using python type hints" category = "main" optional = true @@ -1831,42 +1831,42 @@ pycodestyle = [ {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, ] pydantic = [ - {file = "pydantic-1.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7e34e46dd08dafd4c75b8378efe3eae7d8e5212950fcd894d86c1df2dcfb80fe"}, - {file = "pydantic-1.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4af55f33ae5be6cccecd4fa462630daffef1f161f60c3f194b24eca705d50748"}, - {file = "pydantic-1.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1856bc6640aced42886f7ee48f5ed1fa5adf35e34064b5f9532b52d5a3b8a0d3"}, - {file = "pydantic-1.10.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d73ae7e210929a1b7d288034835dd787e5b0597192d58ab7342bacbeec0f33df"}, - {file = "pydantic-1.10.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1192c17667d21652ab93b5eecd1a776cd0a4e384ea8c331bb830c9d130293af"}, - {file = "pydantic-1.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:026427be4e251f876e7519a63af37ae5ebb8b593ca8b02180bdc6becd1ea4ef4"}, - {file = "pydantic-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:d1dffae1f219d06a997ec78d1d2daafdbfecf243ad8eb36bfbcbc73e30e17385"}, - {file = "pydantic-1.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b549eebe8de4e50fc3b4f8c1f9cc2f731d91787fc3f7d031561668377b8679bc"}, - {file = "pydantic-1.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a0ba8710bfdaddb7424c05ad2dc1da04796003751eac6ad30c218ac1d68a174e"}, - {file = "pydantic-1.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0985ba95af937389c9ce8d747138417303569cb736bd12469646ef53cd66e1c"}, - {file = 
"pydantic-1.10.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d484fbbe6267b6c936a6d005d5170ab553f3f4367348c7e88d3e17f0a7179981"}, - {file = "pydantic-1.10.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:9500586151cd56a20bacb8f1082df1b4489000120d1c7ddc44c8b20870e8adbd"}, - {file = "pydantic-1.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1b5212604aaf5954e9a7cea8f0c60d6dbef996aa7b41edefd329e6b5011ce8cf"}, - {file = "pydantic-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:39212b3853eea165a3cda11075d5b7d09d4291fcbc3c0ecefd23797ee21b29e9"}, - {file = "pydantic-1.10.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b3e3aed33fbd9518cf508d5415a58af683743d53dc5e58953973d73605774f34"}, - {file = "pydantic-1.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed4e5c18cac70fadd4cf339f444c4f1795f0876dfd5b70cf0a841890b52f0001"}, - {file = "pydantic-1.10.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:45a6d0a9fdaad2a27ea69aec4659705ed8f60a5664e892c73e2b977d8f5166cc"}, - {file = "pydantic-1.10.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:158f1479367da20914961b5406ac3b29dfe1d858ae2af96c444f73543defcf0c"}, - {file = "pydantic-1.10.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:172aaeeaff8fc3ac326fb8a2934a063ca0938586c5fe8848285052de83a240f7"}, - {file = "pydantic-1.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:231b19c010288bfbfdcd3f79df38b5ff893c6547cd8c7d006203435790b22815"}, - {file = "pydantic-1.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:22206c152f9b86c0ee169928f9c24e1c0c566edb2462600b298ccb04860961aa"}, - {file = "pydantic-1.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d8ef840ef803ef17a7bd52480eb85faca0eed728d70233fd560f7d1066330247"}, - {file = "pydantic-1.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f99b4de6936a0f9fe255d1c7fdc447700ddd027c9ad38a612d453ed5fc7d6d0"}, - {file = "pydantic-1.10.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:134b4fd805737496ce4efd24ce2f8da0e08c66dcfc054fee1a19673eec780f2c"}, - {file = "pydantic-1.10.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c4c76af6ad47bc46cf16bd0e4a5e536a7a2bec0dec14ea08b712daa6645bf293"}, - {file = "pydantic-1.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e03402b0a6b23a2d0b9ee31e45d80612c95562b5af8b5c900171b9d9015ddc5f"}, - {file = "pydantic-1.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:3a3a60fcb5ce08cab593b7978d02db67b8d153e9d582adab7c0b69d7200d78be"}, - {file = "pydantic-1.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d8e5c5a50821c55b76dcf422610225cb7e44685cdd81832d0d504fa8c9343f35"}, - {file = "pydantic-1.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:645b83297a9428a675c98c1f69a7237a381900e34f23245c0ea73d74e454bf68"}, - {file = "pydantic-1.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95ab3f31f35dc4f8fc85b04d13569e5fdc9de2d3050ae64c1fdc3430dfe7d92d"}, - {file = "pydantic-1.10.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e290915a0ed53d3c59d6071fc7d2c843ed04c33affcd752dd1f3daa859b44a76"}, - {file = "pydantic-1.10.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:af669da39ede365069dbc5de56564b011e3353f801acdbdd7145002a78abc3d9"}, - {file = "pydantic-1.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:8e796f915762dec4678fafc89b1f0441ab9209517a8a682ddb3f988f7ffe0827"}, - {file = "pydantic-1.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:652727f9e1d3ae30bd8a4dfbebcafd50df45277b97f3deabbbfedcf731f94aa5"}, - {file = "pydantic-1.10.0-py3-none-any.whl", hash = "sha256:4d2b9258f5bd2d129bd4cf2d31f9d40094b9ed6ef64896e2f7a70729b2d599ea"}, - {file = "pydantic-1.10.0.tar.gz", hash = "sha256:e13788fcad1baf5eb3236856b2a9a74f7dac6b3ea7ca1f60a4ad8bad4239cf4c"}, + {file = "pydantic-1.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:221166d99726238f71adc4fa9f3e94063a10787574b966f86a774559e709ac5a"}, + {file = "pydantic-1.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a90e85d95fd968cd7cae122e0d3e0e1f6613bc88c1ff3fe838ac9785ea4b1c4c"}, + {file = "pydantic-1.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2157aaf5718c648eaec9e654a34179ae42ffc363dc3ad058538a4f3ecbd9341"}, + {file = "pydantic-1.10.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6142246fc9adb51cadaeb84fb52a86f3adad4c6a7b0938a5dd0b1356b0088217"}, + {file = "pydantic-1.10.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:60dad97a09b6f44690c05467a4f397b62bfc2c839ac39102819d6979abc2be0d"}, + {file = "pydantic-1.10.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d6f5bcb59d33ec46621dae76e714c53035087666cac80c81c9047a84f3ff93d0"}, + {file = "pydantic-1.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:522906820cd60e63c7960ba83078bf2d2ad2dd0870bf68248039bcb1ec3eb0a4"}, + {file = "pydantic-1.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d545c89d88bdd5559db17aeb5a61a26799903e4bd76114779b3bf1456690f6ce"}, + {file = "pydantic-1.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ad2374b5b3b771dcc6e2f6e0d56632ab63b90e9808b7a73ad865397fcdb4b2cd"}, + {file = "pydantic-1.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90e02f61b7354ed330f294a437d0bffac9e21a5d46cb4cc3c89d220e497db7ac"}, + {file = "pydantic-1.10.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc5ffe7bd0b4778fa5b7a5f825c52d6cfea3ae2d9b52b05b9b1d97e36dee23a8"}, + {file = "pydantic-1.10.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7acb7b66ffd2bc046eaff0063df84c83fc3826722d5272adaeadf6252e17f691"}, + {file = "pydantic-1.10.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7e6786ed5faa559dea5a77f6d2de9a08d18130de9344533535d945f34bdcd42e"}, + {file = "pydantic-1.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:c7bf8ff1d18186eb0cbe42bd9bfb4cbf7fde1fd01b8608925458990c21f202f0"}, + {file = "pydantic-1.10.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:14a5babda137a294df7ad5f220986d79bbb87fdeb332c6ded61ce19da7f5f3bf"}, + {file = "pydantic-1.10.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5659cb9c6b3d27fc0067025c4f5a205f5e838232a4a929b412781117c2343d44"}, + {file = "pydantic-1.10.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c8d70fb91b03c32d2e857b071a22a5225e6b625ca82bd2cc8dd729d88e0bd200"}, + {file = "pydantic-1.10.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9a93be313e40f12c6f2cb84533b226bbe23d0774872e38d83415e6890215e3a6"}, + {file = "pydantic-1.10.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d55aeb01bb7bd7c7e1bd904668a4a2ffcbb1c248e7ae9eb40a272fd7e67dd98b"}, + {file = "pydantic-1.10.1-cp37-cp37m-win_amd64.whl", hash = 
"sha256:43d41b6f13706488e854729955ba8f740e6ec375cd16b72b81dc24b9d84f0d15"}, + {file = "pydantic-1.10.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f31ffe0e38805a0e6410330f78147bb89193b136d7a5f79cae60d3e849b520a6"}, + {file = "pydantic-1.10.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8eee69eda7674977b079a21e7bf825b59d8bf15145300e8034ed3eb239ac444f"}, + {file = "pydantic-1.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f927bff6c319fc92e0a2cbeb2609b5c1cd562862f4b54ec905e353282b7c8b1"}, + {file = "pydantic-1.10.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb1bc3f8fef6ba36977108505e90558911e7fbccb4e930805d5dd90891b56ff4"}, + {file = "pydantic-1.10.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:96ab6ce1346d14c6e581a69c333bdd1b492df9cf85ad31ad77a8aa42180b7e09"}, + {file = "pydantic-1.10.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:444cf220a12134da1cd42fe4f45edff622139e10177ce3d8ef2b4f41db1291b2"}, + {file = "pydantic-1.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:dbfbff83565b4514dd8cebc8b8c81a12247e89427ff997ad0a9da7b2b1065c12"}, + {file = "pydantic-1.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5327406f4bfd5aee784e7ad2a6a5fdd7171c19905bf34cb1994a1ba73a87c468"}, + {file = "pydantic-1.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1072eae28bf034a311764c130784e8065201a90edbca10f495c906737b3bd642"}, + {file = "pydantic-1.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce901335667a68dfbc10dd2ee6c0d676b89210d754441c2469fbc37baf7ee2ed"}, + {file = "pydantic-1.10.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54d6465cd2112441305faf5143a491b40de07a203116b5755a2108e36b25308d"}, + {file = "pydantic-1.10.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2b5e5e7a0ec96704099e271911a1049321ba1afda92920df0769898a7e9a1298"}, + {file = "pydantic-1.10.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ae43704358304da45c1c3dd7056f173c618b252f91594bcb6d6f6b4c6c284dee"}, + {file = "pydantic-1.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:2d7da49229ffb1049779a5a6c1c50a26da164bd053cf8ee9042197dc08a98259"}, + {file = "pydantic-1.10.1-py3-none-any.whl", hash = "sha256:f8b10e59c035ff3dcc9791619d6e6c5141e0fa5cbe264e19e267b8d523b210bf"}, + {file = "pydantic-1.10.1.tar.gz", hash = "sha256:d41bb80347a8a2d51fbd6f1748b42aca14541315878447ba159617544712f770"}, ] pyflakes = [ {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, From 30b8d71dc427a44c777e016ce238ccb035a469f7 Mon Sep 17 00:00:00 2001 From: Ruben Fonseca Date: Thu, 1 Sep 2022 15:04:44 +0200 Subject: [PATCH 41/49] refactor(batch): remove legacy sqs_batch_processor (#1492) --- .../utilities/batch/__init__.py | 3 - aws_lambda_powertools/utilities/batch/base.py | 8 +- .../utilities/batch/exceptions.py | 13 - aws_lambda_powertools/utilities/batch/sqs.py | 250 --------------- docs/upgrade.md | 87 ++++++ docs/utilities/batch.md | 214 +------------ tests/functional/test_utilities_batch.py | 291 +----------------- tests/unit/test_utilities_batch.py | 141 --------- 8 files changed, 99 insertions(+), 908 deletions(-) delete mode 100644 aws_lambda_powertools/utilities/batch/sqs.py delete mode 100644 tests/unit/test_utilities_batch.py diff --git a/aws_lambda_powertools/utilities/batch/__init__.py b/aws_lambda_powertools/utilities/batch/__init__.py index 
463f6f7fbff..08c35560b3f 100644 --- a/aws_lambda_powertools/utilities/batch/__init__.py +++ b/aws_lambda_powertools/utilities/batch/__init__.py @@ -13,7 +13,6 @@ batch_processor, ) from aws_lambda_powertools.utilities.batch.exceptions import ExceptionInfo -from aws_lambda_powertools.utilities.batch.sqs import PartialSQSProcessor, sqs_batch_processor __all__ = ( "BatchProcessor", @@ -21,8 +20,6 @@ "ExceptionInfo", "EventType", "FailureResponse", - "PartialSQSProcessor", "SuccessResponse", "batch_processor", - "sqs_batch_processor", ) diff --git a/aws_lambda_powertools/utilities/batch/base.py b/aws_lambda_powertools/utilities/batch/base.py index 1122bea4c03..6e5a0ce1d85 100644 --- a/aws_lambda_powertools/utilities/batch/base.py +++ b/aws_lambda_powertools/utilities/batch/base.py @@ -170,19 +170,19 @@ def batch_processor( Lambda's Context record_handler: Callable Callable to process each record from the batch - processor: PartialSQSProcessor + processor: BasePartialProcessor Batch Processor to handle partial failure cases Examples -------- - **Processes Lambda's event with PartialSQSProcessor** + **Processes Lambda's event with a BasePartialProcessor** - >>> from aws_lambda_powertools.utilities.batch import batch_processor, PartialSQSProcessor + >>> from aws_lambda_powertools.utilities.batch import batch_processor, BatchProcessor >>> >>> def record_handler(record): >>> return record["body"] >>> - >>> @batch_processor(record_handler=record_handler, processor=PartialSQSProcessor()) + >>> @batch_processor(record_handler=record_handler, processor=BatchProcessor()) >>> def handler(event, context): >>> return {"StatusCode": 200} diff --git a/aws_lambda_powertools/utilities/batch/exceptions.py b/aws_lambda_powertools/utilities/batch/exceptions.py index d90c25f12bc..d541d18d18f 100644 --- a/aws_lambda_powertools/utilities/batch/exceptions.py +++ b/aws_lambda_powertools/utilities/batch/exceptions.py @@ -24,19 +24,6 @@ def format_exceptions(self, parent_exception_str): return "\n".join(exception_list) -class SQSBatchProcessingError(BaseBatchProcessingError): - """When at least one message within a batch could not be processed""" - - def __init__(self, msg="", child_exceptions: Optional[List[ExceptionInfo]] = None): - super().__init__(msg, child_exceptions) - - # Overriding this method so we can output all child exception tracebacks when we raise this exception to prevent - # errors being lost. 
See https://github.com/awslabs/aws-lambda-powertools-python/issues/275 - def __str__(self): - parent_exception_str = super(SQSBatchProcessingError, self).__str__() - return self.format_exceptions(parent_exception_str) - - class BatchProcessingError(BaseBatchProcessingError): """When all batch records failed to be processed""" diff --git a/aws_lambda_powertools/utilities/batch/sqs.py b/aws_lambda_powertools/utilities/batch/sqs.py deleted file mode 100644 index 7b234c1372e..00000000000 --- a/aws_lambda_powertools/utilities/batch/sqs.py +++ /dev/null @@ -1,250 +0,0 @@ -# -*- coding: utf-8 -*- - -""" -Batch SQS utilities -""" -import logging -import math -import sys -import warnings -from concurrent.futures import ThreadPoolExecutor, as_completed -from typing import Any, Callable, Dict, List, Optional, Tuple, cast - -import boto3 -from botocore.config import Config - -from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord - -from ...middleware_factory import lambda_handler_decorator -from .base import BasePartialProcessor -from .exceptions import SQSBatchProcessingError - -logger = logging.getLogger(__name__) - - -class PartialSQSProcessor(BasePartialProcessor): - """ - Amazon SQS batch processor to delete successes from the Queue. - - The whole batch will be processed, even if failures occur. After all records are processed, - SQSBatchProcessingError will be raised if there were any failures, causing messages to - be returned to the SQS queue. This behaviour can be disabled by passing suppress_exception. - - Parameters - ---------- - config: Config - botocore config object - suppress_exception: bool, optional - Supress exception raised if any messages fail processing, by default False - boto3_session : boto3.session.Session, optional - Boto3 session to use for AWS API communication - - - Example - ------- - **Process batch triggered by SQS** - - >>> from aws_lambda_powertools.utilities.batch import PartialSQSProcessor - >>> - >>> def record_handler(record): - >>> return record["body"] - >>> - >>> def handler(event, context): - >>> records = event["Records"] - >>> processor = PartialSQSProcessor() - >>> - >>> with processor(records=records, handler=record_handler): - >>> result = processor.process() - >>> - >>> # Case a partial failure occurred, all successful executions - >>> # have been deleted from the queue after context's exit. - >>> - >>> return result - - """ - - def __init__( - self, - config: Optional[Config] = None, - suppress_exception: bool = False, - boto3_session: Optional[boto3.session.Session] = None, - ): - """ - Initializes sqs client. - """ - config = config or Config() - session = boto3_session or boto3.session.Session() - self.client = session.client("sqs", config=config) - self.suppress_exception = suppress_exception - self.max_message_batch = 10 - - warnings.warn( - "The sqs_batch_processor decorator and PartialSQSProcessor class are now deprecated, " - "and will be removed in the next major version. " - "Please follow the upgrade guide at " - "https://awslabs.github.io/aws-lambda-powertools-python/latest/utilities/batch/#legacy " - "to use the native batch_processor decorator or BatchProcessor class." 
- ) - - super().__init__() - - def _get_queue_url(self) -> Optional[str]: - """ - Format QueueUrl from first records entry - """ - if not getattr(self, "records", None): - return None - - *_, account_id, queue_name = self.records[0]["eventSourceARN"].split(":") - return f"{self.client._endpoint.host}/{account_id}/{queue_name}" - - def _get_entries_to_clean(self) -> List[Dict[str, str]]: - """ - Format messages to use in batch deletion - """ - entries = [] - # success_messages has generic type of union of SQS, Dynamodb and Kinesis Streams records or Pydantic models. - # Here we get SQS Record only - messages = cast(List[SQSRecord], self.success_messages) - for msg in messages: - entries.append({"Id": msg["messageId"], "ReceiptHandle": msg["receiptHandle"]}) - return entries - - def _process_record(self, record) -> Tuple: - """ - Process a record with instance's handler - - Parameters - ---------- - record: Any - An object to be processed. - """ - try: - result = self.handler(record=record) - return self.success_handler(record=record, result=result) - except Exception: - return self.failure_handler(record=record, exception=sys.exc_info()) - - def _prepare(self): - """ - Remove results from previous execution. - """ - self.success_messages.clear() - self.fail_messages.clear() - - def _clean(self) -> Optional[List]: - """ - Delete messages from Queue in case of partial failure. - """ - - # If all messages were successful, fall back to the default SQS - - # Lambda behavior which deletes messages if Lambda responds successfully - if not self.fail_messages: - logger.debug(f"All {len(self.success_messages)} records successfully processed") - return None - - queue_url = self._get_queue_url() - if queue_url is None: - logger.debug("No queue url found") - return None - - entries_to_remove = self._get_entries_to_clean() - # Batch delete up to 10 messages at a time (SQS limit) - max_workers = math.ceil(len(entries_to_remove) / self.max_message_batch) - - if entries_to_remove: - with ThreadPoolExecutor(max_workers=max_workers) as executor: - futures, results = [], [] - while entries_to_remove: - futures.append( - executor.submit( - self._delete_messages, queue_url, entries_to_remove[: self.max_message_batch], self.client - ) - ) - entries_to_remove = entries_to_remove[self.max_message_batch :] - for future in as_completed(futures): - try: - logger.debug("Deleted batch of processed messages from SQS") - results.append(future.result()) - except Exception: - logger.exception("Couldn't remove batch of processed messages from SQS") - raise - if self.suppress_exception: - logger.debug(f"{len(self.fail_messages)} records failed processing, but exceptions are suppressed") - else: - logger.debug(f"{len(self.fail_messages)} records failed processing, raising exception") - raise SQSBatchProcessingError( - msg=f"Not all records processed successfully. 
{len(self.exceptions)} individual errors logged " - f"separately below.", - child_exceptions=self.exceptions, - ) - - return results - - def _delete_messages(self, queue_url: str, entries_to_remove: List, sqs_client: Any): - delete_message_response = sqs_client.delete_message_batch( - QueueUrl=queue_url, - Entries=entries_to_remove, - ) - return delete_message_response - - -@lambda_handler_decorator -def sqs_batch_processor( - handler: Callable, - event: Dict, - context: Dict, - record_handler: Callable, - config: Optional[Config] = None, - suppress_exception: bool = False, - boto3_session: Optional[boto3.session.Session] = None, -): - """ - Middleware to handle SQS batch event processing - - Parameters - ---------- - handler: Callable - Lambda's handler - event: Dict - Lambda's Event - context: Dict - Lambda's Context - record_handler: Callable - Callable to process each record from the batch - config: Config - botocore config object - suppress_exception: bool, optional - Supress exception raised if any messages fail processing, by default False - boto3_session : boto3.session.Session, optional - Boto3 session to use for AWS API communication - - Examples - -------- - **Processes Lambda's event with PartialSQSProcessor** - - >>> from aws_lambda_powertools.utilities.batch import sqs_batch_processor - >>> - >>> def record_handler(record): - >>> return record["body"] - >>> - >>> @sqs_batch_processor(record_handler=record_handler) - >>> def handler(event, context): - >>> return {"StatusCode": 200} - - Limitations - ----------- - * Async batch processors - - """ - config = config or Config() - session = boto3_session or boto3.session.Session() - - processor = PartialSQSProcessor(config=config, suppress_exception=suppress_exception, boto3_session=session) - - records = event["Records"] - - with processor(records, record_handler): - processor.process() - - return handler(event, context) diff --git a/docs/upgrade.md b/docs/upgrade.md index 91ad54e42d3..20cf4aa25a6 100644 --- a/docs/upgrade.md +++ b/docs/upgrade.md @@ -11,6 +11,7 @@ The transition from Powertools for Python v1 to v2 is as painless as possible, a Changes at a glance: * The API for **event handler's `Response`** has minor changes to support multi value headers and cookies. +* The **legacy SQS batch processor** was removed. ???+ important Powertools for Python v2 drops suport for Python 3.6, following the Python 3.6 End-Of-Life (EOL) reached on December 23, 2021. @@ -55,3 +56,89 @@ def get_todos(): cookies=["CookieName=CookieValue"] ) ``` + +## Legacy SQS Batch Processor + +The deprecated `PartialSQSProcessor` and `sqs_batch_processor` were removed. +You can migrate to the [native batch processing](https://aws.amazon.com/about-aws/whats-new/2021/11/aws-lambda-partial-batch-response-sqs-event-source/) capability by: + +1. If you use **`sqs_batch_decorator`** you can now use **`batch_processor`** decorator +2. If you use **`PartialSQSProcessor`** you can now use **`BatchProcessor`** +3. [Enable the functionality](../utilities/batch#required-resources) on SQS +4. 
Change your Lambda Handler to return the new response format + +=== "Decorator: Before" + + ```python hl_lines="1 6" + from aws_lambda_powertools.utilities.batch import sqs_batch_processor + + def record_handler(record): + return do_something_with(record["body"]) + + @sqs_batch_processor(record_handler=record_handler) + def lambda_handler(event, context): + return {"statusCode": 200} + ``` + +=== "Decorator: After" + + ```python hl_lines="3 5 11" + import json + + from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor + + processor = BatchProcessor(event_type=EventType.SQS) + + + def record_handler(record): + return do_something_with(record["body"]) + + @batch_processor(record_handler=record_handler, processor=processor) + def lambda_handler(event, context): + return processor.response() + ``` + +=== "Context manager: Before" + + ```python hl_lines="1-2 4 14 19" + from aws_lambda_powertools.utilities.batch import PartialSQSProcessor + from botocore.config import Config + + config = Config(region_name="us-east-1") + + def record_handler(record): + return_value = do_something_with(record["body"]) + return return_value + + + def lambda_handler(event, context): + records = event["Records"] + + processor = PartialSQSProcessor(config=config) + + with processor(records, record_handler): + result = processor.process() + + return result + ``` + +=== "Context manager: After" + + ```python hl_lines="1 11" + from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor + + + def record_handler(record): + return_value = do_something_with(record["body"]) + return return_value + + def lambda_handler(event, context): + records = event["Records"] + + processor = BatchProcessor(event_type=EventType.SQS) + + with processor(records, record_handler): + result = processor.process() + + return processor.response() + ``` diff --git a/docs/utilities/batch.md b/docs/utilities/batch.md index 6241179ed4e..2476a8d5ef5 100644 --- a/docs/utilities/batch.md +++ b/docs/utilities/batch.md @@ -5,11 +5,6 @@ description: Utility The batch processing utility handles partial failures when processing batches from Amazon SQS, Amazon Kinesis Data Streams, and Amazon DynamoDB Streams. -???+ warning - The legacy `sqs_batch_processor` decorator and `PartialSQSProcessor` class are deprecated and are going to be removed soon. - - Please check the [migration guide](#migration-guide) for more information. - ## Key Features * Reports batch item failures to reduce number of retries for a record upon errors @@ -1146,215 +1141,16 @@ class MyProcessor(BatchProcessor): return super().failure_handler(record, exception) ``` -## Legacy - -???+ tip - This is kept for historical purposes. Use the new [BatchProcessor](#processing-messages-from-sqs) instead. - -### Migration guide - -???+ info - Keep reading if you are using `sqs_batch_processor` or `PartialSQSProcessor`. - -[As of Nov 2021](https://aws.amazon.com/about-aws/whats-new/2021/11/aws-lambda-partial-batch-response-sqs-event-source/){target="_blank"}, this is no longer needed as both SQS, Kinesis, and DynamoDB Streams offer this capability natively with one caveat - it's an [opt-in feature](#required-resources). - -Being a native feature, we no longer need to instantiate boto3 nor other customizations like exception suppressing – this lowers the cost of your Lambda function as you can delegate deleting partial failures to Lambda. 
- -???+ tip - It's also easier to test since it's mostly a [contract based response](https://docs.aws.amazon.com/lambda/latest/dg/with-sqs.html#sqs-batchfailurereporting-syntax){target="_blank"}. - -You can migrate in three steps: - -1. If you are using **`sqs_batch_decorator`** you can now use **`batch_processor`** decorator -2. If you were using **`PartialSQSProcessor`** you can now use **`BatchProcessor`** -3. Change your Lambda Handler to return the new response format - -=== "Decorator: Before" - - ```python hl_lines="1 6" - from aws_lambda_powertools.utilities.batch import sqs_batch_processor - - def record_handler(record): - return do_something_with(record["body"]) - - @sqs_batch_processor(record_handler=record_handler) - def lambda_handler(event, context): - return {"statusCode": 200} - ``` - -=== "Decorator: After" - - ```python hl_lines="3 5 11" - import json - - from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor - - processor = BatchProcessor(event_type=EventType.SQS) - - - def record_handler(record): - return do_something_with(record["body"]) - - @batch_processor(record_handler=record_handler, processor=processor) - def lambda_handler(event, context): - return processor.response() - ``` - -=== "Context manager: Before" - - ```python hl_lines="1-2 4 14 19" - from aws_lambda_powertools.utilities.batch import PartialSQSProcessor - from botocore.config import Config - - config = Config(region_name="us-east-1") - - def record_handler(record): - return_value = do_something_with(record["body"]) - return return_value - - - def lambda_handler(event, context): - records = event["Records"] - - processor = PartialSQSProcessor(config=config) - - with processor(records, record_handler): - result = processor.process() - - return result - ``` - -=== "Context manager: After" - - ```python hl_lines="1 11" - from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor - - - def record_handler(record): - return_value = do_something_with(record["body"]) - return return_value - - def lambda_handler(event, context): - records = event["Records"] - - processor = BatchProcessor(event_type=EventType.SQS) - - with processor(records, record_handler): - result = processor.process() - - return processor.response() - ``` - -### Customizing boto configuration - -The **`config`** and **`boto3_session`** parameters enable you to pass in a custom [botocore config object](https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html) -or a custom [boto3 session](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html) when using the `sqs_batch_processor` -decorator or `PartialSQSProcessor` class. 
- -> Custom config example - -=== "Decorator" - - ```python hl_lines="4 12" - from aws_lambda_powertools.utilities.batch import sqs_batch_processor - from botocore.config import Config - - config = Config(region_name="us-east-1") - - def record_handler(record): - # This will be called for each individual message from a batch - # It should raise an exception if the message was not processed successfully - return_value = do_something_with(record["body"]) - return return_value - - @sqs_batch_processor(record_handler=record_handler, config=config) - def lambda_handler(event, context): - return {"statusCode": 200} - ``` - -=== "Context manager" - - ```python hl_lines="4 16" - from aws_lambda_powertools.utilities.batch import PartialSQSProcessor - from botocore.config import Config - - config = Config(region_name="us-east-1") - - def record_handler(record): - # This will be called for each individual message from a batch - # It should raise an exception if the message was not processed successfully - return_value = do_something_with(record["body"]) - return return_value - - - def lambda_handler(event, context): - records = event["Records"] - - processor = PartialSQSProcessor(config=config) - - with processor(records, record_handler): - result = processor.process() - - return result - ``` - -> Custom boto3 session example - -=== "Decorator" - - ```python hl_lines="4 12" - from aws_lambda_powertools.utilities.batch import sqs_batch_processor - from botocore.config import Config - - session = boto3.session.Session() - - def record_handler(record): - # This will be called for each individual message from a batch - # It should raise an exception if the message was not processed successfully - return_value = do_something_with(record["body"]) - return return_value - - @sqs_batch_processor(record_handler=record_handler, boto3_session=session) - def lambda_handler(event, context): - return {"statusCode": 200} - ``` - -=== "Context manager" - - ```python hl_lines="4 16" - from aws_lambda_powertools.utilities.batch import PartialSQSProcessor - import boto3 - - session = boto3.session.Session() - - def record_handler(record): - # This will be called for each individual message from a batch - # It should raise an exception if the message was not processed successfully - return_value = do_something_with(record["body"]) - return return_value - - - def lambda_handler(event, context): - records = event["Records"] - - processor = PartialSQSProcessor(boto3_session=session) - - with processor(records, record_handler): - result = processor.process() - - return result - ``` - ### Suppressing exceptions -If you want to disable the default behavior where `SQSBatchProcessingError` is raised if there are any errors, you can pass the `suppress_exception` boolean argument. +If you want to disable the default behavior where `BatchProcessingError` is raised if there are any errors, you can pass the `suppress_exception` boolean argument. 
=== "Decorator" ```python hl_lines="3" - from aws_lambda_powertools.utilities.batch import sqs_batch_processor + from aws_lambda_powertools.utilities.batch import batch_processor - @sqs_batch_processor(record_handler=record_handler, config=config, suppress_exception=True) + @batch_processor(record_handler=record_handler, suppress_exception=True) def lambda_handler(event, context): return {"statusCode": 200} ``` @@ -1362,9 +1158,9 @@ If you want to disable the default behavior where `SQSBatchProcessingError` is r === "Context manager" ```python hl_lines="3" - from aws_lambda_powertools.utilities.batch import PartialSQSProcessor + from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType - processor = PartialSQSProcessor(config=config, suppress_exception=True) + processor = BatchProcessor(event_type=EventType.SQS, suppress_exception=True) with processor(records, record_handler): result = processor.process() diff --git a/tests/functional/test_utilities_batch.py b/tests/functional/test_utilities_batch.py index a5e1e706437..2a7b0d3375f 100644 --- a/tests/functional/test_utilities_batch.py +++ b/tests/functional/test_utilities_batch.py @@ -1,21 +1,12 @@ import json -import math from random import randint from typing import Callable, Dict, Optional -from unittest.mock import patch import pytest from botocore.config import Config -from botocore.stub import Stubber - -from aws_lambda_powertools.utilities.batch import ( - BatchProcessor, - EventType, - PartialSQSProcessor, - batch_processor, - sqs_batch_processor, -) -from aws_lambda_powertools.utilities.batch.exceptions import BatchProcessingError, SQSBatchProcessingError + +from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, batch_processor +from aws_lambda_powertools.utilities.batch.exceptions import BatchProcessingError from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import DynamoDBRecord from aws_lambda_powertools.utilities.data_classes.kinesis_stream_event import KinesisStreamRecord from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord @@ -135,30 +126,6 @@ def config() -> Config: return Config(region_name="us-east-1") -@pytest.fixture(scope="function") -def partial_processor(config) -> PartialSQSProcessor: - return PartialSQSProcessor(config=config) - - -@pytest.fixture(scope="function") -def partial_processor_suppressed(config) -> PartialSQSProcessor: - return PartialSQSProcessor(config=config, suppress_exception=True) - - -@pytest.fixture(scope="function") -def stubbed_partial_processor(config) -> PartialSQSProcessor: - processor = PartialSQSProcessor(config=config) - with Stubber(processor.client) as stubber: - yield stubber, processor - - -@pytest.fixture(scope="function") -def stubbed_partial_processor_suppressed(config) -> PartialSQSProcessor: - processor = PartialSQSProcessor(config=config, suppress_exception=True) - with Stubber(processor.client) as stubber: - yield stubber, processor - - @pytest.fixture(scope="module") def order_event_factory() -> Callable: def factory(item: Dict) -> str: @@ -167,258 +134,6 @@ def factory(item: Dict) -> str: return factory -@pytest.mark.parametrize( - "success_messages_count", - ([1, 18, 34]), -) -def test_partial_sqs_processor_context_with_failure( - success_messages_count, sqs_event_factory, record_handler, partial_processor -): - """ - Test processor with one failing record and multiple processed records - """ - fail_record = sqs_event_factory("fail") - success_records = [sqs_event_factory("success") for i in 
range(0, success_messages_count)] - - records = [fail_record, *success_records] - - response = {"Successful": [{"Id": fail_record["messageId"]}], "Failed": []} - - with Stubber(partial_processor.client) as stubber: - for _ in range(0, math.ceil((success_messages_count / partial_processor.max_message_batch))): - stubber.add_response("delete_message_batch", response) - with pytest.raises(SQSBatchProcessingError) as error: - with partial_processor(records, record_handler) as ctx: - ctx.process() - - assert len(error.value.child_exceptions) == 1 - stubber.assert_no_pending_responses() - - -def test_partial_sqs_processor_context_with_failure_exception(sqs_event_factory, record_handler, partial_processor): - """ - Test processor with one failing record - """ - fail_record = sqs_event_factory("fail") - success_record = sqs_event_factory("success") - - records = [fail_record, success_record] - - with Stubber(partial_processor.client) as stubber: - stubber.add_client_error( - method="delete_message_batch", service_error_code="ServiceUnavailable", http_status_code=503 - ) - with pytest.raises(Exception) as error: - with partial_processor(records, record_handler) as ctx: - ctx.process() - - assert "ServiceUnavailable" in str(error.value) - stubber.assert_no_pending_responses() - - -def test_partial_sqs_processor_context_only_success(sqs_event_factory, record_handler, partial_processor): - """ - Test processor without failure - """ - first_record = sqs_event_factory("success") - second_record = sqs_event_factory("success") - - records = [first_record, second_record] - - with partial_processor(records, record_handler) as ctx: - result = ctx.process() - - assert result == [ - ("success", first_record["body"], first_record), - ("success", second_record["body"], second_record), - ] - - -def test_partial_sqs_processor_context_multiple_calls(sqs_event_factory, record_handler, partial_processor): - """ - Test processor without failure - """ - first_record = sqs_event_factory("success") - second_record = sqs_event_factory("success") - - records = [first_record, second_record] - - with partial_processor(records, record_handler) as ctx: - ctx.process() - - with partial_processor([first_record], record_handler) as ctx: - ctx.process() - - assert partial_processor.success_messages == [first_record] - - -def test_batch_processor_middleware_with_partial_sqs_processor(sqs_event_factory, record_handler, partial_processor): - """ - Test middleware's integration with PartialSQSProcessor - """ - - @batch_processor(record_handler=record_handler, processor=partial_processor) - def lambda_handler(event, context): - return True - - fail_record = sqs_event_factory("fail") - - event = {"Records": [sqs_event_factory("fail"), sqs_event_factory("fail"), sqs_event_factory("success")]} - response = {"Successful": [{"Id": fail_record["messageId"]}], "Failed": []} - - with Stubber(partial_processor.client) as stubber: - stubber.add_response("delete_message_batch", response) - with pytest.raises(SQSBatchProcessingError) as error: - lambda_handler(event, {}) - - assert len(error.value.child_exceptions) == 2 - stubber.assert_no_pending_responses() - - -@patch("aws_lambda_powertools.utilities.batch.sqs.PartialSQSProcessor") -def test_sqs_batch_processor_middleware( - patched_sqs_processor, sqs_event_factory, record_handler, stubbed_partial_processor -): - """ - Test middleware's integration with PartialSQSProcessor - """ - - @sqs_batch_processor(record_handler=record_handler) - def lambda_handler(event, context): - return True - - 
stubber, processor = stubbed_partial_processor - patched_sqs_processor.return_value = processor - - fail_record = sqs_event_factory("fail") - - event = {"Records": [sqs_event_factory("fail"), sqs_event_factory("success")]} - response = {"Successful": [{"Id": fail_record["messageId"]}], "Failed": []} - stubber.add_response("delete_message_batch", response) - with pytest.raises(SQSBatchProcessingError) as error: - lambda_handler(event, {}) - - assert len(error.value.child_exceptions) == 1 - stubber.assert_no_pending_responses() - - -def test_batch_processor_middleware_with_custom_processor(capsys, sqs_event_factory, record_handler, config): - """ - Test middlewares' integration with custom batch processor - """ - - class CustomProcessor(PartialSQSProcessor): - def failure_handler(self, record, exception): - print("Oh no ! It's a failure.") - return super().failure_handler(record, exception) - - processor = CustomProcessor(config=config) - - @batch_processor(record_handler=record_handler, processor=processor) - def lambda_handler(event, context): - return True - - fail_record = sqs_event_factory("fail") - - event = {"Records": [sqs_event_factory("fail"), sqs_event_factory("success")]} - response = {"Successful": [{"Id": fail_record["messageId"]}], "Failed": []} - - with Stubber(processor.client) as stubber: - stubber.add_response("delete_message_batch", response) - with pytest.raises(SQSBatchProcessingError) as error: - lambda_handler(event, {}) - - stubber.assert_no_pending_responses() - - assert len(error.value.child_exceptions) == 1 - assert capsys.readouterr().out == "Oh no ! It's a failure.\n" - - -def test_batch_processor_middleware_suppressed_exceptions( - sqs_event_factory, record_handler, partial_processor_suppressed -): - """ - Test middleware's integration with PartialSQSProcessor - """ - - @batch_processor(record_handler=record_handler, processor=partial_processor_suppressed) - def lambda_handler(event, context): - return True - - fail_record = sqs_event_factory("fail") - - event = {"Records": [sqs_event_factory("fail"), sqs_event_factory("fail"), sqs_event_factory("success")]} - response = {"Successful": [{"Id": fail_record["messageId"]}], "Failed": []} - - with Stubber(partial_processor_suppressed.client) as stubber: - stubber.add_response("delete_message_batch", response) - result = lambda_handler(event, {}) - - stubber.assert_no_pending_responses() - assert result is True - - -def test_partial_sqs_processor_suppressed_exceptions(sqs_event_factory, record_handler, partial_processor_suppressed): - """ - Test processor without failure - """ - - first_record = sqs_event_factory("success") - second_record = sqs_event_factory("fail") - records = [first_record, second_record] - - fail_record = sqs_event_factory("fail") - response = {"Successful": [{"Id": fail_record["messageId"]}], "Failed": []} - - with Stubber(partial_processor_suppressed.client) as stubber: - stubber.add_response("delete_message_batch", response) - with partial_processor_suppressed(records, record_handler) as ctx: - ctx.process() - - assert partial_processor_suppressed.success_messages == [first_record] - - -@patch("aws_lambda_powertools.utilities.batch.sqs.PartialSQSProcessor") -def test_sqs_batch_processor_middleware_suppressed_exception( - patched_sqs_processor, sqs_event_factory, record_handler, stubbed_partial_processor_suppressed -): - """ - Test middleware's integration with PartialSQSProcessor - """ - - @sqs_batch_processor(record_handler=record_handler) - def lambda_handler(event, context): - return True 
- - stubber, processor = stubbed_partial_processor_suppressed - patched_sqs_processor.return_value = processor - - fail_record = sqs_event_factory("fail") - - event = {"Records": [sqs_event_factory("fail"), sqs_event_factory("success")]} - response = {"Successful": [{"Id": fail_record["messageId"]}], "Failed": []} - stubber.add_response("delete_message_batch", response) - result = lambda_handler(event, {}) - - stubber.assert_no_pending_responses() - assert result is True - - -def test_partial_sqs_processor_context_only_failure(sqs_event_factory, record_handler, partial_processor): - """ - Test processor with only failures - """ - first_record = sqs_event_factory("fail") - second_record = sqs_event_factory("fail") - - records = [first_record, second_record] - with pytest.raises(SQSBatchProcessingError) as error: - with partial_processor(records, record_handler) as ctx: - ctx.process() - - assert len(error.value.child_exceptions) == 2 - - def test_batch_processor_middleware_success_only(sqs_event_factory, record_handler): # GIVEN first_record = SQSRecord(sqs_event_factory("success")) diff --git a/tests/unit/test_utilities_batch.py b/tests/unit/test_utilities_batch.py deleted file mode 100644 index 8cc4f0b0225..00000000000 --- a/tests/unit/test_utilities_batch.py +++ /dev/null @@ -1,141 +0,0 @@ -import pytest -from botocore.config import Config - -from aws_lambda_powertools.utilities.batch import PartialSQSProcessor -from aws_lambda_powertools.utilities.batch.exceptions import SQSBatchProcessingError - -# Maintenance: This will be deleted as part of legacy Batch deprecation - - -@pytest.fixture(scope="function") -def sqs_event(): - return { - "messageId": "059f36b4-87a3-44ab-83d2-661975830a7d", - "receiptHandle": "AQEBwJnKyrHigUMZj6rYigCgxlaS3SLy0a", - "body": "", - "attributes": {}, - "messageAttributes": {}, - "md5OfBody": "e4e68fb7bd0e697a0ae8f1bb342846b3", - "eventSource": "aws:sqs", - "eventSourceARN": "arn:aws:sqs:us-east-2:123456789012:my-queue", - "awsRegion": "us-east-1", - } - - -@pytest.fixture(scope="module") -def config() -> Config: - return Config(region_name="us-east-1") - - -@pytest.fixture(scope="function") -def partial_sqs_processor(config) -> PartialSQSProcessor: - return PartialSQSProcessor(config=config) - - -def test_partial_sqs_get_queue_url_with_records(mocker, sqs_event, partial_sqs_processor): - expected_url = "https://queue.amazonaws.com/123456789012/my-queue" - - records_mock = mocker.patch.object(PartialSQSProcessor, "records", create=True, new_callable=mocker.PropertyMock) - records_mock.return_value = [sqs_event] - - result = partial_sqs_processor._get_queue_url() - assert result == expected_url - - -def test_partial_sqs_get_queue_url_without_records(partial_sqs_processor): - assert partial_sqs_processor._get_queue_url() is None - - -def test_partial_sqs_get_entries_to_clean_with_success(mocker, sqs_event, partial_sqs_processor): - expected_entries = [{"Id": sqs_event["messageId"], "ReceiptHandle": sqs_event["receiptHandle"]}] - - success_messages_mock = mocker.patch.object( - PartialSQSProcessor, "success_messages", create=True, new_callable=mocker.PropertyMock - ) - success_messages_mock.return_value = [sqs_event] - - result = partial_sqs_processor._get_entries_to_clean() - - assert result == expected_entries - - -def test_partial_sqs_get_entries_to_clean_without_success(mocker, partial_sqs_processor): - expected_entries = [] - - success_messages_mock = mocker.patch.object( - PartialSQSProcessor, "success_messages", create=True, 
new_callable=mocker.PropertyMock - ) - success_messages_mock.return_value = [] - - result = partial_sqs_processor._get_entries_to_clean() - - assert result == expected_entries - - -def test_partial_sqs_process_record_success(mocker, partial_sqs_processor): - expected_value = mocker.sentinel.expected_value - - success_result = mocker.sentinel.success_result - record = mocker.sentinel.record - - handler_mock = mocker.patch.object(PartialSQSProcessor, "handler", create=True, return_value=success_result) - success_handler_mock = mocker.patch.object(PartialSQSProcessor, "success_handler", return_value=expected_value) - - result = partial_sqs_processor._process_record(record) - - handler_mock.assert_called_once_with(record=record) - success_handler_mock.assert_called_once_with(record=record, result=success_result) - - assert result == expected_value - - -def test_partial_sqs_process_record_failure(mocker, partial_sqs_processor): - expected_value = mocker.sentinel.expected_value - - failure_result = Exception() - record = mocker.sentinel.record - - handler_mock = mocker.patch.object(PartialSQSProcessor, "handler", create=True, side_effect=failure_result) - failure_handler_mock = mocker.patch.object(PartialSQSProcessor, "failure_handler", return_value=expected_value) - - result = partial_sqs_processor._process_record(record) - - handler_mock.assert_called_once_with(record=record) - - _, failure_handler_called_with_args = failure_handler_mock.call_args - failure_handler_mock.assert_called_once() - assert (failure_handler_called_with_args["record"]) == record - assert isinstance(failure_handler_called_with_args["exception"], tuple) - assert failure_handler_called_with_args["exception"][1] == failure_result - assert result == expected_value - - -def test_partial_sqs_prepare(mocker, partial_sqs_processor): - success_messages_mock = mocker.patch.object(partial_sqs_processor, "success_messages", spec=list) - failed_messages_mock = mocker.patch.object(partial_sqs_processor, "fail_messages", spec=list) - - partial_sqs_processor._prepare() - - success_messages_mock.clear.assert_called_once() - failed_messages_mock.clear.assert_called_once() - - -def test_partial_sqs_clean(monkeypatch, mocker, partial_sqs_processor): - records = [mocker.sentinel.record] - - monkeypatch.setattr(partial_sqs_processor, "fail_messages", records) - monkeypatch.setattr(partial_sqs_processor, "success_messages", records) - - queue_url_mock = mocker.patch.object(PartialSQSProcessor, "_get_queue_url") - entries_to_clean_mock = mocker.patch.object(PartialSQSProcessor, "_get_entries_to_clean") - - queue_url_mock.return_value = mocker.sentinel.queue_url - entries_to_clean_mock.return_value = [mocker.sentinel.entries_to_clean] - - client_mock = mocker.patch.object(partial_sqs_processor, "client", autospec=True) - with pytest.raises(SQSBatchProcessingError): - partial_sqs_processor._clean() - - client_mock.delete_message_batch.assert_called_once_with( - QueueUrl=mocker.sentinel.queue_url, Entries=[mocker.sentinel.entries_to_clean] - ) From 0c52e280a534e6e569f33f78b61634a4435df626 Mon Sep 17 00:00:00 2001 From: Release bot Date: Thu, 1 Sep 2022 13:05:12 +0000 Subject: [PATCH 42/49] update changelog with latest changes --- CHANGELOG.md | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ef29bb20d79..06793521924 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,6 +13,10 @@ * **ci:** event resolution for on_label_added workflow * **event_handler:** fix bug with previous 
array implementation +## Code Refactoring + +* **batch:** remove legacy sqs_batch_processor ([#1492](https://github.com/awslabs/aws-lambda-powertools-python/issues/1492)) + ## Documentation * **homepage:** note about v2 version @@ -27,8 +31,10 @@ ## Maintenance * **bandit:** update baseline -* **ci:** add missing description fields +* **ci:** remove dangling debug step * **ci:** limit E2E workflow run for source code change +* **ci:** sync package version with pypi +* **ci:** remove unused and undeclared OS matrix env * **ci:** create adhoc docs workflow for v2 * **ci:** create adhoc docs workflow for v2 * **ci:** create docs workflow for v2 @@ -37,16 +43,16 @@ * **ci:** add note for state persistence on comment_large_pr * **ci:** destructure assignment on comment_large_pr * **ci:** record pr details upon labeling -* **ci:** sync package version with pypi -* **ci:** remove unused and undeclared OS matrix env +* **ci:** add missing description fields +* **ci:** fix invalid dependency leftover * **ci:** enable ci checks for v2 * **ci:** add workflow to suggest splitting large PRs ([#1480](https://github.com/awslabs/aws-lambda-powertools-python/issues/1480)) * **ci:** add linter for GitHub Actions as pre-commit hook ([#1479](https://github.com/awslabs/aws-lambda-powertools-python/issues/1479)) -* **ci:** remove dangling debug step -* **ci:** fix invalid dependency leftover -* **deps-dev:** bump mypy-boto3-dynamodb from 1.24.55.post1 to 1.24.60 ([#306](https://github.com/awslabs/aws-lambda-powertools-python/issues/306)) +* **deps:** bump pydantic from 1.10.0 to 1.10.1 ([#1491](https://github.com/awslabs/aws-lambda-powertools-python/issues/1491)) * **deps-dev:** bump mypy-boto3-dynamodb from 1.24.55.post1 to 1.24.60 ([#1481](https://github.com/awslabs/aws-lambda-powertools-python/issues/1481)) +* **deps-dev:** bump mypy-boto3-dynamodb from 1.24.55.post1 to 1.24.60 ([#306](https://github.com/awslabs/aws-lambda-powertools-python/issues/306)) * **deps-dev:** bump mkdocs-material from 8.4.1 to 8.4.2 ([#1483](https://github.com/awslabs/aws-lambda-powertools-python/issues/1483)) +* **deps-dev:** bump flake8-variables-names from 0.0.4 to 0.0.5 ([#1490](https://github.com/awslabs/aws-lambda-powertools-python/issues/1490)) * **maintainers:** update release workflow link From ace020f743f55e21fdb077d6426dd2bb0e0a9c34 Mon Sep 17 00:00:00 2001 From: Ruben Fonseca Date: Thu, 1 Sep 2022 16:07:51 +0200 Subject: [PATCH 43/49] chore(maintenance): add discord link to first PR and first issue (#1493) Co-authored-by: Heitor Lessa --- .github/boring-cyborg.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/boring-cyborg.yml b/.github/boring-cyborg.yml index ecd67fc20d9..38760c85de6 100644 --- a/.github/boring-cyborg.yml +++ b/.github/boring-cyborg.yml @@ -95,6 +95,8 @@ labelPRBasedOnFilePath: firstPRWelcomeComment: > Thanks a lot for your first contribution! Please check out our contributing guidelines and don't hesitate to ask whatever you need. + In the meantime, check out the #python channel on our AWS Lambda Powertools Discord: [Invite link](https://discord.gg/B8zZKbbyET) + # Comment to be posted to congratulate user on their first merged PR firstPRMergeComment: > Awesome work, congrats on your first merged pull request and thank you for helping improve everyone's experience! @@ -103,6 +105,8 @@ firstPRMergeComment: > firstIssueWelcomeComment: > Thanks for opening your first issue here! We'll come back to you as soon as we can. 
+ In the meantime, check out the #python channel on our AWS Lambda Powertools Discord: [Invite link](https://discord.gg/B8zZKbbyET) + ###### IssueLink Adder ################################################################################################# # Insert Issue (Jira/Github etc) link in PR description based on the Issue ID in PR title. #insertIssueLinkInPrDescription: From a8ca8f31365eadda95a0e9a3f84c279dfcadfd36 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 1 Sep 2022 20:28:14 +0000 Subject: [PATCH 44/49] chore(deps-dev): bump aws-cdk-lib from 2.39.1 to 2.40.0 (#1495) Bumps [aws-cdk-lib](https://github.com/aws/aws-cdk) from 2.39.1 to 2.40.0. - [Release notes](https://github.com/aws/aws-cdk/releases) - [Changelog](https://github.com/aws/aws-cdk/blob/main/CHANGELOG.v2.md) - [Commits](https://github.com/aws/aws-cdk/compare/v2.39.1...v2.40.0) --- updated-dependencies: - dependency-name: aws-cdk-lib dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 10 +++++----- pyproject.toml | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/poetry.lock b/poetry.lock index 68582deb29f..9f9c37a982d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -22,7 +22,7 @@ tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy [[package]] name = "aws-cdk-lib" -version = "2.39.1" +version = "2.40.0" description = "Version 2 of the AWS Cloud Development Kit library" category = "dev" optional = false @@ -30,7 +30,7 @@ python-versions = "~=3.7" [package.dependencies] constructs = ">=10.0.0,<11.0.0" -jsii = ">=1.65.1,<2.0.0" +jsii = ">=1.66.0,<2.0.0" publication = ">=0.0.3" typeguard = ">=2.13.3,<2.14.0" @@ -1387,7 +1387,7 @@ pydantic = ["pydantic", "email-validator"] [metadata] lock-version = "1.1" python-versions = "^3.7.4" -content-hash = "e3d80709960638644c1d61cdcf0715bc684215ed48e1321eb29a32545cd32d11" +content-hash = "a5a1edc9bf18a4f3a67604f239ccab4c48df02f1ba6ff719060c5e0ae0f57d82" [metadata.files] atomicwrites = [ @@ -1398,8 +1398,8 @@ attrs = [ {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, ] aws-cdk-lib = [ - {file = "aws-cdk-lib-2.39.1.tar.gz", hash = "sha256:41a97713b52cc558a53269f97665dea263ee635159455b29c2ca64a8f4772bc6"}, - {file = "aws_cdk_lib-2.39.1-py3-none-any.whl", hash = "sha256:c22cafee652238dcc7dfab14849f45783503a27d76f9f7fcb51232d017e8576a"}, + {file = "aws-cdk-lib-2.40.0.tar.gz", hash = "sha256:0a4c2c9ad95e126b4a157134f968c7d0bcd74b6bd91f208be72aa562952114ce"}, + {file = "aws_cdk_lib-2.40.0-py3-none-any.whl", hash = "sha256:376c64aefe5971c121c6098ab7fd7efa7a61a9caa8f1b9deeeb87c1a5a768318"}, ] "aws-cdk.aws-apigatewayv2-alpha" = [ {file = "aws-cdk.aws-apigatewayv2-alpha-2.39.1a0.tar.gz", hash = "sha256:2a506e8e9015f1cf15f951b4dbc09ffee17d96aa77491b84ca1ab4b790388bdc"}, diff --git a/pyproject.toml b/pyproject.toml index f6ec1fd902e..658385b1a55 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -54,7 +54,7 @@ mike = "^0.6.0" mypy = "^0.971" retry = "^0.9.2" pytest-xdist = "^2.5.0" -aws-cdk-lib = "^2.38.1" +aws-cdk-lib = "^2.40.0" "aws-cdk.aws-apigatewayv2-alpha" = "^2.38.1-alpha.0" "aws-cdk.aws-apigatewayv2-integrations-alpha" = "^2.38.1-alpha.0" pytest-benchmark = "^3.4.1" From d24c6c5172716c366f2e50154a8670652de0da11 Mon Sep 17 
00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 1 Sep 2022 23:02:04 +0200 Subject: [PATCH 45/49] chore(deps-dev): bump black from 22.6.0 to 22.8.0 (#1494) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 50 +++++++++++++++++++++++++------------------------- pyproject.toml | 2 +- 2 files changed, 26 insertions(+), 26 deletions(-) diff --git a/poetry.lock b/poetry.lock index 9f9c37a982d..8c9c03b80fd 100644 --- a/poetry.lock +++ b/poetry.lock @@ -98,7 +98,7 @@ yaml = ["pyyaml"] [[package]] name = "black" -version = "22.6.0" +version = "22.8.0" description = "The uncompromising code formatter." category = "dev" optional = false @@ -1387,7 +1387,7 @@ pydantic = ["pydantic", "email-validator"] [metadata] lock-version = "1.1" python-versions = "^3.7.4" -content-hash = "a5a1edc9bf18a4f3a67604f239ccab4c48df02f1ba6ff719060c5e0ae0f57d82" +content-hash = "d68b1574701298104c70f376423ce805bad8b04a29e6c604d3ed3995fc15efa7" [metadata.files] atomicwrites = [ @@ -1418,29 +1418,29 @@ bandit = [ {file = "bandit-1.7.4.tar.gz", hash = "sha256:2d63a8c573417bae338962d4b9b06fbc6080f74ecd955a092849e1e65c717bd2"}, ] black = [ - {file = "black-22.6.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f586c26118bc6e714ec58c09df0157fe2d9ee195c764f630eb0d8e7ccce72e69"}, - {file = "black-22.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b270a168d69edb8b7ed32c193ef10fd27844e5c60852039599f9184460ce0807"}, - {file = "black-22.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6797f58943fceb1c461fb572edbe828d811e719c24e03375fd25170ada53825e"}, - {file = "black-22.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c85928b9d5f83b23cee7d0efcb310172412fbf7cb9d9ce963bd67fd141781def"}, - {file = "black-22.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:f6fe02afde060bbeef044af7996f335fbe90b039ccf3f5eb8f16df8b20f77666"}, - {file = "black-22.6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cfaf3895a9634e882bf9d2363fed5af8888802d670f58b279b0bece00e9a872d"}, - {file = "black-22.6.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94783f636bca89f11eb5d50437e8e17fbc6a929a628d82304c80fa9cd945f256"}, - {file = "black-22.6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:2ea29072e954a4d55a2ff58971b83365eba5d3d357352a07a7a4df0d95f51c78"}, - {file = "black-22.6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e439798f819d49ba1c0bd9664427a05aab79bfba777a6db94fd4e56fae0cb849"}, - {file = "black-22.6.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:187d96c5e713f441a5829e77120c269b6514418f4513a390b0499b0987f2ff1c"}, - {file = "black-22.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:074458dc2f6e0d3dab7928d4417bb6957bb834434516f21514138437accdbe90"}, - {file = "black-22.6.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a218d7e5856f91d20f04e931b6f16d15356db1c846ee55f01bac297a705ca24f"}, - {file = "black-22.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:568ac3c465b1c8b34b61cd7a4e349e93f91abf0f9371eda1cf87194663ab684e"}, - {file = "black-22.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6c1734ab264b8f7929cef8ae5f900b85d579e6cbfde09d7387da8f04771b51c6"}, - {file = "black-22.6.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9a3ac16efe9ec7d7381ddebcc022119794872abce99475345c5a61aa18c45ad"}, - {file = "black-22.6.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:b9fd45787ba8aa3f5e0a0a98920c1012c884622c6c920dbe98dbd05bc7c70fbf"}, - {file = "black-22.6.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7ba9be198ecca5031cd78745780d65a3f75a34b2ff9be5837045dce55db83d1c"}, - {file = "black-22.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a3db5b6409b96d9bd543323b23ef32a1a2b06416d525d27e0f67e74f1446c8f2"}, - {file = "black-22.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:560558527e52ce8afba936fcce93a7411ab40c7d5fe8c2463e279e843c0328ee"}, - {file = "black-22.6.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b154e6bbde1e79ea3260c4b40c0b7b3109ffcdf7bc4ebf8859169a6af72cd70b"}, - {file = "black-22.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:4af5bc0e1f96be5ae9bd7aaec219c901a94d6caa2484c21983d043371c733fc4"}, - {file = "black-22.6.0-py3-none-any.whl", hash = "sha256:ac609cf8ef5e7115ddd07d85d988d074ed00e10fbc3445aee393e70164a2219c"}, - {file = "black-22.6.0.tar.gz", hash = "sha256:6c6d39e28aed379aec40da1c65434c77d75e65bb59a1e1c283de545fb4e7c6c9"}, + {file = "black-22.8.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ce957f1d6b78a8a231b18e0dd2d94a33d2ba738cd88a7fe64f53f659eea49fdd"}, + {file = "black-22.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5107ea36b2b61917956d018bd25129baf9ad1125e39324a9b18248d362156a27"}, + {file = "black-22.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e8166b7bfe5dcb56d325385bd1d1e0f635f24aae14b3ae437102dedc0c186747"}, + {file = "black-22.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd82842bb272297503cbec1a2600b6bfb338dae017186f8f215c8958f8acf869"}, + {file = "black-22.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d839150f61d09e7217f52917259831fe2b689f5c8e5e32611736351b89bb2a90"}, + {file = "black-22.8.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a05da0430bd5ced89176db098567973be52ce175a55677436a271102d7eaa3fe"}, + {file = "black-22.8.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a098a69a02596e1f2a58a2a1c8d5a05d5a74461af552b371e82f9fa4ada8342"}, + {file = "black-22.8.0-cp36-cp36m-win_amd64.whl", hash = "sha256:5594efbdc35426e35a7defa1ea1a1cb97c7dbd34c0e49af7fb593a36bd45edab"}, + {file = "black-22.8.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a983526af1bea1e4cf6768e649990f28ee4f4137266921c2c3cee8116ae42ec3"}, + {file = "black-22.8.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b2c25f8dea5e8444bdc6788a2f543e1fb01494e144480bc17f806178378005e"}, + {file = "black-22.8.0-cp37-cp37m-win_amd64.whl", hash = "sha256:78dd85caaab7c3153054756b9fe8c611efa63d9e7aecfa33e533060cb14b6d16"}, + {file = "black-22.8.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:cea1b2542d4e2c02c332e83150e41e3ca80dc0fb8de20df3c5e98e242156222c"}, + {file = "black-22.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5b879eb439094751185d1cfdca43023bc6786bd3c60372462b6f051efa6281a5"}, + {file = "black-22.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0a12e4e1353819af41df998b02c6742643cfef58282915f781d0e4dd7a200411"}, + {file = "black-22.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3a73f66b6d5ba7288cd5d6dad9b4c9b43f4e8a4b789a94bf5abfb878c663eb3"}, + {file = "black-22.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:e981e20ec152dfb3e77418fb616077937378b322d7b26aa1ff87717fb18b4875"}, + {file = "black-22.8.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8ce13ffed7e66dda0da3e0b2eb1bdfc83f5812f66e09aca2b0978593ed636b6c"}, + {file 
= "black-22.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:32a4b17f644fc288c6ee2bafdf5e3b045f4eff84693ac069d87b1a347d861497"}, + {file = "black-22.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0ad827325a3a634bae88ae7747db1a395d5ee02cf05d9aa7a9bd77dfb10e940c"}, + {file = "black-22.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53198e28a1fb865e9fe97f88220da2e44df6da82b18833b588b1883b16bb5d41"}, + {file = "black-22.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:bc4d4123830a2d190e9cc42a2e43570f82ace35c3aeb26a512a2102bce5af7ec"}, + {file = "black-22.8.0-py3-none-any.whl", hash = "sha256:d2c21d439b2baf7aa80d6dd4e3659259be64c6f49dfd0f32091063db0e006db4"}, + {file = "black-22.8.0.tar.gz", hash = "sha256:792f7eb540ba9a17e8656538701d3eb1afcb134e3b45b71f20b25c77a8db7e6e"}, ] boto3 = [ {file = "boto3-1.24.63-py3-none-any.whl", hash = "sha256:719bfafbe4e076055aa1a51269ffdbe9c61446679b67f31d61c237976661154c"}, diff --git a/pyproject.toml b/pyproject.toml index 658385b1a55..9ff3685324f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,7 +31,7 @@ email-validator = {version = "*", optional = true } # issue #1148 coverage = {extras = ["toml"], version = "^6.2"} pytest = "^7.0.1" -black = "^22.6.0" +black = "^22.8.0" flake8-builtins = "^1.5.3" flake8-comprehensions = "^3.7.0" flake8-debugger = "^4.0.0" From 246738bb635fc9357fb93f867c61f78b6e0a6b22 Mon Sep 17 00:00:00 2001 From: Ruben Fonseca Date: Fri, 2 Sep 2022 17:21:50 +0200 Subject: [PATCH 46/49] feat(event_handler): add cookies as 1st class citizen in v2 (#1487) Co-authored-by: Heitor Lessa Co-authored-by: Heitor Lessa --- .../event_handler/api_gateway.py | 5 +- aws_lambda_powertools/shared/cookies.py | 118 +++++++++++ .../shared/headers_serializer.py | 16 +- docs/core/event_handler/api_gateway.md | 2 +- docs/upgrade.md | 2 +- .../src/fine_grained_responses.py | 3 +- .../src/fine_grained_responses_output.json | 2 +- .../e2e/event_handler/handlers/alb_handler.py | 20 +- .../handlers/api_gateway_http_handler.py | 20 +- .../handlers/api_gateway_rest_handler.py | 20 +- .../handlers/lambda_function_url_handler.py | 20 +- tests/e2e/event_handler/infrastructure.py | 4 +- .../event_handler/test_header_serializer.py | 188 ++++++++++++------ .../event_handler/test_api_gateway.py | 9 +- .../event_handler/test_lambda_function_url.py | 5 +- 15 files changed, 333 insertions(+), 101 deletions(-) create mode 100644 aws_lambda_powertools/shared/cookies.py diff --git a/aws_lambda_powertools/event_handler/api_gateway.py b/aws_lambda_powertools/event_handler/api_gateway.py index 2d315fcc434..126eee8b0aa 100644 --- a/aws_lambda_powertools/event_handler/api_gateway.py +++ b/aws_lambda_powertools/event_handler/api_gateway.py @@ -15,6 +15,7 @@ from aws_lambda_powertools.event_handler import content_types from aws_lambda_powertools.event_handler.exceptions import NotFoundError, ServiceError from aws_lambda_powertools.shared import constants +from aws_lambda_powertools.shared.cookies import Cookie from aws_lambda_powertools.shared.functions import resolve_truthy_env_var_choice from aws_lambda_powertools.shared.json_encoder import Encoder from aws_lambda_powertools.utilities.data_classes import ( @@ -147,7 +148,7 @@ def __init__( content_type: Optional[str], body: Union[str, bytes, None], headers: Optional[Dict[str, Union[str, List[str]]]] = None, - cookies: Optional[List[str]] = None, + cookies: Optional[List[Cookie]] = None, ): """ @@ -162,7 +163,7 @@ def __init__( Optionally set the response body. 
Note: bytes body will be automatically base64 encoded headers: dict[str, Union[str, List[str]]] Optionally set specific http headers. Setting "Content-Type" here would override the `content_type` value. - cookies: list[str] + cookies: list[Cookie] Optionally set cookies. """ self.status_code = status_code diff --git a/aws_lambda_powertools/shared/cookies.py b/aws_lambda_powertools/shared/cookies.py new file mode 100644 index 00000000000..944bcb5dc9f --- /dev/null +++ b/aws_lambda_powertools/shared/cookies.py @@ -0,0 +1,118 @@ +from datetime import datetime +from enum import Enum +from io import StringIO +from typing import List, Optional + + +class SameSite(Enum): + """ + SameSite allows a server to define a cookie attribute making it impossible for + the browser to send this cookie along with cross-site requests. The main + goal is to mitigate the risk of cross-origin information leakage, and provide + some protection against cross-site request forgery attacks. + + See https://tools.ietf.org/html/draft-ietf-httpbis-cookie-same-site-00 for details. + """ + + DEFAULT_MODE = "" + LAX_MODE = "Lax" + STRICT_MODE = "Strict" + NONE_MODE = "None" + + +def _format_date(timestamp: datetime) -> str: + # Specification example: Wed, 21 Oct 2015 07:28:00 GMT + return timestamp.strftime("%a, %d %b %Y %H:%M:%S GMT") + + +class Cookie: + """ + A Cookie represents an HTTP cookie as sent in the Set-Cookie header of an + HTTP response or the Cookie header of an HTTP request. + + See https://tools.ietf.org/html/rfc6265 for details. + """ + + def __init__( + self, + name: str, + value: str, + path: str = "", + domain: str = "", + secure: bool = True, + http_only: bool = False, + max_age: Optional[int] = None, + expires: Optional[datetime] = None, + same_site: Optional[SameSite] = None, + custom_attributes: Optional[List[str]] = None, + ): + """ + + Parameters + ---------- + name: str + The name of this cookie, for example session_id + value: str + The cookie value, for instance an uuid + path: str + The path for which this cookie is valid. Optional + domain: str + The domain for which this cookie is valid. Optional + secure: bool + Marks the cookie as secure, only sendable to the server with an encrypted request over the HTTPS protocol + http_only: bool + Enabling this attribute makes the cookie inaccessible to the JavaScript `Document.cookie` API + max_age: Optional[int] + Defines the period of time after which the cookie is invalid. Use negative values to force cookie deletion. + expires: Optional[datetime] + Defines a date where the permanent cookie expires. 
+ same_site: Optional[SameSite] + Determines if the cookie should be sent to third party websites + custom_attributes: Optional[List[str]] + List of additional custom attributes to set on the cookie + """ + self.name = name + self.value = value + self.path = path + self.domain = domain + self.secure = secure + self.expires = expires + self.max_age = max_age + self.http_only = http_only + self.same_site = same_site + self.custom_attributes = custom_attributes + + def __str__(self) -> str: + payload = StringIO() + payload.write(f"{self.name}={self.value}") + + if self.path: + payload.write(f"; Path={self.path}") + + if self.domain: + payload.write(f"; Domain={self.domain}") + + if self.expires: + payload.write(f"; Expires={_format_date(self.expires)}") + + if self.max_age: + if self.max_age > 0: + payload.write(f"; MaxAge={self.max_age}") + else: + # negative or zero max-age should be set to 0 + payload.write("; MaxAge=0") + + if self.http_only: + payload.write("; HttpOnly") + + if self.secure: + payload.write("; Secure") + + if self.same_site: + payload.write(f"; SameSite={self.same_site.value}") + + if self.custom_attributes: + for attr in self.custom_attributes: + payload.write(f"; {attr}") + + return payload.getvalue() diff --git a/aws_lambda_powertools/shared/headers_serializer.py b/aws_lambda_powertools/shared/headers_serializer.py index 4db7effe81b..796fd9aeae3 100644 --- a/aws_lambda_powertools/shared/headers_serializer.py +++ b/aws_lambda_powertools/shared/headers_serializer.py @@ -2,6 +2,8 @@ from collections import defaultdict from typing import Any, Dict, List, Union +from aws_lambda_powertools.shared.cookies import Cookie + class BaseHeadersSerializer: """ @@ -9,7 +11,7 @@ class BaseHeadersSerializer: ALB and Lambda Function URL response payload. """ - def serialize(self, headers: Dict[str, Union[str, List[str]]], cookies: List[str]) -> Dict[str, Any]: + def serialize(self, headers: Dict[str, Union[str, List[str]]], cookies: List[Cookie]) -> Dict[str, Any]: """ Serializes headers and cookies according to the request type. Returns a dict that can be merged with the response payload. @@ -25,7 +27,7 @@ def serialize(self, headers: Dict[str, Union[str, List[str]]], cookies: List[str class HttpApiHeadersSerializer(BaseHeadersSerializer): - def serialize(self, headers: Dict[str, Union[str, List[str]]], cookies: List[str]) -> Dict[str, Any]: + def serialize(self, headers: Dict[str, Union[str, List[str]]], cookies: List[Cookie]) -> Dict[str, Any]: """ When using HTTP APIs or LambdaFunctionURLs, everything is taken care automatically for us. We can directly assign a list of cookies and a dict of headers to the response payload, and the @@ -44,11 +46,11 @@ def serialize(self, headers: Dict[str, Union[str, List[str]]], cookies: List[str else: combined_headers[key] = ", ".join(values) - return {"headers": combined_headers, "cookies": cookies} + return {"headers": combined_headers, "cookies": list(map(str, cookies))} class MultiValueHeadersSerializer(BaseHeadersSerializer): - def serialize(self, headers: Dict[str, Union[str, List[str]]], cookies: List[str]) -> Dict[str, Any]: + def serialize(self, headers: Dict[str, Union[str, List[str]]], cookies: List[Cookie]) -> Dict[str, Any]: """ When using REST APIs, headers can be encoded using the `multiValueHeaders` key on the response. This is also the case when using an ALB integration with the `multiValueHeaders` option enabled. 
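A minimal sketch of the serialized output for two multi-value headers and two cookies, assuming the header and cookie values used in the e2e test fixtures below:

```python
# Sketch of MultiValueHeadersSerializer.serialize() output: each header value and
# each cookie (rendered via str(Cookie)) becomes one entry in a list under the
# single "multiValueHeaders" key understood by REST APIs and multi-value ALBs.
serialized = {
    "multiValueHeaders": {
        "Content-Type": ["text/plain"],
        "Vary": ["Accept-Encoding", "User-Agent"],
        "Set-Cookie": ["session_id=12345; Secure", "ab_experiment=3; Secure"],
    }
}
```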
@@ -69,13 +71,13 @@ def serialize(self, headers: Dict[str, Union[str, List[str]]], cookies: List[str if cookies: payload.setdefault("Set-Cookie", []) for cookie in cookies: - payload["Set-Cookie"].append(cookie) + payload["Set-Cookie"].append(str(cookie)) return {"multiValueHeaders": payload} class SingleValueHeadersSerializer(BaseHeadersSerializer): - def serialize(self, headers: Dict[str, Union[str, List[str]]], cookies: List[str]) -> Dict[str, Any]: + def serialize(self, headers: Dict[str, Union[str, List[str]]], cookies: List[Cookie]) -> Dict[str, Any]: """ The ALB integration has `multiValueHeaders` disabled by default. If we try to set multiple headers with the same key, or more than one cookie, print a warning. @@ -93,7 +95,7 @@ def serialize(self, headers: Dict[str, Union[str, List[str]]], cookies: List[str ) # We can only send one cookie, send the last one - payload["headers"]["Set-Cookie"] = cookies[-1] + payload["headers"]["Set-Cookie"] = str(cookies[-1]) for key, values in headers.items(): if isinstance(values, str): diff --git a/docs/core/event_handler/api_gateway.md b/docs/core/event_handler/api_gateway.md index c4cae718289..934465d6b96 100644 --- a/docs/core/event_handler/api_gateway.md +++ b/docs/core/event_handler/api_gateway.md @@ -323,7 +323,7 @@ You can use the `Response` class to have full control over the response. For exa === "fine_grained_responses.py" - ```python hl_lines="7 24-29" + ```python hl_lines="7 25-30" --8<-- "examples/event_handler_rest/src/fine_grained_responses.py" ``` diff --git a/docs/upgrade.md b/docs/upgrade.md index 20cf4aa25a6..3d1257f1c12 100644 --- a/docs/upgrade.md +++ b/docs/upgrade.md @@ -53,7 +53,7 @@ def get_todos(): return Response( # ... headers={"Content-Type": ["text/plain"]}, - cookies=["CookieName=CookieValue"] + cookies=[Cookie(name="session_id", value="12345", secure=True, http_only=True)], ) ``` diff --git a/examples/event_handler_rest/src/fine_grained_responses.py b/examples/event_handler_rest/src/fine_grained_responses.py index 4892de9c798..639b6a5b120 100644 --- a/examples/event_handler_rest/src/fine_grained_responses.py +++ b/examples/event_handler_rest/src/fine_grained_responses.py @@ -6,6 +6,7 @@ from aws_lambda_powertools import Logger, Tracer from aws_lambda_powertools.event_handler import APIGatewayRestResolver, Response, content_types from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.shared.cookies import Cookie from aws_lambda_powertools.utilities.typing import LambdaContext tracer = Tracer() @@ -26,7 +27,7 @@ def get_todos(): content_type=content_types.APPLICATION_JSON, body=todos.json()[:10], headers=custom_headers, - cookies=["=; Secure; Expires="], + cookies=[Cookie(name="session_id", value="12345")], ) diff --git a/examples/event_handler_rest/src/fine_grained_responses_output.json b/examples/event_handler_rest/src/fine_grained_responses_output.json index 1ce606839b1..0b33bd91542 100644 --- a/examples/event_handler_rest/src/fine_grained_responses_output.json +++ b/examples/event_handler_rest/src/fine_grained_responses_output.json @@ -3,7 +3,7 @@ "multiValueHeaders": { "Content-Type": ["application/json"], "X-Transaction-Id": ["3490eea9-791b-47a0-91a4-326317db61a9"], - "Set-Cookie": ["=; Secure; Expires="] + "Set-Cookie": ["session_id=12345; Secure"] }, "body": "{\"todos\":[{\"userId\":1,\"id\":1,\"title\":\"delectus aut autem\",\"completed\":false},{\"userId\":1,\"id\":2,\"title\":\"quis ut nam facilis et officia 
qui\",\"completed\":false},{\"userId\":1,\"id\":3,\"title\":\"fugiat veniam minus\",\"completed\":false},{\"userId\":1,\"id\":4,\"title\":\"et porro tempora\",\"completed\":true},{\"userId\":1,\"id\":5,\"title\":\"laboriosam mollitia et enim quasi adipisci quia provident illum\",\"completed\":false},{\"userId\":1,\"id\":6,\"title\":\"qui ullam ratione quibusdam voluptatem quia omnis\",\"completed\":false},{\"userId\":1,\"id\":7,\"title\":\"illo expedita consequatur quia in\",\"completed\":false},{\"userId\":1,\"id\":8,\"title\":\"quo adipisci enim quam ut ab\",\"completed\":true},{\"userId\":1,\"id\":9,\"title\":\"molestiae perspiciatis ipsa\",\"completed\":false},{\"userId\":1,\"id\":10,\"title\":\"illo est ratione doloremque quia maiores aut\",\"completed\":true}]}", "isBase64Encoded": false diff --git a/tests/e2e/event_handler/handlers/alb_handler.py b/tests/e2e/event_handler/handlers/alb_handler.py index 4c3f4f9dac3..0e386c82c51 100644 --- a/tests/e2e/event_handler/handlers/alb_handler.py +++ b/tests/e2e/event_handler/handlers/alb_handler.py @@ -3,14 +3,22 @@ app = ALBResolver() -@app.get("/todos") +@app.post("/todos") def hello(): + payload = app.current_event.json_body + + body = payload.get("body", "Hello World") + status_code = payload.get("status_code", 200) + headers = payload.get("headers", {}) + cookies = payload.get("cookies", []) + content_type = headers.get("Content-Type", content_types.TEXT_PLAIN) + return Response( - status_code=200, - content_type=content_types.TEXT_PLAIN, - body="Hello world", - cookies=["CookieMonster", "MonsterCookie"], - headers={"Foo": ["bar", "zbr"]}, + status_code=status_code, + content_type=content_type, + body=body, + cookies=cookies, + headers=headers, ) diff --git a/tests/e2e/event_handler/handlers/api_gateway_http_handler.py b/tests/e2e/event_handler/handlers/api_gateway_http_handler.py index 1a20b730285..990761cd3b9 100644 --- a/tests/e2e/event_handler/handlers/api_gateway_http_handler.py +++ b/tests/e2e/event_handler/handlers/api_gateway_http_handler.py @@ -3,14 +3,22 @@ app = APIGatewayHttpResolver() -@app.get("/todos") +@app.post("/todos") def hello(): + payload = app.current_event.json_body + + body = payload.get("body", "Hello World") + status_code = payload.get("status_code", 200) + headers = payload.get("headers", {}) + cookies = payload.get("cookies", []) + content_type = headers.get("Content-Type", content_types.TEXT_PLAIN) + return Response( - status_code=200, - content_type=content_types.TEXT_PLAIN, - body="Hello world", - cookies=["CookieMonster", "MonsterCookie"], - headers={"Foo": ["bar", "zbr"]}, + status_code=status_code, + content_type=content_type, + body=body, + cookies=cookies, + headers=headers, ) diff --git a/tests/e2e/event_handler/handlers/api_gateway_rest_handler.py b/tests/e2e/event_handler/handlers/api_gateway_rest_handler.py index 2f5ad0b94fa..0aa836cfe74 100644 --- a/tests/e2e/event_handler/handlers/api_gateway_rest_handler.py +++ b/tests/e2e/event_handler/handlers/api_gateway_rest_handler.py @@ -3,14 +3,22 @@ app = APIGatewayRestResolver() -@app.get("/todos") +@app.post("/todos") def hello(): + payload = app.current_event.json_body + + body = payload.get("body", "Hello World") + status_code = payload.get("status_code", 200) + headers = payload.get("headers", {}) + cookies = payload.get("cookies", []) + content_type = headers.get("Content-Type", content_types.TEXT_PLAIN) + return Response( - status_code=200, - content_type=content_types.TEXT_PLAIN, - body="Hello world", - cookies=["CookieMonster", 
"MonsterCookie"], - headers={"Foo": ["bar", "zbr"]}, + status_code=status_code, + content_type=content_type, + body=body, + cookies=cookies, + headers=headers, ) diff --git a/tests/e2e/event_handler/handlers/lambda_function_url_handler.py b/tests/e2e/event_handler/handlers/lambda_function_url_handler.py index 3fd4b46ea28..c9c825c38d2 100644 --- a/tests/e2e/event_handler/handlers/lambda_function_url_handler.py +++ b/tests/e2e/event_handler/handlers/lambda_function_url_handler.py @@ -3,14 +3,22 @@ app = LambdaFunctionUrlResolver() -@app.get("/todos") +@app.post("/todos") def hello(): + payload = app.current_event.json_body + + body = payload.get("body", "Hello World") + status_code = payload.get("status_code", 200) + headers = payload.get("headers", {}) + cookies = payload.get("cookies", []) + content_type = headers.get("Content-Type", content_types.TEXT_PLAIN) + return Response( - status_code=200, - content_type=content_types.TEXT_PLAIN, - body="Hello world", - cookies=["CookieMonster", "MonsterCookie"], - headers={"Foo": ["bar", "zbr"]}, + status_code=status_code, + content_type=content_type, + body=body, + cookies=cookies, + headers=headers, ) diff --git a/tests/e2e/event_handler/infrastructure.py b/tests/e2e/event_handler/infrastructure.py index 62421b8aac9..735261138f3 100644 --- a/tests/e2e/event_handler/infrastructure.py +++ b/tests/e2e/event_handler/infrastructure.py @@ -61,7 +61,7 @@ def _create_api_gateway_http(self, function: Function): apigw = apigwv2.HttpApi(self.stack, "APIGatewayHTTP", create_default_stage=True) apigw.add_routes( path="/todos", - methods=[apigwv2.HttpMethod.GET], + methods=[apigwv2.HttpMethod.POST], integration=apigwv2integrations.HttpLambdaIntegration("TodosIntegration", function), ) @@ -71,7 +71,7 @@ def _create_api_gateway_rest(self, function: Function): apigw = apigwv1.RestApi(self.stack, "APIGatewayRest", deploy_options=apigwv1.StageOptions(stage_name="dev")) todos = apigw.root.add_resource("todos") - todos.add_method("GET", apigwv1.LambdaIntegration(function, proxy=True)) + todos.add_method("POST", apigwv1.LambdaIntegration(function, proxy=True)) CfnOutput(self.stack, "APIGatewayRestUrl", value=apigw.url) diff --git a/tests/e2e/event_handler/test_header_serializer.py b/tests/e2e/event_handler/test_header_serializer.py index 2b1d51bfb3d..eedb69ccaad 100644 --- a/tests/e2e/event_handler/test_header_serializer.py +++ b/tests/e2e/event_handler/test_header_serializer.py @@ -1,6 +1,9 @@ +from uuid import uuid4 + import pytest from requests import Request +from aws_lambda_powertools.shared.cookies import Cookie from tests.e2e.utils import data_fetcher @@ -36,106 +39,179 @@ def lambda_function_url_endpoint(infrastructure: dict) -> str: def test_alb_headers_serializer(alb_basic_listener_endpoint): # GIVEN url = f"{alb_basic_listener_endpoint}/todos" + body = "Hello World" + status_code = 200 + headers = {"Content-Type": "text/plain", "Vary": ["Accept-Encoding", "User-Agent"]} + cookies = [ + Cookie(name="session_id", value=str(uuid4()), secure=True, http_only=True), + Cookie(name="ab_experiment", value="3"), + ] + last_cookie = cookies[-1] # WHEN - response = data_fetcher.get_http_response(Request(method="GET", url=url)) + response = data_fetcher.get_http_response( + Request( + method="POST", + url=url, + json={"body": body, "status_code": status_code, "headers": headers, "cookies": list(map(str, cookies))}, + ) + ) # THEN - assert response.status_code == 200 - assert response.content == b"Hello world" - assert response.headers["content-type"] == "text/plain" + 
assert response.status_code == status_code + # response.content is a binary string, needs to be decoded to compare with the real string + assert response.content.decode("ascii") == body - # Only the last header for key "Foo" should be set - assert response.headers["Foo"] == "zbr" + # Only the last header should be set + for key, value in headers.items(): + assert key in response.headers + value = value if isinstance(value, str) else sorted(value)[-1] + assert response.headers[key] == value # Only the last cookie should be set - assert "MonsterCookie" in response.cookies.keys() - assert "CookieMonster" not in response.cookies.keys() + assert len(response.cookies.items()) == 1 + assert last_cookie.name in response.cookies + assert response.cookies.get(last_cookie.name) == last_cookie.value def test_alb_multi_value_headers_serializer(alb_multi_value_header_listener_endpoint): # GIVEN url = f"{alb_multi_value_header_listener_endpoint}/todos" + body = "Hello World" + status_code = 200 + headers = {"Content-Type": "text/plain", "Vary": ["Accept-Encoding", "User-Agent"]} + cookies = [ + Cookie(name="session_id", value=str(uuid4()), secure=True, http_only=True), + Cookie(name="ab_experiment", value="3"), + ] # WHEN - response = data_fetcher.get_http_response(Request(method="GET", url=url)) + response = data_fetcher.get_http_response( + Request( + method="POST", + url=url, + json={"body": body, "status_code": status_code, "headers": headers, "cookies": list(map(str, cookies))}, + ) + ) # THEN - assert response.status_code == 200 - assert response.content == b"Hello world" - assert response.headers["content-type"] == "text/plain" + assert response.status_code == status_code + # response.content is a binary string, needs to be decoded to compare with the real string + assert response.content.decode("ascii") == body - # Only the last header for key "Foo" should be set - assert "Foo" in response.headers - foo_headers = [x.strip() for x in response.headers["Foo"].split(",")] - assert sorted(foo_headers) == ["bar", "zbr"] + for key, value in headers.items(): + assert key in response.headers + value = value if isinstance(value, str) else ", ".join(sorted(value)) - # Only the last cookie should be set - assert "MonsterCookie" in response.cookies.keys() - assert "CookieMonster" in response.cookies.keys() + # ALB sorts the header values randomly, so we have to re-order them for comparison here + returned_value = ", ".join(sorted(response.headers[key].split(", "))) + assert returned_value == value + + for cookie in cookies: + assert cookie.name in response.cookies + assert response.cookies.get(cookie.name) == cookie.value def test_api_gateway_rest_headers_serializer(apigw_rest_endpoint): # GIVEN - url = f"{apigw_rest_endpoint}/todos" + url = f"{apigw_rest_endpoint}todos" + body = "Hello World" + status_code = 200 + headers = {"Content-Type": "text/plain", "Vary": ["Accept-Encoding", "User-Agent"]} + cookies = [ + Cookie(name="session_id", value=str(uuid4()), secure=True, http_only=True), + Cookie(name="ab_experiment", value="3"), + ] # WHEN - response = data_fetcher.get_http_response(Request(method="GET", url=url)) + response = data_fetcher.get_http_response( + Request( + method="POST", + url=url, + json={"body": body, "status_code": status_code, "headers": headers, "cookies": list(map(str, cookies))}, + ) + ) # THEN - assert response.status_code == 200 - assert response.content == b"Hello world" - assert response.headers["content-type"] == "text/plain" + assert response.status_code == status_code + # 
response.content is a binary string, needs to be decoded to compare with the real string + assert response.content.decode("ascii") == body - # Only the last header for key "Foo" should be set - assert "Foo" in response.headers - foo_headers = [x.strip() for x in response.headers["Foo"].split(",")] - assert sorted(foo_headers) == ["bar", "zbr"] + for key, value in headers.items(): + assert key in response.headers + value = value if isinstance(value, str) else ", ".join(sorted(value)) + assert response.headers[key] == value - # Only the last cookie should be set - assert "MonsterCookie" in response.cookies.keys() - assert "CookieMonster" in response.cookies.keys() + for cookie in cookies: + assert cookie.name in response.cookies + assert response.cookies.get(cookie.name) == cookie.value def test_api_gateway_http_headers_serializer(apigw_http_endpoint): # GIVEN - url = f"{apigw_http_endpoint}/todos" + url = f"{apigw_http_endpoint}todos" + body = "Hello World" + status_code = 200 + headers = {"Content-Type": "text/plain", "Vary": ["Accept-Encoding", "User-Agent"]} + cookies = [ + Cookie(name="session_id", value=str(uuid4()), secure=True, http_only=True), + Cookie(name="ab_experiment", value="3"), + ] # WHEN - response = data_fetcher.get_http_response(Request(method="GET", url=url)) + response = data_fetcher.get_http_response( + Request( + method="POST", + url=url, + json={"body": body, "status_code": status_code, "headers": headers, "cookies": list(map(str, cookies))}, + ) + ) # THEN - assert response.status_code == 200 - assert response.content == b"Hello world" - assert response.headers["content-type"] == "text/plain" + assert response.status_code == status_code + # response.content is a binary string, needs to be decoded to compare with the real string + assert response.content.decode("ascii") == body - # Only the last header for key "Foo" should be set - assert "Foo" in response.headers - foo_headers = [x.strip() for x in response.headers["Foo"].split(",")] - assert sorted(foo_headers) == ["bar", "zbr"] + for key, value in headers.items(): + assert key in response.headers + value = value if isinstance(value, str) else ", ".join(sorted(value)) + assert response.headers[key] == value - # Only the last cookie should be set - assert "MonsterCookie" in response.cookies.keys() - assert "CookieMonster" in response.cookies.keys() + for cookie in cookies: + assert cookie.name in response.cookies + assert response.cookies.get(cookie.name) == cookie.value def test_lambda_function_url_headers_serializer(lambda_function_url_endpoint): # GIVEN url = f"{lambda_function_url_endpoint}todos" # the function url endpoint already has the trailing / + body = "Hello World" + status_code = 200 + headers = {"Content-Type": "text/plain", "Vary": ["Accept-Encoding", "User-Agent"]} + cookies = [ + Cookie(name="session_id", value=str(uuid4()), secure=True, http_only=True), + Cookie(name="ab_experiment", value="3"), + ] # WHEN - response = data_fetcher.get_http_response(Request(method="GET", url=url)) + response = data_fetcher.get_http_response( + Request( + method="POST", + url=url, + json={"body": body, "status_code": status_code, "headers": headers, "cookies": list(map(str, cookies))}, + ) + ) # THEN - assert response.status_code == 200 - assert response.content == b"Hello world" - assert response.headers["content-type"] == "text/plain" - - # Only the last header for key "Foo" should be set - assert "Foo" in response.headers - foo_headers = [x.strip() for x in response.headers["Foo"].split(",")] - assert 
sorted(foo_headers) == ["bar", "zbr"] - - # Only the last cookie should be set - assert "MonsterCookie" in response.cookies.keys() - assert "CookieMonster" in response.cookies.keys() + assert response.status_code == status_code + # response.content is a binary string, needs to be decoded to compare with the real string + assert response.content.decode("ascii") == body + + for key, value in headers.items(): + assert key in response.headers + value = value if isinstance(value, str) else ", ".join(sorted(value)) + assert response.headers[key] == value + + for cookie in cookies: + assert cookie.name in response.cookies + assert response.cookies.get(cookie.name) == cookie.value diff --git a/tests/functional/event_handler/test_api_gateway.py b/tests/functional/event_handler/test_api_gateway.py index 125a0f8c147..989475a934e 100644 --- a/tests/functional/event_handler/test_api_gateway.py +++ b/tests/functional/event_handler/test_api_gateway.py @@ -30,6 +30,7 @@ UnauthorizedError, ) from aws_lambda_powertools.shared import constants +from aws_lambda_powertools.shared.cookies import Cookie from aws_lambda_powertools.shared.json_encoder import Encoder from aws_lambda_powertools.utilities.data_classes import ( ALBEvent, @@ -98,7 +99,7 @@ def get_lambda() -> Response: def test_api_gateway_v1_cookies(): # GIVEN a Http API V1 proxy type event app = APIGatewayRestResolver() - cookie = "CookieMonster" + cookie = Cookie(name="CookieMonster", value="MonsterCookie") @app.get("/my/path") def get_lambda() -> Response: @@ -111,7 +112,7 @@ def get_lambda() -> Response: # THEN process event correctly # AND set the current_event type as APIGatewayProxyEvent assert result["statusCode"] == 200 - assert result["multiValueHeaders"]["Set-Cookie"] == [cookie] + assert result["multiValueHeaders"]["Set-Cookie"] == ["CookieMonster=MonsterCookie; Secure"] def test_api_gateway(): @@ -158,7 +159,7 @@ def my_path() -> Response: def test_api_gateway_v2_cookies(): # GIVEN a Http API V2 proxy type event app = APIGatewayHttpResolver() - cookie = "CookieMonster" + cookie = Cookie(name="CookieMonster", value="MonsterCookie") @app.post("/my/path") def my_path() -> Response: @@ -172,7 +173,7 @@ def my_path() -> Response: # AND set the current_event type as APIGatewayProxyEventV2 assert result["statusCode"] == 200 assert result["headers"]["Content-Type"] == content_types.TEXT_PLAIN - assert result["cookies"] == [cookie] + assert result["cookies"] == ["CookieMonster=MonsterCookie; Secure"] def test_include_rule_matching(): diff --git a/tests/functional/event_handler/test_lambda_function_url.py b/tests/functional/event_handler/test_lambda_function_url.py index ae0a231d46b..c87d0ecb854 100644 --- a/tests/functional/event_handler/test_lambda_function_url.py +++ b/tests/functional/event_handler/test_lambda_function_url.py @@ -1,4 +1,5 @@ from aws_lambda_powertools.event_handler import LambdaFunctionUrlResolver, Response, content_types +from aws_lambda_powertools.shared.cookies import Cookie from aws_lambda_powertools.utilities.data_classes import LambdaFunctionUrlEvent from tests.functional.utils import load_event @@ -28,7 +29,7 @@ def foo(): def test_lambda_function_url_event_with_cookies(): # GIVEN a Lambda Function Url type event app = LambdaFunctionUrlResolver() - cookie = "CookieMonster" + cookie = Cookie(name="CookieMonster", value="MonsterCookie") @app.get("/") def foo(): @@ -42,7 +43,7 @@ def foo(): # THEN process event correctly # AND set the current_event type as LambdaFunctionUrlEvent assert result["statusCode"] == 200 - assert 
result["cookies"] == [cookie] + assert result["cookies"] == ["CookieMonster=MonsterCookie; Secure"] def test_lambda_function_url_no_matches(): From 522bae0fbba6b58b6b3526029be35e15ff30e4d9 Mon Sep 17 00:00:00 2001 From: Release bot Date: Fri, 2 Sep 2022 15:22:16 +0000 Subject: [PATCH 47/49] update changelog with latest changes --- CHANGELOG.md | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 06793521924..ca8a1242ebd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -25,16 +25,20 @@ ## Features * **ci:** add actionlint in pre-commit hook +* **event_handler:** add cookies as 1st class citizen in v2 ([#1487](https://github.com/awslabs/aws-lambda-powertools-python/issues/1487)) * **event_handler:** improved support for headers and cookies in v2 ([#1455](https://github.com/awslabs/aws-lambda-powertools-python/issues/1455)) * **event_sources:** add CloudWatch dashboard custom widget event ([#1474](https://github.com/awslabs/aws-lambda-powertools-python/issues/1474)) ## Maintenance * **bandit:** update baseline -* **ci:** remove dangling debug step +* **ci:** add workflow to suggest splitting large PRs ([#1480](https://github.com/awslabs/aws-lambda-powertools-python/issues/1480)) * **ci:** limit E2E workflow run for source code change -* **ci:** sync package version with pypi +* **ci:** add linter for GitHub Actions as pre-commit hook ([#1479](https://github.com/awslabs/aws-lambda-powertools-python/issues/1479)) +* **ci:** add missing description fields * **ci:** remove unused and undeclared OS matrix env +* **ci:** sync package version with pypi +* **ci:** fix invalid dependency leftover * **ci:** create adhoc docs workflow for v2 * **ci:** create adhoc docs workflow for v2 * **ci:** create docs workflow for v2 @@ -43,17 +47,17 @@ * **ci:** add note for state persistence on comment_large_pr * **ci:** destructure assignment on comment_large_pr * **ci:** record pr details upon labeling -* **ci:** add missing description fields -* **ci:** fix invalid dependency leftover +* **ci:** remove dangling debug step * **ci:** enable ci checks for v2 -* **ci:** add workflow to suggest splitting large PRs ([#1480](https://github.com/awslabs/aws-lambda-powertools-python/issues/1480)) -* **ci:** add linter for GitHub Actions as pre-commit hook ([#1479](https://github.com/awslabs/aws-lambda-powertools-python/issues/1479)) * **deps:** bump pydantic from 1.10.0 to 1.10.1 ([#1491](https://github.com/awslabs/aws-lambda-powertools-python/issues/1491)) +* **deps-dev:** bump black from 22.6.0 to 22.8.0 ([#1494](https://github.com/awslabs/aws-lambda-powertools-python/issues/1494)) * **deps-dev:** bump mypy-boto3-dynamodb from 1.24.55.post1 to 1.24.60 ([#1481](https://github.com/awslabs/aws-lambda-powertools-python/issues/1481)) * **deps-dev:** bump mypy-boto3-dynamodb from 1.24.55.post1 to 1.24.60 ([#306](https://github.com/awslabs/aws-lambda-powertools-python/issues/306)) * **deps-dev:** bump mkdocs-material from 8.4.1 to 8.4.2 ([#1483](https://github.com/awslabs/aws-lambda-powertools-python/issues/1483)) * **deps-dev:** bump flake8-variables-names from 0.0.4 to 0.0.5 ([#1490](https://github.com/awslabs/aws-lambda-powertools-python/issues/1490)) +* **deps-dev:** bump aws-cdk-lib from 2.39.1 to 2.40.0 ([#1495](https://github.com/awslabs/aws-lambda-powertools-python/issues/1495)) * **maintainers:** update release workflow link +* **maintenance:** add discord link to first PR and first issue 
([#1493](https://github.com/awslabs/aws-lambda-powertools-python/issues/1493)) From f2a6c9b362cdeaa9cf4a83e21db8fdc80e6f336f Mon Sep 17 00:00:00 2001 From: Release bot Date: Sun, 4 Sep 2022 05:30:15 +0000 Subject: [PATCH 48/49] update changelog with latest changes --- CHANGELOG.md | 2065 ++++++++++++++++++-------------------------------- 1 file changed, 742 insertions(+), 1323 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ca8a1242ebd..13f12eb68b9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,1364 +6,823 @@ ## Bug Fixes -* **ci:** pass core fns to large pr workflow script -* **ci:** on_label permissioning model & workflow execution -* **ci:** ensure PR_AUTHOR is present for large_pr_split workflow -* **ci:** gracefully and successful exit changelog upon no changes +* remove apigw contract when using event-handler, apigw tracing +* path to artefact +* Fix issue with strip_prefixes ([#647](https://github.com/awslabs/aws-lambda-powertools-python/issues/647)) +* removed ambiguous quotes from labels. +* change supported python version from 3.6.1 to 3.6.2, bump black ([#807](https://github.com/awslabs/aws-lambda-powertools-python/issues/807)) +* mathc the name of the cdk synth from the build phase +* package_logger as const over logger instance +* repurpose test to cover parent loggers case +* use decorators, split cold start to ease reading +* use addHandler over monkeypatch +* add entire ARN role instead of account and role name +* download artefact into the layer dir +* remove unused json import +* incorrect log keys, indentation, snippet consistency +* remove f-strings that doesn't evaluate expr +* no need to cache npm since we only install cdk cli and don't have .lock files +* typo in input for layer workflow +* sight, yes a whitespace character breaks the build +* unzip the right artifact name +* **api-gateway:** route regression non-word and unsafe URI chars ([#556](https://github.com/awslabs/aws-lambda-powertools-python/issues/556)) +* **api-gateway:** incorrect plain text mimetype [#506](https://github.com/awslabs/aws-lambda-powertools-python/issues/506) +* **api-gateway:** non-greedy route pattern regex ([#533](https://github.com/awslabs/aws-lambda-powertools-python/issues/533)) +* **api-gateway:** HTTP API strip stage name from request path ([#622](https://github.com/awslabs/aws-lambda-powertools-python/issues/622)) +* **api_gateway:** allow whitespace in routes' path parameter ([#1099](https://github.com/awslabs/aws-lambda-powertools-python/issues/1099)) +* **api_gateway:** allow whitespace in routes' path parameter ([#1099](https://github.com/awslabs/aws-lambda-powertools-python/issues/1099)) +* **apigateway:** support [@app](https://github.com/app).not_found() syntax & housekeeping ([#926](https://github.com/awslabs/aws-lambda-powertools-python/issues/926)) +* **apigateway:** allow list of HTTP methods in route method ([#838](https://github.com/awslabs/aws-lambda-powertools-python/issues/838)) +* **apigateway:** remove indentation in debug_mode ([#987](https://github.com/awslabs/aws-lambda-powertools-python/issues/987)) +* **batch:** report multiple failures ([#967](https://github.com/awslabs/aws-lambda-powertools-python/issues/967)) +* **batch:** delete >10 messages in legacy sqs processor ([#818](https://github.com/awslabs/aws-lambda-powertools-python/issues/818)) +* **batch:** missing space in BatchProcessingError message ([#1201](https://github.com/awslabs/aws-lambda-powertools-python/issues/1201)) +* **batch:** docstring fix for success_handler() record 
parameter ([#1202](https://github.com/awslabs/aws-lambda-powertools-python/issues/1202)) +* **batch:** bugfix to clear exceptions between executions ([#1022](https://github.com/awslabs/aws-lambda-powertools-python/issues/1022)) +* **ci:** move from pip-tools to poetry on layers reusable workflow +* **ci:** lambda layer workflow release version and conditionals ([#1316](https://github.com/awslabs/aws-lambda-powertools-python/issues/1316)) +* **ci:** fetch all git info so we can check tags +* **ci:** lambda layer workflow release version and conditionals ([#1316](https://github.com/awslabs/aws-lambda-powertools-python/issues/1316)) +* **ci:** remove additional quotes in PR action ([#1317](https://github.com/awslabs/aws-lambda-powertools-python/issues/1317)) +* **ci:** install poetry before calling setup/python with cache ([#1315](https://github.com/awslabs/aws-lambda-powertools-python/issues/1315)) +* **ci:** keep layer version permission ([#1318](https://github.com/awslabs/aws-lambda-powertools-python/issues/1318)) +* **ci:** pr label regex for special chars in title +* **ci:** address conditional type on_merge +* **ci:** unexpected symbol due to double quotes... +* **ci:** address gh-actions additional quotes; remove debug +* **ci:** regex group name for on_merge workflow +* **ci:** escape outputs as certain PRs can break GH Actions expressions +* **ci:** move conditionals from yaml to code; leftover +* **ci:** move conditionals from yaml to code +* **ci:** accept core arg in label related issue workflow +* **ci:** match the name of the cdk synth from the build phase +* **ci:** regex to catch combination of related issues workflow +* **ci:** checkout project before validating related issue workflow +* **ci:** remove unsupported env in workflow_call +* **ci:** accept core arg in label related issue workflow +* **ci:** only event is resolved in cond +* **ci:** cond doesnt support two expr w/ env +* **ci:** label_related_issue unresolved var from history mixup +* **ci:** remove unused secret +* **ci:** allow inherit secrets for reusable workflow +* **ci:** add missing oidc token generation permission +* **ci:** job permissions +* **ci:** add additional input to accurately describe intent on skip +* **ci:** regex to catch combination of related issues workflow +* **ci:** checkout project before validating related issue workflow +* **ci:** changelog workflow must receive git tags too +* **ci:** improve msg visibility on closed issues +* **ci:** add explicit origin to fix release detached head +* **ci:** disable merged_pr workflow +* **ci:** merged_pr add issues write access +* **ci:** quote prBody GH expr on_opened_pr +* **ci:** remove utf-8 body in octokit body req +* **ci:** reusable workflow secrets param +* **ci:** temporarily disable changelog upon release +* **ci:** move from pip-tools to poetry on layers +* **ci:** add cdk v2 dep for layers workflow +* **ci:** disable poetry venv for layer workflow as cdk ignores venv +* **ci:** use poetry to resolve layer deps; pip for CDK +* **ci:** typo and bust gh actions cache +* **ci:** move from pip-tools to poetry on layers to fix conflicts +* **ci:** del flake8 direct dep over py3.6 conflicts and docs failure +* **ci:** fixes typos and small issues on github scripts ([#1302](https://github.com/awslabs/aws-lambda-powertools-python/issues/1302)) +* **ci:** calculate parallel jobs based on infrastructure needs ([#1475](https://github.com/awslabs/aws-lambda-powertools-python/issues/1475)) * **ci:** event resolution for on_label_added workflow +* 
**ci:** comment custom publish version checker +* **ci:** gracefully and successful exit changelog upon no changes +* **ci:** ensure PR_AUTHOR is present for large_pr_split workflow +* **ci:** on_label permissioning model & workflow execution +* **ci:** pass core fns to large pr workflow script +* **ci:** address pr title semantic not found logic +* **ci:** skip sync master on docs hotfix +* **data-classes:** use correct asdict funciton ([#666](https://github.com/awslabs/aws-lambda-powertools-python/issues/666)) +* **data-classes:** docstring typos and clean up ([#937](https://github.com/awslabs/aws-lambda-powertools-python/issues/937)) +* **data-classes:** include milliseconds in scalar types ([#504](https://github.com/awslabs/aws-lambda-powertools-python/issues/504)) +* **data-classes:** underscore support in api gateway authorizer resource name ([#969](https://github.com/awslabs/aws-lambda-powertools-python/issues/969)) +* **data-classes:** Add missing SES fields and ([#1045](https://github.com/awslabs/aws-lambda-powertools-python/issues/1045)) +* **deps:** correct py36 marker for jmespath +* **deps:** Ignore boto3 changes until needed ([#1151](https://github.com/awslabs/aws-lambda-powertools-python/issues/1151)) +* **deps:** Bump aws-xray-sdk from 2.6.0 to 2.8.0 ([#413](https://github.com/awslabs/aws-lambda-powertools-python/issues/413)) +* **deps:** correct mypy types as dev dependency ([#1322](https://github.com/awslabs/aws-lambda-powertools-python/issues/1322)) +* **deps:** bump poetry to latest ([#592](https://github.com/awslabs/aws-lambda-powertools-python/issues/592)) +* **deps:** update jmespath marker to support 1.0 and py3.6 ([#1139](https://github.com/awslabs/aws-lambda-powertools-python/issues/1139)) +* **deps-dev:** remove jmespath due to dev deps conflict ([#1148](https://github.com/awslabs/aws-lambda-powertools-python/issues/1148)) +* **docs:** Use updated names for ProxyEventType ([#424](https://github.com/awslabs/aws-lambda-powertools-python/issues/424)) +* **docs:** remove Slack link ([#1210](https://github.com/awslabs/aws-lambda-powertools-python/issues/1210)) +* **docs:** correct feature_flags link and json exmaples ([#605](https://github.com/awslabs/aws-lambda-powertools-python/issues/605)) +* **docs:** workflow to include api ref in latest alias ([#408](https://github.com/awslabs/aws-lambda-powertools-python/issues/408)) +* **event-handler:** body to empty string in CORS preflight (ALB non-compliant) ([#1249](https://github.com/awslabs/aws-lambda-powertools-python/issues/1249)) +* **event-sources:** Pass authorizer data to APIGatewayEventAuthorizer ([#897](https://github.com/awslabs/aws-lambda-powertools-python/issues/897)) +* **event-sources:** handle dynamodb null type as none, not bool ([#929](https://github.com/awslabs/aws-lambda-powertools-python/issues/929)) +* **event-sources:** handle claimsOverrideDetails set to null ([#878](https://github.com/awslabs/aws-lambda-powertools-python/issues/878)) +* **event_handler:** docs snippets, high-level import CorsConfig ([#1019](https://github.com/awslabs/aws-lambda-powertools-python/issues/1019)) +* **event_handler:** exception_handler to handle ServiceError exceptions ([#1160](https://github.com/awslabs/aws-lambda-powertools-python/issues/1160)) * **event_handler:** fix bug with previous array implementation +* **event_handler:** Allow for event_source support ([#1159](https://github.com/awslabs/aws-lambda-powertools-python/issues/1159)) +* **event_handlers:** ImportError when importing Response from top-level 
event_handler ([#1388](https://github.com/awslabs/aws-lambda-powertools-python/issues/1388)) +* **event_handlers:** handle lack of headers when using auto-compression feature ([#1325](https://github.com/awslabs/aws-lambda-powertools-python/issues/1325)) +* **event_sources:** add test for Function URL AuthZ ([#1421](https://github.com/awslabs/aws-lambda-powertools-python/issues/1421)) +* **feature-flags:** bug handling multiple conditions ([#599](https://github.com/awslabs/aws-lambda-powertools-python/issues/599)) +* **feature-flags:** rules should evaluate with an AND op ([#724](https://github.com/awslabs/aws-lambda-powertools-python/issues/724)) +* **feature-toggles:** correct cdk example ([#601](https://github.com/awslabs/aws-lambda-powertools-python/issues/601)) +* **governance:** update label in names in issues +* **idempotency:** include decorated fn name in hash ([#869](https://github.com/awslabs/aws-lambda-powertools-python/issues/869)) +* **idempotency:** pass by value on idem key to guard inadvert mutations ([#1090](https://github.com/awslabs/aws-lambda-powertools-python/issues/1090)) +* **idempotency:** sorting keys before hashing ([#722](https://github.com/awslabs/aws-lambda-powertools-python/issues/722)) +* **idempotency:** sorting keys before hashing +* **idempotency:** sorting keys before hashing ([#639](https://github.com/awslabs/aws-lambda-powertools-python/issues/639)) +* **jmespath_util:** snappy as dev dep and typing example ([#1446](https://github.com/awslabs/aws-lambda-powertools-python/issues/1446)) +* **lambda-authorizer:** allow proxy resources path in arn ([#1051](https://github.com/awslabs/aws-lambda-powertools-python/issues/1051)) +* **logger:** support additional args for handlers when injecting lambda context ([#1276](https://github.com/awslabs/aws-lambda-powertools-python/issues/1276)) +* **logger:** clear_state regression on absent standard keys ([#1088](https://github.com/awslabs/aws-lambda-powertools-python/issues/1088)) +* **logger:** preserve std keys when using custom formatters ([#1264](https://github.com/awslabs/aws-lambda-powertools-python/issues/1264)) +* **logger:** ensure state is cleared for custom formatters ([#1072](https://github.com/awslabs/aws-lambda-powertools-python/issues/1072)) +* **logger:** clear_state should keep custom key formats ([#1095](https://github.com/awslabs/aws-lambda-powertools-python/issues/1095)) +* **logger:** test generates logfile +* **logger:** push extra keys to the end ([#722](https://github.com/awslabs/aws-lambda-powertools-python/issues/722)) +* **logger:** preserve std keys when using custom formatters ([#1264](https://github.com/awslabs/aws-lambda-powertools-python/issues/1264)) +* **logger:** exclude source_logger in copy_config_to_registered_loggers ([#1001](https://github.com/awslabs/aws-lambda-powertools-python/issues/1001)) +* **logger-utils:** regression on exclude set leading to no formatter ([#1080](https://github.com/awslabs/aws-lambda-powertools-python/issues/1080)) +* **metrics:** flush upon a single metric 100th data point ([#1046](https://github.com/awslabs/aws-lambda-powertools-python/issues/1046)) +* **metrics:** explicit type to single_metric ctx manager ([#865](https://github.com/awslabs/aws-lambda-powertools-python/issues/865)) +* **metrics:** raise SchemaValidationError for >8 metric dimensions ([#1240](https://github.com/awslabs/aws-lambda-powertools-python/issues/1240)) +* **middleware_factory:** ret type annotation for handler dec 
([#1066](https://github.com/awslabs/aws-lambda-powertools-python/issues/1066)) +* **mypy:** a few return types, type signatures, and untyped areas ([#718](https://github.com/awslabs/aws-lambda-powertools-python/issues/718)) +* **mypy:** fixes to resolve no implicit optional errors ([#521](https://github.com/awslabs/aws-lambda-powertools-python/issues/521)) +* **parameters:** appconfig transform and return types ([#877](https://github.com/awslabs/aws-lambda-powertools-python/issues/877)) +* **parameters:** appconfig internal _get docstrings ([#934](https://github.com/awslabs/aws-lambda-powertools-python/issues/934)) +* **parser:** overload parse when using envelope ([#885](https://github.com/awslabs/aws-lambda-powertools-python/issues/885)) +* **parser:** Improve types for parser.py ([#419](https://github.com/awslabs/aws-lambda-powertools-python/issues/419)) +* **parser:** apigw wss validation check_message_id; housekeeping ([#553](https://github.com/awslabs/aws-lambda-powertools-python/issues/553)) +* **parser:** kinesis sequence number is str, not int ([#907](https://github.com/awslabs/aws-lambda-powertools-python/issues/907)) +* **parser:** Make ApiGateway version, authorizer fields optional ([#532](https://github.com/awslabs/aws-lambda-powertools-python/issues/532)) +* **parser:** body/QS can be null or omitted in apigw v1/v2 ([#820](https://github.com/awslabs/aws-lambda-powertools-python/issues/820)) +* **parser:** raise ValidationError when SNS->SQS keys are intentionally missing ([#1299](https://github.com/awslabs/aws-lambda-powertools-python/issues/1299)) +* **parser:** mypy support for payload type override as models ([#883](https://github.com/awslabs/aws-lambda-powertools-python/issues/883)) +* **parser:** Add missing fields for SESEvent ([#1027](https://github.com/awslabs/aws-lambda-powertools-python/issues/1027)) +* **tracer:** add warm start annotation (ColdStart=False) ([#851](https://github.com/awslabs/aws-lambda-powertools-python/issues/851)) +* **tracer:** mypy generic to preserve decorated method signature ([#529](https://github.com/awslabs/aws-lambda-powertools-python/issues/529)) +* **validator:** handle built-in custom formats correctly ([#498](https://github.com/awslabs/aws-lambda-powertools-python/issues/498)) +* **validator:** event type annotation as any in validate fn ([#405](https://github.com/awslabs/aws-lambda-powertools-python/issues/405)) +* **warning:** future distutils deprecation ([#921](https://github.com/awslabs/aws-lambda-powertools-python/issues/921)) ## Code Refactoring +* simplify custom formatter for minor changes ([#417](https://github.com/awslabs/aws-lambda-powertools-python/issues/417)) +* rename to clear_state +* rename to remove_custom_keys +* **apigateway:** Add BaseRouter and duplicate route check ([#757](https://github.com/awslabs/aws-lambda-powertools-python/issues/757)) * **batch:** remove legacy sqs_batch_processor ([#1492](https://github.com/awslabs/aws-lambda-powertools-python/issues/1492)) +* **data-classes:** clean up internal logic for APIGatewayAuthorizerResponse ([#643](https://github.com/awslabs/aws-lambda-powertools-python/issues/643)) +* **event-handler:** api gateway handler review changes ([#420](https://github.com/awslabs/aws-lambda-powertools-python/issues/420)) +* **event-handler:** Add ResponseBuilder and more docs ([#412](https://github.com/awslabs/aws-lambda-powertools-python/issues/412)) +* **event_handler:** match to match_results; 3.10 new keyword 
([#616](https://github.com/awslabs/aws-lambda-powertools-python/issues/616)) +* **feature-flags:** add debug for all features evaluation" ([#590](https://github.com/awslabs/aws-lambda-powertools-python/issues/590)) +* **feature-toggles:** Code coverage and housekeeping ([#530](https://github.com/awslabs/aws-lambda-powertools-python/issues/530)) +* **feature_flags:** optimize UX and maintenance ([#563](https://github.com/awslabs/aws-lambda-powertools-python/issues/563)) +* **logger:** BYOFormatter and Handler, UTC support, and more ([#404](https://github.com/awslabs/aws-lambda-powertools-python/issues/404)) ## Documentation +* fix indentation of SAM snippets in install section ([#778](https://github.com/awslabs/aws-lambda-powertools-python/issues/778)) +* add team behind it and email +* consistency around admonitions and snippets ([#919](https://github.com/awslabs/aws-lambda-powertools-python/issues/919)) +* Added GraphQL Sample API to Examples section of README.md ([#930](https://github.com/awslabs/aws-lambda-powertools-python/issues/930)) +* add new public layer ARNs ([#746](https://github.com/awslabs/aws-lambda-powertools-python/issues/746)) +* external reference to cloudformation custom resource helper ([#914](https://github.com/awslabs/aws-lambda-powertools-python/issues/914)) +* update list of features +* add final consideration section +* disable search blur in non-prod env +* include new public roadmap ([#452](https://github.com/awslabs/aws-lambda-powertools-python/issues/452)) +* add amplify-cli instructions for public layer ([#754](https://github.com/awslabs/aws-lambda-powertools-python/issues/754)) +* fix anchor +* rename to tutorial given the size +* enable dark mode switch ([#471](https://github.com/awslabs/aws-lambda-powertools-python/issues/471)) +* add Layers example for Serverless framework & CDK ([#500](https://github.com/awslabs/aws-lambda-powertools-python/issues/500)) +* rename quickstart to tutorial in readme +* improve public lambda layer wording, clipboard buttons ([#762](https://github.com/awslabs/aws-lambda-powertools-python/issues/762)) +* add new public Slack invite +* update Lambda Layers version +* add better BDD coments +* Terraform reference for SAR Lambda Layer ([#716](https://github.com/awslabs/aws-lambda-powertools-python/issues/716)) +* Idiomatic tenet updated to Progressive +* fix syntax errors and line highlights ([#1004](https://github.com/awslabs/aws-lambda-powertools-python/issues/1004)) +* use higher contrast font ([#822](https://github.com/awslabs/aws-lambda-powertools-python/issues/822)) +* use higher contrast font +* updated Lambda Layers definition & limitations. 
([#775](https://github.com/awslabs/aws-lambda-powertools-python/issues/775)) +* **api-gateway:** document new HTTP service error exceptions ([#546](https://github.com/awslabs/aws-lambda-powertools-python/issues/546)) +* **api-gateway:** add support for new router feature ([#767](https://github.com/awslabs/aws-lambda-powertools-python/issues/767)) +* **api-gateway:** add new API mapping support +* **api_gateway:** new event handler for API Gateway and ALB ([#418](https://github.com/awslabs/aws-lambda-powertools-python/issues/418)) +* **apigateway:** fix sample layout provided ([#864](https://github.com/awslabs/aws-lambda-powertools-python/issues/864)) +* **apigateway:** removes duplicate admonition ([#1426](https://github.com/awslabs/aws-lambda-powertools-python/issues/1426)) +* **apigateway:** add new not_found feature ([#915](https://github.com/awslabs/aws-lambda-powertools-python/issues/915)) +* **apigateway:** removes duplicate admonition ([#1426](https://github.com/awslabs/aws-lambda-powertools-python/issues/1426)) +* **apigateway:** re-add sample layout, add considerations ([#826](https://github.com/awslabs/aws-lambda-powertools-python/issues/826)) +* **appsync:** fix typo +* **appsync:** add new router feature ([#821](https://github.com/awslabs/aws-lambda-powertools-python/issues/821)) +* **appsync:** fix users.py typo to locations [#830](https://github.com/awslabs/aws-lambda-powertools-python/issues/830) +* **batch:** remove leftover from legacy +* **batch:** snippet typo on batch processed messages iteration ([#951](https://github.com/awslabs/aws-lambda-powertools-python/issues/951)) +* **batch:** fix typo in context manager keyword ([#938](https://github.com/awslabs/aws-lambda-powertools-python/issues/938)) +* **contributing:** operational excellence pause +* **core:** match code snippet name with filename ([#1286](https://github.com/awslabs/aws-lambda-powertools-python/issues/1286)) +* **data-class:** fix invalid syntax in new AppSync Authorizer +* **data-classes:** make authorizer concise; use enum ([#630](https://github.com/awslabs/aws-lambda-powertools-python/issues/630)) +* **data_classes:** fix missing dynamodb stream get_type/value +* **event-handler:** improve testing section for graphql ([#996](https://github.com/awslabs/aws-lambda-powertools-python/issues/996)) +* **event-handler:** snippets split, improved, and lint ([#1279](https://github.com/awslabs/aws-lambda-powertools-python/issues/1279)) +* **event-handler:** new custom serializer option +* **event-handler:** snippets split, improved, and lint ([#1279](https://github.com/awslabs/aws-lambda-powertools-python/issues/1279)) +* **event-handler:** document catch-all routes ([#705](https://github.com/awslabs/aws-lambda-powertools-python/issues/705)) +* **event_handler:** remove beta flag from new HTTP utility +* **event_handler:** fix closing brackets in CORS sample +* **event_handler:** add missing note on trimmed responses +* **examples:** enforce and fix all mypy errors ([#1393](https://github.com/awslabs/aws-lambda-powertools-python/issues/1393)) +* **feature-flags:** fix sample feature name in evaluate +* **feature-flags:** add guidance when to use vs env vars vs parameters +* **feature-flags:** create concrete documentation ([#594](https://github.com/awslabs/aws-lambda-powertools-python/issues/594)) +* **feature-toggles:** correct docs and typing ([#588](https://github.com/awslabs/aws-lambda-powertools-python/issues/588)) +* **feature_flags:** fix SAM infra, convert CDK to Python +* **governance:** typos on PR 
template fixes [#1314](https://github.com/awslabs/aws-lambda-powertools-python/issues/1314) +* **governance:** link roadmap and maintainers doc +* **governance:** add security doc to the root +* **graphql:** snippets split, improved, and lint ([#1287](https://github.com/awslabs/aws-lambda-powertools-python/issues/1287)) +* **home:** add discord invitation link ([#1471](https://github.com/awslabs/aws-lambda-powertools-python/issues/1471)) +* **home:** fix discord syntax and add Discord badge +* **homepage:** emphasize additional powertools languages ([#1292](https://github.com/awslabs/aws-lambda-powertools-python/issues/1292)) +* **homepage:** link to typescript version ([#950](https://github.com/awslabs/aws-lambda-powertools-python/issues/950)) * **homepage:** note about v2 version -* **layer:** upgrade to 1.28.0 (v33) - -## Features - -* **ci:** add actionlint in pre-commit hook -* **event_handler:** add cookies as 1st class citizen in v2 ([#1487](https://github.com/awslabs/aws-lambda-powertools-python/issues/1487)) -* **event_handler:** improved support for headers and cookies in v2 ([#1455](https://github.com/awslabs/aws-lambda-powertools-python/issues/1455)) -* **event_sources:** add CloudWatch dashboard custom widget event ([#1474](https://github.com/awslabs/aws-lambda-powertools-python/issues/1474)) - -## Maintenance - -* **bandit:** update baseline -* **ci:** add workflow to suggest splitting large PRs ([#1480](https://github.com/awslabs/aws-lambda-powertools-python/issues/1480)) -* **ci:** limit E2E workflow run for source code change -* **ci:** add linter for GitHub Actions as pre-commit hook ([#1479](https://github.com/awslabs/aws-lambda-powertools-python/issues/1479)) -* **ci:** add missing description fields -* **ci:** remove unused and undeclared OS matrix env -* **ci:** sync package version with pypi -* **ci:** fix invalid dependency leftover -* **ci:** create adhoc docs workflow for v2 -* **ci:** create adhoc docs workflow for v2 -* **ci:** create docs workflow for v2 -* **ci:** create reusable docs publishing workflow ([#1482](https://github.com/awslabs/aws-lambda-powertools-python/issues/1482)) -* **ci:** format comment on comment_large_pr script -* **ci:** add note for state persistence on comment_large_pr -* **ci:** destructure assignment on comment_large_pr -* **ci:** record pr details upon labeling -* **ci:** remove dangling debug step -* **ci:** enable ci checks for v2 -* **deps:** bump pydantic from 1.10.0 to 1.10.1 ([#1491](https://github.com/awslabs/aws-lambda-powertools-python/issues/1491)) -* **deps-dev:** bump black from 22.6.0 to 22.8.0 ([#1494](https://github.com/awslabs/aws-lambda-powertools-python/issues/1494)) -* **deps-dev:** bump mypy-boto3-dynamodb from 1.24.55.post1 to 1.24.60 ([#1481](https://github.com/awslabs/aws-lambda-powertools-python/issues/1481)) -* **deps-dev:** bump mypy-boto3-dynamodb from 1.24.55.post1 to 1.24.60 ([#306](https://github.com/awslabs/aws-lambda-powertools-python/issues/306)) -* **deps-dev:** bump mkdocs-material from 8.4.1 to 8.4.2 ([#1483](https://github.com/awslabs/aws-lambda-powertools-python/issues/1483)) -* **deps-dev:** bump flake8-variables-names from 0.0.4 to 0.0.5 ([#1490](https://github.com/awslabs/aws-lambda-powertools-python/issues/1490)) -* **deps-dev:** bump aws-cdk-lib from 2.39.1 to 2.40.0 ([#1495](https://github.com/awslabs/aws-lambda-powertools-python/issues/1495)) -* **maintainers:** update release workflow link -* **maintenance:** add discord link to first PR and first issue 
([#1493](https://github.com/awslabs/aws-lambda-powertools-python/issues/1493)) - - - -## [v1.28.0] - 2022-08-25 -## Bug Fixes - -* **ci:** calculate parallel jobs based on infrastructure needs ([#1475](https://github.com/awslabs/aws-lambda-powertools-python/issues/1475)) -* **ci:** del flake8 direct dep over py3.6 conflicts and docs failure -* **ci:** move from pip-tools to poetry on layers reusable workflow -* **ci:** move from pip-tools to poetry on layers to fix conflicts -* **ci:** typo and bust gh actions cache -* **ci:** use poetry to resolve layer deps; pip for CDK -* **ci:** disable poetry venv for layer workflow as cdk ignores venv -* **ci:** add cdk v2 dep for layers workflow -* **ci:** move from pip-tools to poetry on layers -* **ci:** temporarily disable changelog upon release -* **ci:** add explicit origin to fix release detached head -* **jmespath_util:** snappy as dev dep and typing example ([#1446](https://github.com/awslabs/aws-lambda-powertools-python/issues/1446)) - -## Documentation - -* **apigateway:** removes duplicate admonition ([#1426](https://github.com/awslabs/aws-lambda-powertools-python/issues/1426)) -* **home:** fix discord syntax and add Discord badge -* **home:** add discord invitation link ([#1471](https://github.com/awslabs/aws-lambda-powertools-python/issues/1471)) +* **idempotency:** add support for DynamoDB composite keys ([#808](https://github.com/awslabs/aws-lambda-powertools-python/issues/808)) +* **idempotency:** fix misleading idempotent examples ([#661](https://github.com/awslabs/aws-lambda-powertools-python/issues/661)) +* **idempotency:** remove beta flag +* **idempotency:** remove old todo +* **install:** new lambda layer for 1.24.0 release +* **install:** instructions to reduce pydantic package size ([#1077](https://github.com/awslabs/aws-lambda-powertools-python/issues/1077)) +* **jmespath:** clarify envelope terminology * **jmespath_util:** snippets split, improved, and lint ([#1419](https://github.com/awslabs/aws-lambda-powertools-python/issues/1419)) +* **lambda_layer:** fix CDK layer syntax +* **layer:** bump Lambda Layer to version 6 * **layer:** upgrade to 1.27.0 * **layer:** upgrade to 1.27.0 +* **layer:** upgrade to 1.26.7 +* **layer:** update to 1.25.7 +* **layer:** update to 1.25.6; cosmetic changes +* **layer:** upgrade to 1.25.10 +* **layer:** upgrade to 1.25.9 +* **layer:** remove link from clipboard button ([#1135](https://github.com/awslabs/aws-lambda-powertools-python/issues/1135)) +* **layer:** update to 1.24.1 +* **layer:** update to 1.24.2 +* **layer:** upgrade to 1.28.0 (v33) +* **layer:** update to 1.25.1 +* **layer:** update to 1.25.3 +* **layer:** bump to 1.25.5 +* **lint:** add markdownlint rules and automation ([#1256](https://github.com/awslabs/aws-lambda-powertools-python/issues/1256)) +* **logger:** document enriching logs with logrecord attributes ([#1271](https://github.com/awslabs/aws-lambda-powertools-python/issues/1271)) +* **logger:** document new get_correlation_id method ([#545](https://github.com/awslabs/aws-lambda-powertools-python/issues/545)) +* **logger:** improvements extensibility & new features ([#415](https://github.com/awslabs/aws-lambda-powertools-python/issues/415)) +* **logger:** snippets split, improved, and lint ([#1262](https://github.com/awslabs/aws-lambda-powertools-python/issues/1262)) +* **logger:** add FAQ for cross-account searches ([#501](https://github.com/awslabs/aws-lambda-powertools-python/issues/501)) +* **maintainers:** initial maintainers playbook 
([#1222](https://github.com/awslabs/aws-lambda-powertools-python/issues/1222)) +* **metrics:** snippets split, improved, and lint ([#1272](https://github.com/awslabs/aws-lambda-powertools-python/issues/1272)) +* **metrics:** snippets split, improved, and lint +* **metrics:** keep it consistent with other sections, update metric names * **middleware-factory:** snippets split, improved, and lint ([#1451](https://github.com/awslabs/aws-lambda-powertools-python/issues/1451)) +* **nav:** make REST and GraphQL event handlers more explicit ([#959](https://github.com/awslabs/aws-lambda-powertools-python/issues/959)) +* **parameters:** add testing your code section ([#1017](https://github.com/awslabs/aws-lambda-powertools-python/issues/1017)) +* **parameters:** auto-transforming values based on suffix ([#573](https://github.com/awslabs/aws-lambda-powertools-python/issues/573)) +* **parser:** minor grammar fix ([#1427](https://github.com/awslabs/aws-lambda-powertools-python/issues/1427)) +* **parser:** fix incorrect import in root_validator example ([#735](https://github.com/awslabs/aws-lambda-powertools-python/issues/735)) * **parser:** minor grammar fix ([#1427](https://github.com/awslabs/aws-lambda-powertools-python/issues/1427)) +* **parser:** APIGatewayProxyEvent to APIGatewayProxyEventModel ([#1061](https://github.com/awslabs/aws-lambda-powertools-python/issues/1061)) +* **parser:** fix table and heading syntax +* **plugin:** add mermaid to create diagram as code ([#1070](https://github.com/awslabs/aws-lambda-powertools-python/issues/1070)) +* **quickstart:** make section agnostic to json lib +* **quickstart:** expand on intro line +* **quickstart:** tidy requirements up +* **quickstart:** sentence fragmentation, tidy up +* **quickstart:** same process for Logger +* **quickstart:** add sub-sections, fix highlight & code +* **readme:** add code coverage badge ([#577](https://github.com/awslabs/aws-lambda-powertools-python/issues/577)) +* **roadmap:** use pinned pause issue instead +* **roadmap:** add new roadmap section ([#1204](https://github.com/awslabs/aws-lambda-powertools-python/issues/1204)) +* **tenets:** make core, non-core more explicit +* **tenets:** update Idiomatic tenet to Progressive ([#823](https://github.com/awslabs/aws-lambda-powertools-python/issues/823)) +* **theme:** upgrade mkdocs-material to 8.x ([#1002](https://github.com/awslabs/aws-lambda-powertools-python/issues/1002)) +* **tracer:** warning to note on local traces +* **tracer:** update ServiceLens image w/ API GW, copywriting +* **tracer:** new ignore_endpoint feature ([#931](https://github.com/awslabs/aws-lambda-powertools-python/issues/931)) +* **tracer:** split and lint code snippets ([#1260](https://github.com/awslabs/aws-lambda-powertools-python/issues/1260)) +* **tracer:** snippets split, improved, and lint ([#1261](https://github.com/awslabs/aws-lambda-powertools-python/issues/1261)) +* **tracer:** additional scenario when to disable auto-capture ([#499](https://github.com/awslabs/aws-lambda-powertools-python/issues/499)) +* **tracer:** add annotation, metadata, and image +* **tracer:** add initial image, requirements +* **tracer:** Fix line highlighting ([#395](https://github.com/awslabs/aws-lambda-powertools-python/issues/395)) +* **tracer:** update wording that it auto-disables on non-Lambda env +* **tutorial:** fix broken internal links ([#1000](https://github.com/awslabs/aws-lambda-powertools-python/issues/1000)) +* **tutorial:** fix path to images 
([#963](https://github.com/awslabs/aws-lambda-powertools-python/issues/963)) * **typing:** snippets split, improved, and lint ([#1465](https://github.com/awslabs/aws-lambda-powertools-python/issues/1465)) * **validation:** snippets split, improved, and lint ([#1449](https://github.com/awslabs/aws-lambda-powertools-python/issues/1449)) ## Features -* **parser:** add support for Lambda Function URL ([#1442](https://github.com/awslabs/aws-lambda-powertools-python/issues/1442)) - -## Maintenance - -* **batch:** deprecate sqs_batch_processor ([#1463](https://github.com/awslabs/aws-lambda-powertools-python/issues/1463)) -* **ci:** prevent concurrent git update in critical workflows ([#1478](https://github.com/awslabs/aws-lambda-powertools-python/issues/1478)) -* **ci:** disable e2e py version matrix due to concurrent locking -* **ci:** revert e2e py version matrix -* **ci:** temp disable e2e matrix -* **ci:** update changelog with latest changes -* **ci:** update changelog with latest changes -* **ci:** reduce payload and only send prod notification -* **ci:** remove area/utilities conflicting label -* **ci:** include py version in stack and cache lock -* **ci:** remove conventional changelog commit to reduce noise -* **ci:** update changelog with latest changes -* **deps:** bump release-drafter/release-drafter from 5.20.0 to 5.20.1 ([#1458](https://github.com/awslabs/aws-lambda-powertools-python/issues/1458)) -* **deps:** bump pydantic from 1.9.1 to 1.9.2 ([#1448](https://github.com/awslabs/aws-lambda-powertools-python/issues/1448)) -* **deps-dev:** bump flake8-bugbear from 22.8.22 to 22.8.23 ([#1473](https://github.com/awslabs/aws-lambda-powertools-python/issues/1473)) -* **deps-dev:** bump types-requests from 2.28.7 to 2.28.8 ([#1423](https://github.com/awslabs/aws-lambda-powertools-python/issues/1423)) -* **maintainer:** add Leandro as maintainer ([#1468](https://github.com/awslabs/aws-lambda-powertools-python/issues/1468)) -* **tests:** build and deploy Lambda Layer stack once ([#1466](https://github.com/awslabs/aws-lambda-powertools-python/issues/1466)) -* **tests:** refactor E2E test mechanics to ease maintenance, writing tests and parallelization ([#1444](https://github.com/awslabs/aws-lambda-powertools-python/issues/1444)) -* **tests:** enable end-to-end test workflow ([#1470](https://github.com/awslabs/aws-lambda-powertools-python/issues/1470)) -* **tests:** refactor E2E logger to ease maintenance, writing tests and parallelization ([#1460](https://github.com/awslabs/aws-lambda-powertools-python/issues/1460)) -* **tests:** refactor E2E tracer to ease maintenance, writing tests and parallelization ([#1457](https://github.com/awslabs/aws-lambda-powertools-python/issues/1457)) - -## Reverts -* fix(ci): add explicit origin to fix release detached head - - - -## [v1.27.0] - 2022-08-05 -## Bug Fixes - -* **ci:** changelog workflow must receive git tags too -* **ci:** add additional input to accurately describe intent on skip -* **ci:** job permissions -* **event_sources:** add test for Function URL AuthZ ([#1421](https://github.com/awslabs/aws-lambda-powertools-python/issues/1421)) - -## Documentation - -* **layer:** upgrade to 1.26.7 - -## Features - +* expose jmespath powertools functions ([#736](https://github.com/awslabs/aws-lambda-powertools-python/issues/736)) +* add support to persist default dimensions ([#410](https://github.com/awslabs/aws-lambda-powertools-python/issues/410)) +* add get_raw_configuration property in store; expose store +* boto3 sessions in batch, parameters & 
idempotency ([#717](https://github.com/awslabs/aws-lambda-powertools-python/issues/717)) +* **api-gateway:** add support for custom serializer ([#568](https://github.com/awslabs/aws-lambda-powertools-python/issues/568)) +* **api-gateway:** add debug mode ([#507](https://github.com/awslabs/aws-lambda-powertools-python/issues/507)) +* **api-gateway:** add common service errors ([#506](https://github.com/awslabs/aws-lambda-powertools-python/issues/506)) +* **apigateway:** access parent api resolver from router ([#842](https://github.com/awslabs/aws-lambda-powertools-python/issues/842)) +* **apigateway:** add exception_handler support ([#898](https://github.com/awslabs/aws-lambda-powertools-python/issues/898)) +* **apigateway:** add Router to allow large routing composition ([#645](https://github.com/awslabs/aws-lambda-powertools-python/issues/645)) +* **appsync:** add Router to allow large resolver composition ([#776](https://github.com/awslabs/aws-lambda-powertools-python/issues/776)) +* **batch:** new BatchProcessor for SQS, DynamoDB, Kinesis ([#886](https://github.com/awslabs/aws-lambda-powertools-python/issues/886)) +* **ci:** auto-notify & close issues on release +* **ci:** add actionlint in pre-commit hook * **ci:** create reusable changelog generation ([#1418](https://github.com/awslabs/aws-lambda-powertools-python/issues/1418)) -* **ci:** include changelog generation on docs build * **ci:** create reusable changelog generation +* **ci:** include changelog generation on docs build +* **data-classes:** decorator to instantiate data_classes and docs updates ([#442](https://github.com/awslabs/aws-lambda-powertools-python/issues/442)) +* **data-classes:** decode json_body if based64 encoded ([#560](https://github.com/awslabs/aws-lambda-powertools-python/issues/560)) +* **data-classes:** support for code pipeline job event ([#416](https://github.com/awslabs/aws-lambda-powertools-python/issues/416)) +* **data-classes:** AppSync Lambda authorizer event ([#610](https://github.com/awslabs/aws-lambda-powertools-python/issues/610)) +* **data-classes:** ActiveMQ and RabbitMQ support ([#770](https://github.com/awslabs/aws-lambda-powertools-python/issues/770)) +* **data-classes:** decode base64 encoded body ([#425](https://github.com/awslabs/aws-lambda-powertools-python/issues/425)) +* **data-classes:** authorizer for http api and rest api ([#620](https://github.com/awslabs/aws-lambda-powertools-python/issues/620)) +* **data-classes:** add AttributeValueType to DynamoDBStreamEvent ([#462](https://github.com/awslabs/aws-lambda-powertools-python/issues/462)) +* **data-classes:** data_as_bytes prop KinesisStreamRecordPayload ([#628](https://github.com/awslabs/aws-lambda-powertools-python/issues/628)) +* **event-handle:** allow for cors=None setting ([#421](https://github.com/awslabs/aws-lambda-powertools-python/issues/421)) +* **event-handler:** Support AppSyncResolverEvent subclassing ([#526](https://github.com/awslabs/aws-lambda-powertools-python/issues/526)) +* **event-handler:** prefixes to strip for custom mappings ([#579](https://github.com/awslabs/aws-lambda-powertools-python/issues/579)) +* **event-handler:** new resolvers to fix current_event typing ([#978](https://github.com/awslabs/aws-lambda-powertools-python/issues/978)) +* **event-handler:** add http ProxyEvent handler ([#369](https://github.com/awslabs/aws-lambda-powertools-python/issues/369)) +* **event-sources:** cache parsed json in data class ([#909](https://github.com/awslabs/aws-lambda-powertools-python/issues/909)) +* 
**event_handler:** improved support for headers and cookies in v2 ([#1455](https://github.com/awslabs/aws-lambda-powertools-python/issues/1455)) +* **event_handler:** add cookies as 1st class citizen in v2 ([#1487](https://github.com/awslabs/aws-lambda-powertools-python/issues/1487)) * **event_handlers:** Add support for Lambda Function URLs ([#1408](https://github.com/awslabs/aws-lambda-powertools-python/issues/1408)) +* **event_sources:** add CloudWatch dashboard custom widget event ([#1474](https://github.com/awslabs/aws-lambda-powertools-python/issues/1474)) +* **feat-toggle:** New simple feature toggles rule engine (WIP) ([#494](https://github.com/awslabs/aws-lambda-powertools-python/issues/494)) +* **feature flags:** Add not_in action and rename contains to in ([#589](https://github.com/awslabs/aws-lambda-powertools-python/issues/589)) +* **feature-flags:** get_raw_configuration property in Store ([#720](https://github.com/awslabs/aws-lambda-powertools-python/issues/720)) +* **feature-flags:** improve "IN/NOT_IN"; new rule actions ([#710](https://github.com/awslabs/aws-lambda-powertools-python/issues/710)) +* **feature-flags:** Bring your own logger for debug ([#709](https://github.com/awslabs/aws-lambda-powertools-python/issues/709)) +* **feature_flags:** support beyond boolean values (JSON values) ([#804](https://github.com/awslabs/aws-lambda-powertools-python/issues/804)) +* **feature_flags:** Added inequality conditions ([#721](https://github.com/awslabs/aws-lambda-powertools-python/issues/721)) +* **general:** support for Python 3.9 ([#626](https://github.com/awslabs/aws-lambda-powertools-python/issues/626)) +* **idempotency:** handle lambda timeout scenarios for INPROGRESS records ([#1387](https://github.com/awslabs/aws-lambda-powertools-python/issues/1387)) +* **idempotency:** support dataclasses & pydantic models payloads ([#908](https://github.com/awslabs/aws-lambda-powertools-python/issues/908)) +* **idempotency:** support for any synchronous function ([#625](https://github.com/awslabs/aws-lambda-powertools-python/issues/625)) +* **idempotency:** makes customers unit testing easier ([#719](https://github.com/awslabs/aws-lambda-powertools-python/issues/719)) +* **logger:** allow handler with custom kwargs signature ([#913](https://github.com/awslabs/aws-lambda-powertools-python/issues/913)) +* **logger:** add option to clear state per invocation +* **logger:** add ALB correlation ID support ([#816](https://github.com/awslabs/aws-lambda-powertools-python/issues/816)) +* **logger:** add option to clear state per invocation ([#467](https://github.com/awslabs/aws-lambda-powertools-python/issues/467)) +* **logger:** support use_datetime_directive for timestamps ([#920](https://github.com/awslabs/aws-lambda-powertools-python/issues/920)) +* **logger:** log_event support event data classes (e.g. 
S3Event) ([#984](https://github.com/awslabs/aws-lambda-powertools-python/issues/984)) +* **logger:** clone powertools logger config to any Python logger ([#927](https://github.com/awslabs/aws-lambda-powertools-python/issues/927)) +* **logger:** add get_correlation_id method ([#516](https://github.com/awslabs/aws-lambda-powertools-python/issues/516)) * **metrics:** update max user-defined dimensions from 9 to 29 ([#1417](https://github.com/awslabs/aws-lambda-powertools-python/issues/1417)) +* **mypy:** complete mypy support for the entire codebase ([#943](https://github.com/awslabs/aws-lambda-powertools-python/issues/943)) +* **mypy:** add mypy support to makefile ([#508](https://github.com/awslabs/aws-lambda-powertools-python/issues/508)) +* **parameters:** add clear_cache method for providers ([#1194](https://github.com/awslabs/aws-lambda-powertools-python/issues/1194)) +* **parameters:** accept boto3_client to support private endpoints and ease testing ([#1096](https://github.com/awslabs/aws-lambda-powertools-python/issues/1096)) +* **params:** expose high level max_age, raise_on_transform_error ([#567](https://github.com/awslabs/aws-lambda-powertools-python/issues/567)) +* **parser:** security issue in Pydantic [#436](https://github.com/awslabs/aws-lambda-powertools-python/issues/436) ([#437](https://github.com/awslabs/aws-lambda-powertools-python/issues/437)) +* **parser:** Support for API GW v1 proxy schema & envelope ([#403](https://github.com/awslabs/aws-lambda-powertools-python/issues/403)) +* **parser:** add support for Lambda Function URL ([#1442](https://github.com/awslabs/aws-lambda-powertools-python/issues/1442)) +* **parser:** add support for API Gateway HTTP API [#434](https://github.com/awslabs/aws-lambda-powertools-python/issues/434) ([#441](https://github.com/awslabs/aws-lambda-powertools-python/issues/441)) +* **tracer:** disable tracer when for non-Lambda envs ([#598](https://github.com/awslabs/aws-lambda-powertools-python/issues/598)) +* **tracer:** ignore tracing for certain hostname(s) or url(s) ([#910](https://github.com/awslabs/aws-lambda-powertools-python/issues/910)) +* **tracer:** add service annotation when service is set ([#861](https://github.com/awslabs/aws-lambda-powertools-python/issues/861)) +* **validator:** include missing data elements from a validation error ([#686](https://github.com/awslabs/aws-lambda-powertools-python/issues/686)) ## Maintenance -* **ci:** sync area labels to prevent dedup -* **ci:** update changelog with latest changes -* **ci:** update changelog with latest changes -* **ci:** add manual trigger for docs -* **ci:** update changelog with latest changes -* **ci:** temporarily disable changelog push on release -* **ci:** update changelog with latest changes -* **ci:** move changelog generation to rebuild_latest_doc workflow -* **ci:** update project with version -* **ci:** update release automated activities -* **ci:** readd changelog step on release -* **ci:** move changelog generation to rebuild_latest_doc workflow -* **ci:** drop 3.6 from workflows -* **deps:** bump constructs from 10.1.1 to 10.1.60 ([#1399](https://github.com/awslabs/aws-lambda-powertools-python/issues/1399)) -* **deps:** bump constructs from 10.1.1 to 10.1.66 ([#1414](https://github.com/awslabs/aws-lambda-powertools-python/issues/1414)) -* **deps:** bump jsii from 1.57.0 to 1.63.2 ([#1400](https://github.com/awslabs/aws-lambda-powertools-python/issues/1400)) -* **deps:** bump constructs from 10.1.1 to 10.1.64 
([#1405](https://github.com/awslabs/aws-lambda-powertools-python/issues/1405)) -* **deps:** bump attrs from 21.4.0 to 22.1.0 ([#1397](https://github.com/awslabs/aws-lambda-powertools-python/issues/1397)) -* **deps:** bump constructs from 10.1.1 to 10.1.63 ([#1402](https://github.com/awslabs/aws-lambda-powertools-python/issues/1402)) -* **deps:** bump constructs from 10.1.1 to 10.1.65 ([#1407](https://github.com/awslabs/aws-lambda-powertools-python/issues/1407)) -* **deps-dev:** bump types-requests from 2.28.5 to 2.28.6 ([#1401](https://github.com/awslabs/aws-lambda-powertools-python/issues/1401)) -* **deps-dev:** bump types-requests from 2.28.6 to 2.28.7 ([#1406](https://github.com/awslabs/aws-lambda-powertools-python/issues/1406)) -* **docs:** remove pause sentence from roadmap ([#1409](https://github.com/awslabs/aws-lambda-powertools-python/issues/1409)) -* **docs:** update site name to test ci changelog -* **docs:** update CHANGELOG for v1.26.7 -* **docs:** update description to trigger changelog generation -* **governance:** remove devcontainer in favour of gitpod.io ([#1411](https://github.com/awslabs/aws-lambda-powertools-python/issues/1411)) -* **governance:** add pre-configured dev environment with GitPod.io to ease contributions ([#1403](https://github.com/awslabs/aws-lambda-powertools-python/issues/1403)) -* **layers:** upgrade cdk dep hashes to prevent ci fail - - - -## [v1.26.7] - 2022-07-29 -## Bug Fixes - -* **ci:** add missing oidc token generation permission -* **event_handlers:** ImportError when importing Response from top-level event_handler ([#1388](https://github.com/awslabs/aws-lambda-powertools-python/issues/1388)) - -## Documentation - -* **examples:** enforce and fix all mypy errors ([#1393](https://github.com/awslabs/aws-lambda-powertools-python/issues/1393)) - -## Features - -* **idempotency:** handle lambda timeout scenarios for INPROGRESS records ([#1387](https://github.com/awslabs/aws-lambda-powertools-python/issues/1387)) - -## Maintenance - -* **ci:** increase skip_pypi logic to cover tests/changelog on re-run failures -* **ci:** update project with version 1.26.6 -* **ci:** drop 3.6 from workflows ([#1395](https://github.com/awslabs/aws-lambda-powertools-python/issues/1395)) -* **ci:** add conditional to skip pypi release ([#1366](https://github.com/awslabs/aws-lambda-powertools-python/issues/1366)) -* **ci:** remove leftover logic from on_merged_pr workflow -* **ci:** update project with version 1.26.6 -* **ci:** update project with version 1.26.6 -* **deps:** bump jsii from 1.57.0 to 1.63.1 ([#1390](https://github.com/awslabs/aws-lambda-powertools-python/issues/1390)) -* **deps:** bump constructs from 10.1.1 to 10.1.59 ([#1396](https://github.com/awslabs/aws-lambda-powertools-python/issues/1396)) -* **deps-dev:** bump flake8-isort from 4.1.1 to 4.1.2.post0 ([#1384](https://github.com/awslabs/aws-lambda-powertools-python/issues/1384)) -* **layers:** bump to 1.26.6 using layer v26 -* **maintainers:** add Ruben as a maintainer ([#1392](https://github.com/awslabs/aws-lambda-powertools-python/issues/1392)) - - - -## [v1.26.6] - 2022-07-25 -## Bug Fixes - -* **ci:** remove unsupported env in workflow_call -* **ci:** allow inherit secrets for reusable workflow -* **ci:** remove unused secret -* **ci:** label_related_issue unresolved var from history mixup -* **ci:** cond doesnt support two expr w/ env -* **ci:** only event is resolved in cond -* **ci:** unexpected symbol due to double quotes... 
-* **event_handlers:** handle lack of headers when using auto-compression feature ([#1325](https://github.com/awslabs/aws-lambda-powertools-python/issues/1325)) - -## Maintenance - +* bump to 1.20.2 +* bump to 1.15.1 +* bump to 1.16.0 +* bump to 1.16.1 +* trial boring cyborg automation +* include regression in changelog +* bump to 1.25.10 +* assited changelog pre-generation, auto-label PR ([#443](https://github.com/awslabs/aws-lambda-powertools-python/issues/443)) +* bump to 1.21.0 +* bump to 1.25.9 +* bump to 1.25.8 +* bump to 1.25.7 +* bump to 1.25.6 +* bump to 1.25.5 +* bump to 1.25.4 +* bump to 1.26.2 +* enable dependabot for dep upgrades ([#444](https://github.com/awslabs/aws-lambda-powertools-python/issues/444)) +* enable mergify ([#450](https://github.com/awslabs/aws-lambda-powertools-python/issues/450)) +* remove duplicate test +* comment reason for change +* remove unnecessary test +* lint unused import +* bump to 1.25.3 +* bump version 1.26.1 +* bump to 1.25.2 +* add sam build gitignore +* dependabot/mergify guardrail for major versions +* move to approach B for multiple IaC +* bump to 1.25.1 +* bump to 1.25.0 +* correct docs +* correct docs +* fix dependabot commit messages prefix +* fix dependabot unique set config +* bump mkdocs-material from 7.1.5 to 7.1.6 ([#451](https://github.com/awslabs/aws-lambda-powertools-python/issues/451)) +* bump boto3 from 1.17.78 to 1.17.84 ([#449](https://github.com/awslabs/aws-lambda-powertools-python/issues/449)) +* bump to version 1.26.3 +* bump xenon from 0.7.1 to 0.7.3 ([#446](https://github.com/awslabs/aws-lambda-powertools-python/issues/446)) +* bump actions/setup-python from 1 to 2.2.2 ([#445](https://github.com/awslabs/aws-lambda-powertools-python/issues/445)) +* update mergify to require approval on dependabot ([#456](https://github.com/awslabs/aws-lambda-powertools-python/issues/456)) +* fix path for PR auto-labelling +* use isinstance over type +* bump to 1.24.2 +* reintroduce codecov token +* ignore codecov upload +* bump to 1.24.1 +* bump to 1.24.1 +* bump to 1.24.0 +* bump to 1.23.0 +* minor housekeeping before release ([#912](https://github.com/awslabs/aws-lambda-powertools-python/issues/912)) +* correct pr label order +* bump to 1.22.0 +* conditional to publish docs only attempt 3 +* conditional to publish docs only attempt 2 +* conditional to publish docs only +* remove Lambda Layer version tag +* fix var expr +* bump to 1.21.1 +* include dependencies label under maintenance +* include regression in changelog +* fix changelog file redirection +* bump version to 1.17.0 +* ignore constants in test cov ([#745](https://github.com/awslabs/aws-lambda-powertools-python/issues/745)) +* ignore constants in tests cov +* bump 1.17.1 ([#502](https://github.com/awslabs/aws-lambda-powertools-python/issues/502)) +* bump 1.18.0 ([#547](https://github.com/awslabs/aws-lambda-powertools-python/issues/547)) +* bump 1.18.1 +* bump to 1.15.0 ([#422](https://github.com/awslabs/aws-lambda-powertools-python/issues/422)) +* enable autolabel based on PR title +* only build docs on docs path +* bump to 1.19.0 +* update pypi description, keywords +* include feature-flags docs hotfix +* add support for publishing fallback * dummy for PR test * print full event depth * print full workflow event depth * debug full event +* bump to 1.20.0 +* update changelog to reflect out-of-band commits +* add python 3.9 support +* update new changelog version tag +* setup codespaces ([#637](https://github.com/awslabs/aws-lambda-powertools-python/issues/637)) +* markdown 
linter fixes ([#636](https://github.com/awslabs/aws-lambda-powertools-python/issues/636)) * remove leftover from fork one more time -* **ci:** test env expr -* **ci:** test upstream job skip +* include public layers changelog +* bump to 1.20.1 +* bump to 1.26.0 +* **actions:** include new labels +* **api-docs:** enable allow_reuse to fix the docs ([#612](https://github.com/awslabs/aws-lambda-powertools-python/issues/612)) +* **bandit:** update baseline +* **batch:** deprecate sqs_batch_processor ([#1463](https://github.com/awslabs/aws-lambda-powertools-python/issues/1463)) +* **ci:** remove leftover logic from on_merged_pr workflow +* **ci:** update project with version 1.26.6 +* **ci:** use gh environment for beta and prod layer deploy ([#1356](https://github.com/awslabs/aws-lambda-powertools-python/issues/1356)) +* **ci:** remove core group from codeowners ([#1358](https://github.com/awslabs/aws-lambda-powertools-python/issues/1358)) +* **ci:** use OIDC and encrypt release secrets ([#1355](https://github.com/awslabs/aws-lambda-powertools-python/issues/1355)) +* **ci:** introduce codeowners ([#1352](https://github.com/awslabs/aws-lambda-powertools-python/issues/1352)) +* **ci:** drop 3.6 from workflows ([#1395](https://github.com/awslabs/aws-lambda-powertools-python/issues/1395)) * **ci:** lockdown workflow_run by origin ([#1350](https://github.com/awslabs/aws-lambda-powertools-python/issues/1350)) +* **ci:** test upstream job skip +* **ci:** test env expr * **ci:** test default env * **ci:** experiment hardening origin -* **ci:** experiment hardening origin -* **ci:** introduce codeowners ([#1352](https://github.com/awslabs/aws-lambda-powertools-python/issues/1352)) -* **ci:** use OIDC and encrypt release secrets ([#1355](https://github.com/awslabs/aws-lambda-powertools-python/issues/1355)) -* **ci:** remove core group from codeowners ([#1358](https://github.com/awslabs/aws-lambda-powertools-python/issues/1358)) +* **ci:** temporarily disable changelog push on release +* **ci:** move changelog generation to rebuild_latest_doc workflow +* **ci:** update project with version +* **ci:** move changelog generation to rebuild_latest_doc workflow +* **ci:** drop 3.6 from workflows * **ci:** confirm workflow_run event -* **ci:** use gh environment for beta and prod layer deploy ([#1356](https://github.com/awslabs/aws-lambda-powertools-python/issues/1356)) +* **ci:** update project with version 1.26.6 +* **ci:** readd changelog step on release +* **ci:** update release automated activities +* **ci:** update changelog with latest changes +* **ci:** update changelog with latest changes +* **ci:** add conditional to skip pypi release ([#1366](https://github.com/awslabs/aws-lambda-powertools-python/issues/1366)) +* **ci:** add manual trigger for docs +* **ci:** update changelog with latest changes * **ci:** update project with version 1.26.5 -* **deps:** bump constructs from 10.1.1 to 10.1.52 ([#1343](https://github.com/awslabs/aws-lambda-powertools-python/issues/1343)) -* **deps-dev:** bump mypy-boto3-cloudwatch from 1.24.0 to 1.24.35 ([#1342](https://github.com/awslabs/aws-lambda-powertools-python/issues/1342)) -* **governance:** update wording tech debt to summary in maintenance template -* **governance:** add new maintenance issue template for tech debt ([#1326](https://github.com/awslabs/aws-lambda-powertools-python/issues/1326)) -* **layers:** layer canary stack should not hardcode resource name -* **layers:** replace layers account secret 
([#1329](https://github.com/awslabs/aws-lambda-powertools-python/issues/1329)) -* **layers:** expand to all aws commercial regions ([#1324](https://github.com/awslabs/aws-lambda-powertools-python/issues/1324)) -* **layers:** bump to 1.26.5 - -## Pull Requests - -* Merge pull request [#285](https://github.com/awslabs/aws-lambda-powertools-python/issues/285) from heitorlessa/chore/skip-dep-workflow -* Merge pull request [#284](https://github.com/awslabs/aws-lambda-powertools-python/issues/284) from heitorlessa/chore/dummy - - - -## [v1.26.5] - 2022-07-20 -## Bug Fixes - -* mathc the name of the cdk synth from the build phase -* typo in input for layer workflow -* no need to cache npm since we only install cdk cli and don't have .lock files -* add entire ARN role instead of account and role name -* path to artefact -* unzip the right artifact name -* download artefact into the layer dir -* sight, yes a whitespace character breaks the build -* **ci:** checkout project before validating related issue workflow -* **ci:** install poetry before calling setup/python with cache ([#1315](https://github.com/awslabs/aws-lambda-powertools-python/issues/1315)) -* **ci:** remove additional quotes in PR action ([#1317](https://github.com/awslabs/aws-lambda-powertools-python/issues/1317)) -* **ci:** lambda layer workflow release version and conditionals ([#1316](https://github.com/awslabs/aws-lambda-powertools-python/issues/1316)) -* **ci:** fetch all git info so we can check tags -* **ci:** lambda layer workflow release version and conditionals ([#1316](https://github.com/awslabs/aws-lambda-powertools-python/issues/1316)) -* **ci:** keep layer version permission ([#1318](https://github.com/awslabs/aws-lambda-powertools-python/issues/1318)) -* **ci:** regex to catch combination of related issues workflow -* **deps:** correct mypy types as dev dependency ([#1322](https://github.com/awslabs/aws-lambda-powertools-python/issues/1322)) -* **logger:** preserve std keys when using custom formatters ([#1264](https://github.com/awslabs/aws-lambda-powertools-python/issues/1264)) - -## Documentation - -* **event-handler:** snippets split, improved, and lint ([#1279](https://github.com/awslabs/aws-lambda-powertools-python/issues/1279)) -* **governance:** typos on PR template fixes [#1314](https://github.com/awslabs/aws-lambda-powertools-python/issues/1314) -* **governance:** add security doc to the root - -## Maintenance - -* **ci:** limits concurrency for docs workflow +* **ci:** update changelog with latest changes +* **ci:** sync area labels to prevent dedup +* **ci:** update changelog with latest changes * **ci:** adds caching when installing python dependencies ([#1311](https://github.com/awslabs/aws-lambda-powertools-python/issues/1311)) * **ci:** update project with version 1.26.4 -* **ci:** fix reference error in related_issue -* **deps:** bump constructs from 10.1.1 to 10.1.51 ([#1323](https://github.com/awslabs/aws-lambda-powertools-python/issues/1323)) -* **deps-dev:** bump mypy from 0.961 to 0.971 ([#1320](https://github.com/awslabs/aws-lambda-powertools-python/issues/1320)) -* **governance:** fix typo on semantic commit link introduced in [#1](https://github.com/awslabs/aws-lambda-powertools-python/issues/1)aef4 -* **layers:** add release pipeline in GitHub Actions ([#1278](https://github.com/awslabs/aws-lambda-powertools-python/issues/1278)) -* **layers:** bump to 22 for 1.26.3 - - - -## [v1.26.4] - 2022-07-18 -## Bug Fixes - -* **ci:** checkout project before validating related issue workflow -* **ci:** 
fixes typos and small issues on github scripts ([#1302](https://github.com/awslabs/aws-lambda-powertools-python/issues/1302)) -* **ci:** address conditional type on_merge -* **ci:** address pr title semantic not found logic -* **ci:** address gh-actions additional quotes; remove debug -* **ci:** regex group name for on_merge workflow -* **ci:** escape outputs as certain PRs can break GH Actions expressions -* **ci:** move conditionals from yaml to code; leftover -* **ci:** move conditionals from yaml to code -* **ci:** accept core arg in label related issue workflow -* **ci:** match the name of the cdk synth from the build phase -* **ci:** regex to catch combination of related issues workflow -* **logger:** preserve std keys when using custom formatters ([#1264](https://github.com/awslabs/aws-lambda-powertools-python/issues/1264)) -* **parser:** raise ValidationError when SNS->SQS keys are intentionally missing ([#1299](https://github.com/awslabs/aws-lambda-powertools-python/issues/1299)) - -## Documentation - -* **event-handler:** snippets split, improved, and lint ([#1279](https://github.com/awslabs/aws-lambda-powertools-python/issues/1279)) -* **graphql:** snippets split, improved, and lint ([#1287](https://github.com/awslabs/aws-lambda-powertools-python/issues/1287)) -* **homepage:** emphasize additional powertools languages ([#1292](https://github.com/awslabs/aws-lambda-powertools-python/issues/1292)) -* **metrics:** snippets split, improved, and lint - -## Maintenance - * **ci:** increase release automation and limit to one manual step ([#1297](https://github.com/awslabs/aws-lambda-powertools-python/issues/1297)) -* **ci:** make export PR reusable -* **ci:** auto-merge cdk lib and lambda layer construct -* **ci:** convert inline gh-script to file +* **ci:** update changelog with latest changes +* **ci:** split latest docs workflow +* **ci:** update changelog with latest changes * **ci:** lockdown 3rd party workflows to pin sha ([#1301](https://github.com/awslabs/aws-lambda-powertools-python/issues/1301)) * **ci:** automatically add area label based on title ([#1300](https://github.com/awslabs/aws-lambda-powertools-python/issues/1300)) -* **ci:** disable output debugging as pr body isnt accepted +* **ci:** reduce payload and only send prod notification * **ci:** experiment with conditional on outputs * **ci:** improve error handling for non-issue numbers * **ci:** add end to end testing mechanism ([#1247](https://github.com/awslabs/aws-lambda-powertools-python/issues/1247)) +* **ci:** update changelog with latest changes +* **ci:** update changelog with latest changes +* **ci:** safely label PR based on title +* **ci:** update mergify bot breaking change +* **ci:** remove mergify legacy key +* **ci:** disable output debugging as pr body isnt accepted +* **ci:** add queue name in mergify +* **ci:** fix mergify dependabot queue +* **ci:** auto-merge cdk lib and lambda layer construct +* **ci:** run codeql analysis on push only +* **ci:** move all scripts under .github/scripts +* **ci:** move error prone env to code as constants +* **ci:** make export PR reusable +* **ci:** convert inline gh-script to file +* **ci:** fix reference error in related_issue +* **ci:** update changelog with latest changes * **ci:** limits concurrency for docs workflow +* **ci:** update changelog with latest changes * **ci:** fix reference error in related_issue -* **ci:** move error prone env to code as constants -* **ci:** move all scripts under .github/scripts -* **deps:** bump 
cdk-lambda-powertools-python-layer ([#1284](https://github.com/awslabs/aws-lambda-powertools-python/issues/1284)) -* **deps:** bump jsii from 1.61.0 to 1.62.0 ([#1294](https://github.com/awslabs/aws-lambda-powertools-python/issues/1294)) -* **deps:** bump constructs from 10.1.1 to 10.1.46 ([#1306](https://github.com/awslabs/aws-lambda-powertools-python/issues/1306)) -* **deps:** bump actions/setup-node from 2 to 3 ([#1281](https://github.com/awslabs/aws-lambda-powertools-python/issues/1281)) -* **deps:** bump fastjsonschema from 2.15.3 to 2.16.1 ([#1309](https://github.com/awslabs/aws-lambda-powertools-python/issues/1309)) -* **deps:** bump constructs from 10.1.1 to 10.1.49 ([#1308](https://github.com/awslabs/aws-lambda-powertools-python/issues/1308)) -* **deps:** bump attrs from 21.2.0 to 21.4.0 ([#1282](https://github.com/awslabs/aws-lambda-powertools-python/issues/1282)) -* **deps:** bump aws-cdk-lib from 2.29.0 to 2.31.1 ([#1290](https://github.com/awslabs/aws-lambda-powertools-python/issues/1290)) -* **deps-dev:** bump mypy-boto3-dynamodb from 1.24.12 to 1.24.27 ([#1293](https://github.com/awslabs/aws-lambda-powertools-python/issues/1293)) -* **deps-dev:** bump mypy-boto3-appconfig from 1.24.0 to 1.24.29 ([#1295](https://github.com/awslabs/aws-lambda-powertools-python/issues/1295)) -* **governance:** remove any step relying on master branch -* **governance:** update emeritus affiliation -* **layers:** add release pipeline in GitHub Actions ([#1278](https://github.com/awslabs/aws-lambda-powertools-python/issues/1278)) -* **layers:** bump to 22 for 1.26.3 - - - -## [v1.26.3] - 2022-07-04 -## Bug Fixes - -* **ci:** remove utf-8 body in octokit body req -* **ci:** improve msg visibility on closed issues -* **ci:** disable merged_pr workflow -* **ci:** merged_pr add issues write access -* **ci:** quote prBody GH expr on_opened_pr -* **ci:** reusable workflow secrets param -* **logger:** support additional args for handlers when injecting lambda context ([#1276](https://github.com/awslabs/aws-lambda-powertools-python/issues/1276)) -* **logger:** preserve std keys when using custom formatters ([#1264](https://github.com/awslabs/aws-lambda-powertools-python/issues/1264)) - -## Documentation - -* **lint:** add markdownlint rules and automation ([#1256](https://github.com/awslabs/aws-lambda-powertools-python/issues/1256)) -* **logger:** document enriching logs with logrecord attributes ([#1271](https://github.com/awslabs/aws-lambda-powertools-python/issues/1271)) -* **logger:** snippets split, improved, and lint ([#1262](https://github.com/awslabs/aws-lambda-powertools-python/issues/1262)) -* **metrics:** snippets split, improved, and lint ([#1272](https://github.com/awslabs/aws-lambda-powertools-python/issues/1272)) -* **tracer:** snippets split, improved, and lint ([#1261](https://github.com/awslabs/aws-lambda-powertools-python/issues/1261)) -* **tracer:** split and lint code snippets ([#1260](https://github.com/awslabs/aws-lambda-powertools-python/issues/1260)) - -## Maintenance - -* move to approach B for multiple IaC -* add sam build gitignore -* bump to version 1.26.3 +* **ci:** remove area/utilities conflicting label +* **ci:** limits concurrency for docs workflow +* **ci:** remove conventional changelog commit to reduce noise +* **ci:** update project with version 1.26.6 * **ci:** reactivate on_merged_pr workflow +* **ci:** temp disable e2e matrix +* **ci:** include py version in stack and cache lock +* **ci:** revert e2e py version matrix +* **ci:** disable e2e py version matrix due to 
concurrent locking * **ci:** improve wording on closed issues action * **ci:** deactivate on_merged_pr workflow -* **deps:** bump aws-xray-sdk from 2.9.0 to 2.10.0 ([#1270](https://github.com/awslabs/aws-lambda-powertools-python/issues/1270)) +* **ci:** prevent concurrent git update in critical workflows ([#1478](https://github.com/awslabs/aws-lambda-powertools-python/issues/1478)) +* **ci:** limit E2E workflow run for source code change +* **ci:** remove unused and undeclared OS matrix env +* **ci:** fix invalid dependency leftover +* **ci:** remove dangling debug step +* **ci:** add linter for GitHub Actions as pre-commit hook ([#1479](https://github.com/awslabs/aws-lambda-powertools-python/issues/1479)) +* **ci:** add workflow to suggest splitting large PRs ([#1480](https://github.com/awslabs/aws-lambda-powertools-python/issues/1480)) +* **ci:** enable ci checks for v2 +* **ci:** record pr details upon labeling +* **ci:** destructure assignment on comment_large_pr +* **ci:** add note for state persistence on comment_large_pr +* **ci:** format comment on comment_large_pr script +* **ci:** create reusable docs publishing workflow ([#1482](https://github.com/awslabs/aws-lambda-powertools-python/issues/1482)) +* **ci:** create docs workflow for v2 +* **ci:** create adhoc docs workflow for v2 +* **ci:** disable mergify configuration after breaking changes ([#1188](https://github.com/awslabs/aws-lambda-powertools-python/issues/1188)) +* **ci:** create adhoc docs workflow for v2 +* **ci:** changelog pre-generation to fetch tags from origin +* **ci:** sync package version with pypi +* **ci:** increase skip_pypi logic to cover tests/changelog on re-run failures +* **ci:** add missing description fields +* **ci:** post release on tagged issues too +* **ci:** experiment hardening origin +* **deps:** bump jsii from 1.61.0 to 1.62.0 ([#1294](https://github.com/awslabs/aws-lambda-powertools-python/issues/1294)) +* **deps:** bump boto3 from 1.18.21 to 1.18.22 ([#614](https://github.com/awslabs/aws-lambda-powertools-python/issues/614)) +* **deps:** bump boto3 from 1.18.22 to 1.18.24 ([#619](https://github.com/awslabs/aws-lambda-powertools-python/issues/619)) +* **deps:** bump pydantic from 1.10.0 to 1.10.1 ([#1491](https://github.com/awslabs/aws-lambda-powertools-python/issues/1491)) +* **deps:** bump jsii from 1.57.0 to 1.63.2 ([#1400](https://github.com/awslabs/aws-lambda-powertools-python/issues/1400)) +* **deps:** bump boto3 from 1.18.24 to 1.18.25 ([#623](https://github.com/awslabs/aws-lambda-powertools-python/issues/623)) +* **deps:** bump boto3 from 1.18.25 to 1.18.26 ([#627](https://github.com/awslabs/aws-lambda-powertools-python/issues/627)) +* **deps:** bump constructs from 10.1.1 to 10.1.63 ([#1402](https://github.com/awslabs/aws-lambda-powertools-python/issues/1402)) +* **deps:** bump attrs from 21.4.0 to 22.1.0 ([#1397](https://github.com/awslabs/aws-lambda-powertools-python/issues/1397)) +* **deps:** bump jsii from 1.57.0 to 1.63.1 ([#1390](https://github.com/awslabs/aws-lambda-powertools-python/issues/1390)) +* **deps:** bump boto3 from 1.18.15 to 1.18.17 ([#597](https://github.com/awslabs/aws-lambda-powertools-python/issues/597)) +* **deps:** bump constructs from 10.1.1 to 10.1.59 ([#1396](https://github.com/awslabs/aws-lambda-powertools-python/issues/1396)) +* **deps:** bump pydantic from 1.9.0 to 1.9.1 ([#1221](https://github.com/awslabs/aws-lambda-powertools-python/issues/1221)) +* **deps:** bump email-validator from 1.1.3 to 1.2.1 
([#1199](https://github.com/awslabs/aws-lambda-powertools-python/issues/1199)) +* **deps:** bump boto3 from 1.18.1 to 1.18.15 ([#591](https://github.com/awslabs/aws-lambda-powertools-python/issues/591)) +* **deps:** bump boto3 from 1.18.26 to 1.18.32 ([#663](https://github.com/awslabs/aws-lambda-powertools-python/issues/663)) +* **deps:** bump codecov/codecov-action from 2.0.1 to 2.0.2 ([#558](https://github.com/awslabs/aws-lambda-powertools-python/issues/558)) +* **deps:** bump constructs from 10.1.1 to 10.1.64 ([#1405](https://github.com/awslabs/aws-lambda-powertools-python/issues/1405)) +* **deps:** bump boto3 from 1.18.32 to 1.18.38 ([#671](https://github.com/awslabs/aws-lambda-powertools-python/issues/671)) +* **deps:** bump boto3 from 1.18.17 to 1.18.21 ([#608](https://github.com/awslabs/aws-lambda-powertools-python/issues/608)) +* **deps:** bump github/codeql-action from 1 to 2 ([#1154](https://github.com/awslabs/aws-lambda-powertools-python/issues/1154)) +* **deps:** bump boto3 from 1.18.38 to 1.18.41 ([#677](https://github.com/awslabs/aws-lambda-powertools-python/issues/677)) +* **deps:** bump actions/setup-python from 3 to 4 ([#1244](https://github.com/awslabs/aws-lambda-powertools-python/issues/1244)) +* **deps:** bump codecov/codecov-action from 2.0.2 to 2.1.0 ([#675](https://github.com/awslabs/aws-lambda-powertools-python/issues/675)) +* **deps:** bump codecov/codecov-action from 3.0.0 to 3.1.0 ([#1143](https://github.com/awslabs/aws-lambda-powertools-python/issues/1143)) +* **deps:** bump boto3 from 1.18.41 to 1.18.49 ([#703](https://github.com/awslabs/aws-lambda-powertools-python/issues/703)) +* **deps:** bump boto3 from 1.18.49 to 1.18.51 ([#713](https://github.com/awslabs/aws-lambda-powertools-python/issues/713)) +* **deps:** bump actions/checkout from 2 to 3 ([#1052](https://github.com/awslabs/aws-lambda-powertools-python/issues/1052)) +* **deps:** bump boto3 from 1.18.51 to 1.18.54 ([#733](https://github.com/awslabs/aws-lambda-powertools-python/issues/733)) +* **deps:** bump constructs from 10.1.1 to 10.1.52 ([#1343](https://github.com/awslabs/aws-lambda-powertools-python/issues/1343)) +* **deps:** bump boto3 from 1.18.54 to 1.18.56 ([#742](https://github.com/awslabs/aws-lambda-powertools-python/issues/742)) +* **deps:** bump codecov/codecov-action from 2.1.0 to 3.0.0 ([#1102](https://github.com/awslabs/aws-lambda-powertools-python/issues/1102)) +* **deps:** bump actions/upload-artifact from 2 to 3 ([#1103](https://github.com/awslabs/aws-lambda-powertools-python/issues/1103)) +* **deps:** bump release-drafter/release-drafter from 5.20.0 to 5.20.1 ([#1458](https://github.com/awslabs/aws-lambda-powertools-python/issues/1458)) +* **deps:** bump constructs from 10.1.1 to 10.1.65 ([#1407](https://github.com/awslabs/aws-lambda-powertools-python/issues/1407)) +* **deps:** bump boto3 from 1.17.84 to 1.17.85 ([#455](https://github.com/awslabs/aws-lambda-powertools-python/issues/455)) +* **deps:** bump boto3 from 1.17.85 to 1.17.86 ([#458](https://github.com/awslabs/aws-lambda-powertools-python/issues/458)) +* **deps:** bump boto3 from 1.17.86 to 1.17.87 ([#459](https://github.com/awslabs/aws-lambda-powertools-python/issues/459)) +* **deps:** bump boto3 from 1.18.56 to 1.18.58 ([#755](https://github.com/awslabs/aws-lambda-powertools-python/issues/755)) +* **deps:** bump boto3 from 1.18.58 to 1.18.59 ([#760](https://github.com/awslabs/aws-lambda-powertools-python/issues/760)) +* **deps:** bump boto3 from 1.17.88 to 1.17.89 
([#466](https://github.com/awslabs/aws-lambda-powertools-python/issues/466)) +* **deps:** bump pydantic from 1.9.1 to 1.9.2 ([#1448](https://github.com/awslabs/aws-lambda-powertools-python/issues/1448)) +* **deps:** bump constructs from 10.1.1 to 10.1.51 ([#1323](https://github.com/awslabs/aws-lambda-powertools-python/issues/1323)) +* **deps:** bump constructs from 10.1.1 to 10.1.60 ([#1399](https://github.com/awslabs/aws-lambda-powertools-python/issues/1399)) +* **deps:** bump constructs from 10.1.1 to 10.1.66 ([#1414](https://github.com/awslabs/aws-lambda-powertools-python/issues/1414)) +* **deps:** bump boto3 from 1.17.89 to 1.17.91 ([#473](https://github.com/awslabs/aws-lambda-powertools-python/issues/473)) +* **deps:** bump boto3 from 1.18.59 to 1.18.61 ([#766](https://github.com/awslabs/aws-lambda-powertools-python/issues/766)) +* **deps:** bump email-validator from 1.1.2 to 1.1.3 ([#478](https://github.com/awslabs/aws-lambda-powertools-python/issues/478)) +* **deps:** bump boto3 from 1.17.91 to 1.17.101 ([#490](https://github.com/awslabs/aws-lambda-powertools-python/issues/490)) +* **deps:** bump boto3 from 1.18.61 to 1.19.6 ([#783](https://github.com/awslabs/aws-lambda-powertools-python/issues/783)) +* **deps:** bump urllib3 from 1.26.4 to 1.26.5 ([#787](https://github.com/awslabs/aws-lambda-powertools-python/issues/787)) +* **deps:** bump boto3 from 1.19.6 to 1.20.3 ([#809](https://github.com/awslabs/aws-lambda-powertools-python/issues/809)) +* **deps:** bump boto3 from 1.20.3 to 1.20.5 ([#817](https://github.com/awslabs/aws-lambda-powertools-python/issues/817)) +* **deps:** bump constructs from 10.1.1 to 10.1.49 ([#1308](https://github.com/awslabs/aws-lambda-powertools-python/issues/1308)) +* **deps:** bump actions/setup-python from 2.3.1 to 3 ([#1048](https://github.com/awslabs/aws-lambda-powertools-python/issues/1048)) +* **deps:** bump fastjsonschema from 2.15.3 to 2.16.1 ([#1309](https://github.com/awslabs/aws-lambda-powertools-python/issues/1309)) +* **deps:** bump actions/github-script from 5 to 6 ([#1023](https://github.com/awslabs/aws-lambda-powertools-python/issues/1023)) +* **deps:** bump fastjsonschema from 2.15.2 to 2.15.3 ([#949](https://github.com/awslabs/aws-lambda-powertools-python/issues/949)) * **deps:** bump dependabot/fetch-metadata from 1.1.1 to 1.3.2 ([#1269](https://github.com/awslabs/aws-lambda-powertools-python/issues/1269)) +* **deps:** bump boto3 from 1.17.101 to 1.17.102 ([#493](https://github.com/awslabs/aws-lambda-powertools-python/issues/493)) +* **deps:** bump aws-xray-sdk from 2.9.0 to 2.10.0 ([#1270](https://github.com/awslabs/aws-lambda-powertools-python/issues/1270)) +* **deps:** bump actions/setup-python from 2.2.2 to 2.3.0 ([#831](https://github.com/awslabs/aws-lambda-powertools-python/issues/831)) +* **deps:** bump actions/setup-python from 2.3.0 to 2.3.1 ([#852](https://github.com/awslabs/aws-lambda-powertools-python/issues/852)) * **deps:** bump dependabot/fetch-metadata from 1.3.2 to 1.3.3 ([#1273](https://github.com/awslabs/aws-lambda-powertools-python/issues/1273)) +* **deps:** bump boto3 from 1.17.102 to 1.17.110 ([#523](https://github.com/awslabs/aws-lambda-powertools-python/issues/523)) +* **deps:** support arm64 when developing locally ([#862](https://github.com/awslabs/aws-lambda-powertools-python/issues/862)) +* **deps:** bump aws-xray-sdk from 2.8.0 to 2.9.0 ([#876](https://github.com/awslabs/aws-lambda-powertools-python/issues/876)) +* **deps:** bump boto3 from 1.17.110 to 1.18.0 
([#527](https://github.com/awslabs/aws-lambda-powertools-python/issues/527)) +* **deps:** bump boto3 from 1.18.0 to 1.18.1 ([#528](https://github.com/awslabs/aws-lambda-powertools-python/issues/528)) +* **deps:** bump codecov/codecov-action from 1 to 2.0.1 ([#539](https://github.com/awslabs/aws-lambda-powertools-python/issues/539)) +* **deps:** bump actions/setup-node from 2 to 3 ([#1281](https://github.com/awslabs/aws-lambda-powertools-python/issues/1281)) +* **deps:** bump constructs from 10.1.1 to 10.1.46 ([#1306](https://github.com/awslabs/aws-lambda-powertools-python/issues/1306)) +* **deps:** bump aws-cdk-lib from 2.29.0 to 2.31.1 ([#1290](https://github.com/awslabs/aws-lambda-powertools-python/issues/1290)) +* **deps:** bump cdk-lambda-powertools-python-layer ([#1284](https://github.com/awslabs/aws-lambda-powertools-python/issues/1284)) +* **deps:** bump pydantic from 1.8.2 to 1.9.0 ([#933](https://github.com/awslabs/aws-lambda-powertools-python/issues/933)) +* **deps:** bump fastjsonschema from 2.15.1 to 2.15.2 ([#891](https://github.com/awslabs/aws-lambda-powertools-python/issues/891)) +* **deps:** bump attrs from 21.2.0 to 21.4.0 ([#1282](https://github.com/awslabs/aws-lambda-powertools-python/issues/1282)) +* **deps:** bump boto3 from 1.17.87 to 1.17.88 ([#463](https://github.com/awslabs/aws-lambda-powertools-python/issues/463)) +* **deps-dev:** bump mypy from 0.910 to 0.920 ([#903](https://github.com/awslabs/aws-lambda-powertools-python/issues/903)) +* **deps-dev:** bump mypy-boto3-appconfig from 1.24.0 to 1.24.29 ([#1295](https://github.com/awslabs/aws-lambda-powertools-python/issues/1295)) +* **deps-dev:** bump mypy from 0.920 to 0.930 ([#925](https://github.com/awslabs/aws-lambda-powertools-python/issues/925)) +* **deps-dev:** bump mypy-boto3-dynamodb from 1.24.12 to 1.24.27 ([#1293](https://github.com/awslabs/aws-lambda-powertools-python/issues/1293)) +* **deps-dev:** bump mypy from 0.930 to 0.931 ([#941](https://github.com/awslabs/aws-lambda-powertools-python/issues/941)) +* **deps-dev:** bump mkdocs-material from 7.1.10 to 7.1.11 ([#542](https://github.com/awslabs/aws-lambda-powertools-python/issues/542)) +* **deps-dev:** bump black from 21.11b1 to 21.12b0 ([#872](https://github.com/awslabs/aws-lambda-powertools-python/issues/872)) +* **deps-dev:** bump mkdocs-material from 7.1.9 to 7.1.10 ([#522](https://github.com/awslabs/aws-lambda-powertools-python/issues/522)) +* **deps-dev:** bump flake8-bugbear from 21.11.29 to 22.1.11 ([#955](https://github.com/awslabs/aws-lambda-powertools-python/issues/955)) +* **deps-dev:** bump flake8 from 3.9.2 to 4.0.1 ([#789](https://github.com/awslabs/aws-lambda-powertools-python/issues/789)) * **deps-dev:** bump flake8-bugbear from 22.6.22 to 22.7.1 ([#1274](https://github.com/awslabs/aws-lambda-powertools-python/issues/1274)) +* **deps-dev:** bump black from 21.10b0 to 21.11b1 ([#839](https://github.com/awslabs/aws-lambda-powertools-python/issues/839)) +* **deps-dev:** bump isort from 5.9.1 to 5.9.2 ([#514](https://github.com/awslabs/aws-lambda-powertools-python/issues/514)) +* **deps-dev:** bump flake8-eradicate from 1.0.0 to 1.1.0 ([#492](https://github.com/awslabs/aws-lambda-powertools-python/issues/492)) +* **deps-dev:** bump isort from 5.9.3 to 5.10.1 ([#811](https://github.com/awslabs/aws-lambda-powertools-python/issues/811)) +* **deps-dev:** bump coverage from 6.0.2 to 6.1.2 ([#810](https://github.com/awslabs/aws-lambda-powertools-python/issues/810)) +* **deps-dev:** bump mkdocs-material from 8.1.9 to 8.2.4 
([#1054](https://github.com/awslabs/aws-lambda-powertools-python/issues/1054)) * **deps-dev:** bump flake8-bugbear from 22.4.25 to 22.6.22 ([#1258](https://github.com/awslabs/aws-lambda-powertools-python/issues/1258)) * **deps-dev:** bump mypy-boto3-dynamodb from 1.24.0 to 1.24.12 ([#1255](https://github.com/awslabs/aws-lambda-powertools-python/issues/1255)) +* **deps-dev:** bump mkdocs-material from 7.3.5 to 7.3.6 ([#791](https://github.com/awslabs/aws-lambda-powertools-python/issues/791)) +* **deps-dev:** bump mkdocs-material from 7.3.3 to 7.3.5 ([#781](https://github.com/awslabs/aws-lambda-powertools-python/issues/781)) +* **deps-dev:** bump flake8-isort from 4.0.0 to 4.1.1 ([#785](https://github.com/awslabs/aws-lambda-powertools-python/issues/785)) * **deps-dev:** bump mypy-boto3-secretsmanager ([#1252](https://github.com/awslabs/aws-lambda-powertools-python/issues/1252)) -* **governance:** fix on_merged_pr workflow syntax -* **governance:** warn message on closed issues -* **layers:** bump to 21 for 1.26.2 -* **test-perf:** use pytest-benchmark to improve reliability ([#1250](https://github.com/awslabs/aws-lambda-powertools-python/issues/1250)) - - - -## [v1.26.2] - 2022-06-16 -## Bug Fixes - -* **event-handler:** body to empty string in CORS preflight (ALB non-compliant) ([#1249](https://github.com/awslabs/aws-lambda-powertools-python/issues/1249)) - -## Code Refactoring - -* rename to clear_state -* rename to remove_custom_keys - -## Documentation - -* fix anchor - -## Features - -* **logger:** add option to clear state per invocation - -## Maintenance - -* bump to 1.26.2 -* **deps:** bump actions/setup-python from 3 to 4 ([#1244](https://github.com/awslabs/aws-lambda-powertools-python/issues/1244)) +* **deps-dev:** bump flake8-eradicate from 1.1.0 to 1.2.0 ([#784](https://github.com/awslabs/aws-lambda-powertools-python/issues/784)) +* **deps-dev:** bump isort from 5.8.0 to 5.9.1 ([#487](https://github.com/awslabs/aws-lambda-powertools-python/issues/487)) +* **deps-dev:** bump pytest-asyncio from 0.15.1 to 0.16.0 ([#782](https://github.com/awslabs/aws-lambda-powertools-python/issues/782)) +* **deps-dev:** bump coverage from 6.0.1 to 6.0.2 ([#764](https://github.com/awslabs/aws-lambda-powertools-python/issues/764)) +* **deps-dev:** bump mkdocs-material from 7.1.7 to 7.1.9 ([#491](https://github.com/awslabs/aws-lambda-powertools-python/issues/491)) +* **deps-dev:** bump flake8-black from 0.2.1 to 0.2.3 ([#541](https://github.com/awslabs/aws-lambda-powertools-python/issues/541)) +* **deps-dev:** bump mypy from 0.961 to 0.971 ([#1320](https://github.com/awslabs/aws-lambda-powertools-python/issues/1320)) +* **deps-dev:** bump types-requests from 2.28.7 to 2.28.8 ([#1423](https://github.com/awslabs/aws-lambda-powertools-python/issues/1423)) +* **deps-dev:** bump mkdocs-material from 7.3.2 to 7.3.3 ([#758](https://github.com/awslabs/aws-lambda-powertools-python/issues/758)) +* **deps-dev:** bump flake8-comprehensions from 3.6.1 to 3.7.0 ([#759](https://github.com/awslabs/aws-lambda-powertools-python/issues/759)) +* **deps-dev:** bump flake8-eradicate from 1.2.0 to 1.2.1 ([#1158](https://github.com/awslabs/aws-lambda-powertools-python/issues/1158)) +* **deps-dev:** bump coverage from 6.0 to 6.0.1 ([#751](https://github.com/awslabs/aws-lambda-powertools-python/issues/751)) +* **deps-dev:** bump mkdocs-material from 7.1.6 to 7.1.7 ([#464](https://github.com/awslabs/aws-lambda-powertools-python/issues/464)) +* **deps-dev:** bump mkdocs-material from 7.2.4 to 7.2.6 
([#665](https://github.com/awslabs/aws-lambda-powertools-python/issues/665)) +* **deps-dev:** bump mkdocs-material from 7.1.11 to 7.2.0 ([#551](https://github.com/awslabs/aws-lambda-powertools-python/issues/551)) +* **deps-dev:** bump mkdocs-material from 7.2.0 to 7.2.1 ([#566](https://github.com/awslabs/aws-lambda-powertools-python/issues/566)) +* **deps-dev:** bump pytest-cov from 2.12.0 to 2.12.1 ([#454](https://github.com/awslabs/aws-lambda-powertools-python/issues/454)) +* **deps-dev:** bump pdoc3 from 0.9.2 to 0.10.0 ([#584](https://github.com/awslabs/aws-lambda-powertools-python/issues/584)) +* **deps-dev:** bump pytest from 6.2.5 to 7.0.1 ([#1063](https://github.com/awslabs/aws-lambda-powertools-python/issues/1063)) +* **deps-dev:** bump mkdocs-material from 7.3.1 to 7.3.2 ([#741](https://github.com/awslabs/aws-lambda-powertools-python/issues/741)) +* **deps-dev:** bump types-requests from 2.28.6 to 2.28.7 ([#1406](https://github.com/awslabs/aws-lambda-powertools-python/issues/1406)) +* **deps-dev:** bump mypy-boto3-cloudwatch from 1.24.0 to 1.24.35 ([#1342](https://github.com/awslabs/aws-lambda-powertools-python/issues/1342)) +* **deps-dev:** bump mkdocs-material from 8.2.4 to 8.2.7 ([#1131](https://github.com/awslabs/aws-lambda-powertools-python/issues/1131)) +* **deps-dev:** bump isort from 5.9.2 to 5.9.3 ([#574](https://github.com/awslabs/aws-lambda-powertools-python/issues/574)) +* **deps-dev:** bump pytest-cov from 2.12.1 to 3.0.0 ([#730](https://github.com/awslabs/aws-lambda-powertools-python/issues/730)) +* **deps-dev:** bump coverage from 5.5 to 6.0 ([#732](https://github.com/awslabs/aws-lambda-powertools-python/issues/732)) +* **deps-dev:** bump flake8-bugbear from 22.8.22 to 22.8.23 ([#1473](https://github.com/awslabs/aws-lambda-powertools-python/issues/1473)) +* **deps-dev:** bump mkdocs-material from 7.3.0 to 7.3.1 ([#731](https://github.com/awslabs/aws-lambda-powertools-python/issues/731)) +* **deps-dev:** bump flake8-bugbear from 21.9.1 to 21.9.2 ([#712](https://github.com/awslabs/aws-lambda-powertools-python/issues/712)) +* **deps-dev:** bump mypy from 0.931 to 0.942 ([#1133](https://github.com/awslabs/aws-lambda-powertools-python/issues/1133)) +* **deps-dev:** bump xenon from 0.8.0 to 0.9.0 ([#1145](https://github.com/awslabs/aws-lambda-powertools-python/issues/1145)) +* **deps-dev:** bump mkdocs-git-revision-date-plugin ([#1146](https://github.com/awslabs/aws-lambda-powertools-python/issues/1146)) +* **deps-dev:** bump black from 22.6.0 to 22.8.0 ([#1494](https://github.com/awslabs/aws-lambda-powertools-python/issues/1494)) +* **deps-dev:** bump mkdocs-material from 7.2.8 to 7.3.0 ([#695](https://github.com/awslabs/aws-lambda-powertools-python/issues/695)) +* **deps-dev:** bump mkdocs-material from 7.2.6 to 7.2.8 ([#682](https://github.com/awslabs/aws-lambda-powertools-python/issues/682)) +* **deps-dev:** bump flake8-bugbear from 21.4.3 to 21.9.1 ([#676](https://github.com/awslabs/aws-lambda-powertools-python/issues/676)) +* **deps-dev:** bump mypy-boto3-appconfig from 1.23.0.post1 to 1.24.0 ([#1233](https://github.com/awslabs/aws-lambda-powertools-python/issues/1233)) +* **deps-dev:** bump radon from 4.5.2 to 5.1.0 ([#673](https://github.com/awslabs/aws-lambda-powertools-python/issues/673)) +* **deps-dev:** bump mypy from 0.942 to 0.950 ([#1162](https://github.com/awslabs/aws-lambda-powertools-python/issues/1162)) +* **deps-dev:** bump xenon from 0.7.3 to 0.8.0 ([#669](https://github.com/awslabs/aws-lambda-powertools-python/issues/669)) +* **deps-dev:** bump 
flake8-bugbear from 22.1.11 to 22.4.25 ([#1156](https://github.com/awslabs/aws-lambda-powertools-python/issues/1156)) +* **deps-dev:** bump mypy-boto3-dynamodb from 1.24.55.post1 to 1.24.60 ([#1481](https://github.com/awslabs/aws-lambda-powertools-python/issues/1481)) +* **deps-dev:** bump mypy-boto3-dynamodb from 1.23.0.post1 to 1.24.0 ([#1234](https://github.com/awslabs/aws-lambda-powertools-python/issues/1234)) +* **deps-dev:** bump pytest from 6.2.4 to 6.2.5 ([#662](https://github.com/awslabs/aws-lambda-powertools-python/issues/662)) +* **deps-dev:** bump mypy-boto3-dynamodb from 1.24.55.post1 to 1.24.60 ([#306](https://github.com/awslabs/aws-lambda-powertools-python/issues/306)) +* **deps-dev:** bump mkdocs-material from 7.2.2 to 7.2.3 ([#596](https://github.com/awslabs/aws-lambda-powertools-python/issues/596)) +* **deps-dev:** bump flake8-isort from 4.1.1 to 4.1.2.post0 ([#1384](https://github.com/awslabs/aws-lambda-powertools-python/issues/1384)) +* **deps-dev:** bump mypy-boto3-ssm from 1.21.34 to 1.23.0.post1 ([#1220](https://github.com/awslabs/aws-lambda-powertools-python/issues/1220)) +* **deps-dev:** bump mypy-boto3-appconfig from 1.21.34 to 1.23.0.post1 ([#1219](https://github.com/awslabs/aws-lambda-powertools-python/issues/1219)) +* **deps-dev:** bump mypy-boto3-secretsmanager from 1.21.34 to 1.23.0.post1 ([#1218](https://github.com/awslabs/aws-lambda-powertools-python/issues/1218)) +* **deps-dev:** bump mkdocs-material from 8.4.1 to 8.4.2 ([#1483](https://github.com/awslabs/aws-lambda-powertools-python/issues/1483)) +* **deps-dev:** bump mypy-boto3-secretsmanager from 1.23.0.post1 to 1.23.8 ([#1225](https://github.com/awslabs/aws-lambda-powertools-python/issues/1225)) +* **deps-dev:** bump mypy from 0.950 to 0.960 ([#1224](https://github.com/awslabs/aws-lambda-powertools-python/issues/1224)) +* **deps-dev:** bump mypy-boto3-secretsmanager from 1.23.8 to 1.24.0 ([#1232](https://github.com/awslabs/aws-lambda-powertools-python/issues/1232)) +* **deps-dev:** bump flake8-variables-names from 0.0.4 to 0.0.5 ([#1490](https://github.com/awslabs/aws-lambda-powertools-python/issues/1490)) +* **deps-dev:** bump aws-cdk-lib from 2.39.1 to 2.40.0 ([#1495](https://github.com/awslabs/aws-lambda-powertools-python/issues/1495)) +* **deps-dev:** bump flake8-comprehensions from 3.6.0 to 3.6.1 ([#615](https://github.com/awslabs/aws-lambda-powertools-python/issues/615)) * **deps-dev:** bump mypy from 0.960 to 0.961 ([#1241](https://github.com/awslabs/aws-lambda-powertools-python/issues/1241)) +* **deps-dev:** bump types-requests from 2.28.5 to 2.28.6 ([#1401](https://github.com/awslabs/aws-lambda-powertools-python/issues/1401)) +* **deps-dev:** bump mkdocs-material from 7.2.3 to 7.2.4 ([#607](https://github.com/awslabs/aws-lambda-powertools-python/issues/607)) * **deps-dev:** bump mypy-boto3-ssm from 1.23.0.post1 to 1.24.0 ([#1231](https://github.com/awslabs/aws-lambda-powertools-python/issues/1231)) -* **deps-dev:** bump mypy-boto3-secretsmanager from 1.23.8 to 1.24.0 ([#1232](https://github.com/awslabs/aws-lambda-powertools-python/issues/1232)) -* **deps-dev:** bump mypy-boto3-dynamodb from 1.23.0.post1 to 1.24.0 ([#1234](https://github.com/awslabs/aws-lambda-powertools-python/issues/1234)) -* **deps-dev:** bump mypy-boto3-appconfig from 1.23.0.post1 to 1.24.0 ([#1233](https://github.com/awslabs/aws-lambda-powertools-python/issues/1233)) -* **governance:** auto-merge on all PR events +* **deps-dev:** bump flake8-comprehensions from 3.5.0 to 3.6.0 
([#609](https://github.com/awslabs/aws-lambda-powertools-python/issues/609)) +* **deps-dev:** bump mkdocs-material from 7.2.1 to 7.2.2 ([#582](https://github.com/awslabs/aws-lambda-powertools-python/issues/582)) +* **docs:** update description to trigger changelog generation +* **docs:** correct markdown based on markdown lint ([#603](https://github.com/awslabs/aws-lambda-powertools-python/issues/603)) +* **docs:** remove pause sentence from roadmap ([#1409](https://github.com/awslabs/aws-lambda-powertools-python/issues/1409)) +* **docs:** update CHANGELOG for v1.26.7 +* **docs:** update site name to test ci changelog +* **event-handler:** adjusts exception docstrings to not confuse AppSync customers +* **governance:** remove devcontainer in favour of gitpod.io ([#1411](https://github.com/awslabs/aws-lambda-powertools-python/issues/1411)) * **governance:** add release label on pr merge +* **governance:** auto-merge on all PR events * **governance:** enforce safe scope on pr merge labelling * **governance:** limit build workflow to code changes only -* **governance:** auto-merge workflow_dispatch off -* **governance:** auto-merge to use squash -* **governance:** check for related issue in new PRs -* **governance:** auto-merge mypy-stub dependabot -* **governance:** address gh reusable workflow limitation -* **governance:** fix workflow action requirements & syntax -* **governance:** warn message on closed issues -* **metrics:** revert dimensions test before splitting ([#1243](https://github.com/awslabs/aws-lambda-powertools-python/issues/1243)) - - - -## [v1.26.1] - 2022-06-07 -## Bug Fixes - -* **metrics:** raise SchemaValidationError for >8 metric dimensions ([#1240](https://github.com/awslabs/aws-lambda-powertools-python/issues/1240)) - -## Documentation - -* **governance:** link roadmap and maintainers doc -* **maintainers:** initial maintainers playbook ([#1222](https://github.com/awslabs/aws-lambda-powertools-python/issues/1222)) -* **roadmap:** use pinned pause issue instead - -## Maintenance - -* bump version 1.26.1 -* **deps-dev:** bump mypy from 0.950 to 0.960 ([#1224](https://github.com/awslabs/aws-lambda-powertools-python/issues/1224)) -* **deps-dev:** bump mypy-boto3-secretsmanager from 1.23.0.post1 to 1.23.8 ([#1225](https://github.com/awslabs/aws-lambda-powertools-python/issues/1225)) - - - -## [v1.26.0] - 2022-05-20 -## Bug Fixes - -* **batch:** missing space in BatchProcessingError message ([#1201](https://github.com/awslabs/aws-lambda-powertools-python/issues/1201)) -* **batch:** docstring fix for success_handler() record parameter ([#1202](https://github.com/awslabs/aws-lambda-powertools-python/issues/1202)) -* **docs:** remove Slack link ([#1210](https://github.com/awslabs/aws-lambda-powertools-python/issues/1210)) - -## Documentation - -* **layer:** upgrade to 1.25.10 -* **roadmap:** add new roadmap section ([#1204](https://github.com/awslabs/aws-lambda-powertools-python/issues/1204)) - -## Features - -* **parameters:** accept boto3_client to support private endpoints and ease testing ([#1096](https://github.com/awslabs/aws-lambda-powertools-python/issues/1096)) - -## Maintenance - -* bump to 1.26.0 -* **deps:** bump pydantic from 1.9.0 to 1.9.1 ([#1221](https://github.com/awslabs/aws-lambda-powertools-python/issues/1221)) -* **deps:** bump email-validator from 1.1.3 to 1.2.1 ([#1199](https://github.com/awslabs/aws-lambda-powertools-python/issues/1199)) -* **deps-dev:** bump mypy-boto3-secretsmanager from 1.21.34 to 1.23.0.post1 
([#1218](https://github.com/awslabs/aws-lambda-powertools-python/issues/1218)) -* **deps-dev:** bump mypy-boto3-appconfig from 1.21.34 to 1.23.0.post1 ([#1219](https://github.com/awslabs/aws-lambda-powertools-python/issues/1219)) -* **deps-dev:** bump mypy-boto3-ssm from 1.21.34 to 1.23.0.post1 ([#1220](https://github.com/awslabs/aws-lambda-powertools-python/issues/1220)) - - - -## [v1.25.10] - 2022-04-29 -## Bug Fixes - -* **data-classes:** Add missing SES fields and ([#1045](https://github.com/awslabs/aws-lambda-powertools-python/issues/1045)) -* **deps:** Ignore boto3 changes until needed ([#1151](https://github.com/awslabs/aws-lambda-powertools-python/issues/1151)) -* **deps-dev:** remove jmespath due to dev deps conflict ([#1148](https://github.com/awslabs/aws-lambda-powertools-python/issues/1148)) -* **event_handler:** exception_handler to handle ServiceError exceptions ([#1160](https://github.com/awslabs/aws-lambda-powertools-python/issues/1160)) -* **event_handler:** Allow for event_source support ([#1159](https://github.com/awslabs/aws-lambda-powertools-python/issues/1159)) -* **parser:** Add missing fields for SESEvent ([#1027](https://github.com/awslabs/aws-lambda-powertools-python/issues/1027)) - -## Documentation - -* **layer:** upgrade to 1.25.9 - -## Features - -* **parameters:** add clear_cache method for providers ([#1194](https://github.com/awslabs/aws-lambda-powertools-python/issues/1194)) - -## Maintenance - -* include regression in changelog -* bump to 1.25.10 -* **ci:** changelog pre-generation to fetch tags from origin -* **ci:** disable mergify configuration after breaking changes ([#1188](https://github.com/awslabs/aws-lambda-powertools-python/issues/1188)) -* **ci:** post release on tagged issues too -* **deps:** bump codecov/codecov-action from 3.0.0 to 3.1.0 ([#1143](https://github.com/awslabs/aws-lambda-powertools-python/issues/1143)) -* **deps:** bump github/codeql-action from 1 to 2 ([#1154](https://github.com/awslabs/aws-lambda-powertools-python/issues/1154)) -* **deps-dev:** bump flake8-eradicate from 1.2.0 to 1.2.1 ([#1158](https://github.com/awslabs/aws-lambda-powertools-python/issues/1158)) -* **deps-dev:** bump mypy from 0.942 to 0.950 ([#1162](https://github.com/awslabs/aws-lambda-powertools-python/issues/1162)) -* **deps-dev:** bump mkdocs-git-revision-date-plugin ([#1146](https://github.com/awslabs/aws-lambda-powertools-python/issues/1146)) -* **deps-dev:** bump flake8-bugbear from 22.1.11 to 22.4.25 ([#1156](https://github.com/awslabs/aws-lambda-powertools-python/issues/1156)) -* **deps-dev:** bump xenon from 0.8.0 to 0.9.0 ([#1145](https://github.com/awslabs/aws-lambda-powertools-python/issues/1145)) -* **deps-dev:** bump mypy from 0.931 to 0.942 ([#1133](https://github.com/awslabs/aws-lambda-powertools-python/issues/1133)) - -## Regression - -* **parser:** Add missing fields for SESEvent ([#1027](https://github.com/awslabs/aws-lambda-powertools-python/issues/1027)) ([#1190](https://github.com/awslabs/aws-lambda-powertools-python/issues/1190)) - - - -## [v1.25.9] - 2022-04-21 -## Bug Fixes - -* **deps:** correct py36 marker for jmespath - -## Maintenance - -* bump to 1.25.9 - - - -## [v1.25.8] - 2022-04-21 -## Bug Fixes - -* removed ambiguous quotes from labels. 
-* **deps:** update jmespath marker to support 1.0 and py3.6 ([#1139](https://github.com/awslabs/aws-lambda-powertools-python/issues/1139)) -* **governance:** update label in names in issues - -## Documentation - -* **install:** instructions to reduce pydantic package size ([#1077](https://github.com/awslabs/aws-lambda-powertools-python/issues/1077)) -* **layer:** remove link from clipboard button ([#1135](https://github.com/awslabs/aws-lambda-powertools-python/issues/1135)) -* **layer:** update to 1.25.7 - -## Maintenance - -* bump to 1.25.8 -* **deps:** bump codecov/codecov-action from 2.1.0 to 3.0.0 ([#1102](https://github.com/awslabs/aws-lambda-powertools-python/issues/1102)) -* **deps:** bump actions/upload-artifact from 2 to 3 ([#1103](https://github.com/awslabs/aws-lambda-powertools-python/issues/1103)) -* **deps-dev:** bump mkdocs-material from 8.2.4 to 8.2.7 ([#1131](https://github.com/awslabs/aws-lambda-powertools-python/issues/1131)) -* **deps-dev:** bump pytest from 6.2.5 to 7.0.1 ([#1063](https://github.com/awslabs/aws-lambda-powertools-python/issues/1063)) - - - -## [v1.25.7] - 2022-04-08 -## Bug Fixes - -* **api_gateway:** allow whitespace in routes' path parameter ([#1099](https://github.com/awslabs/aws-lambda-powertools-python/issues/1099)) -* **api_gateway:** allow whitespace in routes' path parameter ([#1099](https://github.com/awslabs/aws-lambda-powertools-python/issues/1099)) -* **idempotency:** pass by value on idem key to guard inadvert mutations ([#1090](https://github.com/awslabs/aws-lambda-powertools-python/issues/1090)) -* **logger:** clear_state should keep custom key formats ([#1095](https://github.com/awslabs/aws-lambda-powertools-python/issues/1095)) -* **middleware_factory:** ret type annotation for handler dec ([#1066](https://github.com/awslabs/aws-lambda-powertools-python/issues/1066)) - -## Documentation - -* **layer:** update to 1.25.6; cosmetic changes - -## Maintenance - -* bump to 1.25.7 * **governance:** refresh pull request template sections * **governance:** update external non-triage effort disclaimer +* **governance:** update wording tech debt to summary in maintenance template +* **governance:** add new maintenance issue template for tech debt ([#1326](https://github.com/awslabs/aws-lambda-powertools-python/issues/1326)) +* **governance:** fix typo on semantic commit link introduced in [#1](https://github.com/awslabs/aws-lambda-powertools-python/issues/1)aef4 +* **governance:** add pre-configured dev environment with GitPod.io to ease contributions ([#1403](https://github.com/awslabs/aws-lambda-powertools-python/issues/1403)) +* **governance:** auto-merge workflow_dispatch off +* **governance:** update emeritus affiliation +* **governance:** remove any step relying on master branch * **governance:** update static typing to a form * **governance:** update rfc to a form * **governance:** update feat request to a form * **governance:** bug report form typo -* **governance:** update docs report to a form +* **governance:** fix workflow action requirements & syntax +* **governance:** auto-merge to use squash +* **governance:** warn message on closed issues * **governance:** update bug report to a form +* **governance:** auto-merge mypy-stub dependabot +* **governance:** fix on_merged_pr workflow syntax * **governance:** new ask a question * **governance:** new static typing report - - - -## [v1.25.6] - 2022-04-01 -## Bug Fixes - -* **logger:** clear_state regression on absent standard keys 
([#1088](https://github.com/awslabs/aws-lambda-powertools-python/issues/1088)) - -## Documentation - -* **layer:** bump to 1.25.5 - -## Maintenance - -* bump to 1.25.6 - - - -## [v1.25.5] - 2022-03-18 -## Bug Fixes - -* **logger-utils:** regression on exclude set leading to no formatter ([#1080](https://github.com/awslabs/aws-lambda-powertools-python/issues/1080)) - -## Maintenance - -* bump to 1.25.5 - - - -## [v1.25.4] - 2022-03-17 -## Bug Fixes - -* package_logger as const over logger instance -* repurpose test to cover parent loggers case -* use addHandler over monkeypatch - -## Documentation - -* **appsync:** fix typo -* **contributing:** operational excellence pause -* **layer:** update to 1.25.3 - -## Maintenance - -* bump to 1.25.4 -* remove duplicate test -* comment reason for change -* remove unnecessary test -* lint unused import +* **governance:** warn message on closed issues +* **governance:** address gh reusable workflow limitation +* **governance:** check for related issue in new PRs +* **governance:** update docs report to a form +* **layers:** bump to 1.26.6 using layer v26 +* **layers:** upgrade cdk dep hashes to prevent ci fail +* **layers:** bump to 21 for 1.26.2 +* **layers:** bump to 10 for 1.25.0 +* **layers:** bump to 1.26.5 +* **layers:** bump to 22 for 1.26.3 +* **layers:** add release pipeline in GitHub Actions ([#1278](https://github.com/awslabs/aws-lambda-powertools-python/issues/1278)) +* **layers:** bump to 22 for 1.26.3 +* **layers:** add release pipeline in GitHub Actions ([#1278](https://github.com/awslabs/aws-lambda-powertools-python/issues/1278)) +* **layers:** replace layers account secret ([#1329](https://github.com/awslabs/aws-lambda-powertools-python/issues/1329)) +* **layers:** layer canary stack should not hardcode resource name +* **layers:** expand to all aws commercial regions ([#1324](https://github.com/awslabs/aws-lambda-powertools-python/issues/1324)) +* **license:** Add THIRD-PARTY-LICENSES ([#641](https://github.com/awslabs/aws-lambda-powertools-python/issues/641)) +* **license:** add third party license ([#635](https://github.com/awslabs/aws-lambda-powertools-python/issues/635)) +* **maintainer:** add Leandro as maintainer ([#1468](https://github.com/awslabs/aws-lambda-powertools-python/issues/1468)) +* **maintainers:** add Ruben as a maintainer ([#1392](https://github.com/awslabs/aws-lambda-powertools-python/issues/1392)) +* **maintainers:** update release workflow link +* **maintenance:** add discord link to first PR and first issue ([#1493](https://github.com/awslabs/aws-lambda-powertools-python/issues/1493)) +* **mergify:** disable check for matrix jobs +* **mergify:** use job name to match GH Actions +* **metrics:** revert dimensions test before splitting ([#1243](https://github.com/awslabs/aws-lambda-powertools-python/issues/1243)) +* **metrics:** fix tests when warnings are disabled ([#994](https://github.com/awslabs/aws-lambda-powertools-python/issues/994)) +* **shared:** fix cyclic import & refactor data extraction fn ([#613](https://github.com/awslabs/aws-lambda-powertools-python/issues/613)) +* **test-perf:** use pytest-benchmark to improve reliability ([#1250](https://github.com/awslabs/aws-lambda-powertools-python/issues/1250)) +* **tests:** build and deploy Lambda Layer stack once ([#1466](https://github.com/awslabs/aws-lambda-powertools-python/issues/1466)) +* **tests:** refactor E2E test mechanics to ease maintenance, writing tests and parallelization 
([#1444](https://github.com/awslabs/aws-lambda-powertools-python/issues/1444)) +* **tests:** enable end-to-end test workflow ([#1470](https://github.com/awslabs/aws-lambda-powertools-python/issues/1470)) +* **tests:** refactor E2E logger to ease maintenance, writing tests and parallelization ([#1460](https://github.com/awslabs/aws-lambda-powertools-python/issues/1460)) +* **tests:** refactor E2E tracer to ease maintenance, writing tests and parallelization ([#1457](https://github.com/awslabs/aws-lambda-powertools-python/issues/1457)) ## Regression * service_name fixture - -## Pull Requests - -* Merge pull request [#1075](https://github.com/awslabs/aws-lambda-powertools-python/issues/1075) from mploski/fix/existing-loggers-duplicated-logs - - - -## [v1.25.3] - 2022-03-09 -## Bug Fixes - -* **logger:** ensure state is cleared for custom formatters ([#1072](https://github.com/awslabs/aws-lambda-powertools-python/issues/1072)) - -## Documentation - -* **plugin:** add mermaid to create diagram as code ([#1070](https://github.com/awslabs/aws-lambda-powertools-python/issues/1070)) - -## Maintenance - -* bump to 1.25.3 - - - -## [v1.25.2] - 2022-03-07 -## Bug Fixes - -* **event_handler:** docs snippets, high-level import CorsConfig ([#1019](https://github.com/awslabs/aws-lambda-powertools-python/issues/1019)) -* **lambda-authorizer:** allow proxy resources path in arn ([#1051](https://github.com/awslabs/aws-lambda-powertools-python/issues/1051)) -* **metrics:** flush upon a single metric 100th data point ([#1046](https://github.com/awslabs/aws-lambda-powertools-python/issues/1046)) - -## Documentation - -* **layer:** update to 1.25.1 -* **parser:** APIGatewayProxyEvent to APIGatewayProxyEventModel ([#1061](https://github.com/awslabs/aws-lambda-powertools-python/issues/1061)) - -## Maintenance - -* bump to 1.25.2 -* **deps:** bump actions/setup-python from 2.3.1 to 3 ([#1048](https://github.com/awslabs/aws-lambda-powertools-python/issues/1048)) -* **deps:** bump actions/checkout from 2 to 3 ([#1052](https://github.com/awslabs/aws-lambda-powertools-python/issues/1052)) -* **deps:** bump actions/github-script from 5 to 6 ([#1023](https://github.com/awslabs/aws-lambda-powertools-python/issues/1023)) -* **deps:** bump fastjsonschema from 2.15.2 to 2.15.3 ([#949](https://github.com/awslabs/aws-lambda-powertools-python/issues/949)) -* **deps-dev:** bump mkdocs-material from 8.1.9 to 8.2.4 ([#1054](https://github.com/awslabs/aws-lambda-powertools-python/issues/1054)) - - - -## [v1.25.1] - 2022-02-14 -## Bug Fixes - -* **batch:** bugfix to clear exceptions between executions ([#1022](https://github.com/awslabs/aws-lambda-powertools-python/issues/1022)) - -## Maintenance - -* bump to 1.25.1 -* **layers:** bump to 10 for 1.25.0 - - - -## [v1.25.0] - 2022-02-09 -## Bug Fixes - -* **apigateway:** remove indentation in debug_mode ([#987](https://github.com/awslabs/aws-lambda-powertools-python/issues/987)) -* **batch:** delete >10 messages in legacy sqs processor ([#818](https://github.com/awslabs/aws-lambda-powertools-python/issues/818)) -* **ci:** pr label regex for special chars in title -* **logger:** exclude source_logger in copy_config_to_registered_loggers ([#1001](https://github.com/awslabs/aws-lambda-powertools-python/issues/1001)) -* **logger:** test generates logfile - -## Documentation - -* fix syntax errors and line highlights ([#1004](https://github.com/awslabs/aws-lambda-powertools-python/issues/1004)) -* add better BDD coments -* **event-handler:** improve testing section for graphql 
([#996](https://github.com/awslabs/aws-lambda-powertools-python/issues/996)) -* **layer:** update to 1.24.2 -* **parameters:** add testing your code section ([#1017](https://github.com/awslabs/aws-lambda-powertools-python/issues/1017)) -* **theme:** upgrade mkdocs-material to 8.x ([#1002](https://github.com/awslabs/aws-lambda-powertools-python/issues/1002)) -* **tutorial:** fix broken internal links ([#1000](https://github.com/awslabs/aws-lambda-powertools-python/issues/1000)) - -## Features - -* **event-handler:** new resolvers to fix current_event typing ([#978](https://github.com/awslabs/aws-lambda-powertools-python/issues/978)) -* **logger:** log_event support event data classes (e.g. S3Event) ([#984](https://github.com/awslabs/aws-lambda-powertools-python/issues/984)) -* **mypy:** complete mypy support for the entire codebase ([#943](https://github.com/awslabs/aws-lambda-powertools-python/issues/943)) - -## Maintenance - -* bump to 1.25.0 -* correct docs -* correct docs -* use isinstance over type -* **deps-dev:** bump flake8-bugbear from 21.11.29 to 22.1.11 ([#955](https://github.com/awslabs/aws-lambda-powertools-python/issues/955)) -* **metrics:** fix tests when warnings are disabled ([#994](https://github.com/awslabs/aws-lambda-powertools-python/issues/994)) - -## Pull Requests - -* Merge pull request [#971](https://github.com/awslabs/aws-lambda-powertools-python/issues/971) from gyft/fix-logger-util-tests - - - -## [v1.24.2] - 2022-01-21 -## Bug Fixes - -* **data-classes:** underscore support in api gateway authorizer resource name ([#969](https://github.com/awslabs/aws-lambda-powertools-python/issues/969)) - -## Documentation - -* **layer:** update to 1.24.1 - -## Maintenance - -* bump to 1.24.2 - - - -## [v1.24.1] - 2022-01-20 -## Bug Fixes - -* remove unused json import -* remove apigw contract when using event-handler, apigw tracing -* use decorators, split cold start to ease reading -* incorrect log keys, indentation, snippet consistency -* remove f-strings that doesn't evaluate expr -* **batch:** report multiple failures ([#967](https://github.com/awslabs/aws-lambda-powertools-python/issues/967)) -* **data-classes:** docstring typos and clean up ([#937](https://github.com/awslabs/aws-lambda-powertools-python/issues/937)) -* **parameters:** appconfig internal _get docstrings ([#934](https://github.com/awslabs/aws-lambda-powertools-python/issues/934)) - -## Documentation - -* rename quickstart to tutorial in readme -* rename to tutorial given the size -* add final consideration section -* **batch:** snippet typo on batch processed messages iteration ([#951](https://github.com/awslabs/aws-lambda-powertools-python/issues/951)) -* **batch:** fix typo in context manager keyword ([#938](https://github.com/awslabs/aws-lambda-powertools-python/issues/938)) -* **homepage:** link to typescript version ([#950](https://github.com/awslabs/aws-lambda-powertools-python/issues/950)) -* **install:** new lambda layer for 1.24.0 release -* **metrics:** keep it consistent with other sections, update metric names -* **nav:** make REST and GraphQL event handlers more explicit ([#959](https://github.com/awslabs/aws-lambda-powertools-python/issues/959)) -* **quickstart:** expand on intro line -* **quickstart:** tidy requirements up -* **quickstart:** make section agnostic to json lib -* **quickstart:** same process for Logger -* **quickstart:** add sub-sections, fix highlight & code -* **quickstart:** sentence fragmentation, tidy up -* **tenets:** make core, non-core more explicit -* **tracer:** 
warning to note on local traces -* **tracer:** add initial image, requirements -* **tracer:** add annotation, metadata, and image -* **tracer:** update ServiceLens image w/ API GW, copywriting -* **tutorial:** fix path to images ([#963](https://github.com/awslabs/aws-lambda-powertools-python/issues/963)) - -## Features - -* **ci:** auto-notify & close issues on release -* **logger:** clone powertools logger config to any Python logger ([#927](https://github.com/awslabs/aws-lambda-powertools-python/issues/927)) - -## Maintenance - -* bump to 1.24.1 -* bump to 1.24.1 -* **ci:** run codeql analysis on push only -* **ci:** fix mergify dependabot queue -* **ci:** add queue name in mergify -* **ci:** remove mergify legacy key -* **ci:** update mergify bot breaking change -* **ci:** safely label PR based on title -* **deps:** bump pydantic from 1.8.2 to 1.9.0 ([#933](https://github.com/awslabs/aws-lambda-powertools-python/issues/933)) -* **deps-dev:** bump mypy from 0.930 to 0.931 ([#941](https://github.com/awslabs/aws-lambda-powertools-python/issues/941)) - -## Regression - * order to APP logger/service name due to screenshots - -## Pull Requests - -* Merge pull request [#769](https://github.com/awslabs/aws-lambda-powertools-python/issues/769) from mploski/docs/quick-start - - - -## [v1.24.0] - 2021-12-31 -## Bug Fixes - -* **apigateway:** support [@app](https://github.com/app).not_found() syntax & housekeeping ([#926](https://github.com/awslabs/aws-lambda-powertools-python/issues/926)) -* **event-sources:** handle dynamodb null type as none, not bool ([#929](https://github.com/awslabs/aws-lambda-powertools-python/issues/929)) -* **warning:** future distutils deprecation ([#921](https://github.com/awslabs/aws-lambda-powertools-python/issues/921)) - -## Documentation - -* consistency around admonitions and snippets ([#919](https://github.com/awslabs/aws-lambda-powertools-python/issues/919)) -* Added GraphQL Sample API to Examples section of README.md ([#930](https://github.com/awslabs/aws-lambda-powertools-python/issues/930)) -* **batch:** remove leftover from legacy -* **layer:** bump Lambda Layer to version 6 -* **tracer:** new ignore_endpoint feature ([#931](https://github.com/awslabs/aws-lambda-powertools-python/issues/931)) - -## Features - -* **event-sources:** cache parsed json in data class ([#909](https://github.com/awslabs/aws-lambda-powertools-python/issues/909)) -* **feature_flags:** support beyond boolean values (JSON values) ([#804](https://github.com/awslabs/aws-lambda-powertools-python/issues/804)) -* **idempotency:** support dataclasses & pydantic models payloads ([#908](https://github.com/awslabs/aws-lambda-powertools-python/issues/908)) -* **logger:** support use_datetime_directive for timestamps ([#920](https://github.com/awslabs/aws-lambda-powertools-python/issues/920)) -* **tracer:** ignore tracing for certain hostname(s) or url(s) ([#910](https://github.com/awslabs/aws-lambda-powertools-python/issues/910)) - -## Maintenance - -* bump to 1.24.0 -* **deps-dev:** bump mypy from 0.920 to 0.930 ([#925](https://github.com/awslabs/aws-lambda-powertools-python/issues/925)) - - - -## [v1.23.0] - 2021-12-20 -## Bug Fixes - -* **apigateway:** allow list of HTTP methods in route method ([#838](https://github.com/awslabs/aws-lambda-powertools-python/issues/838)) -* **event-sources:** Pass authorizer data to APIGatewayEventAuthorizer ([#897](https://github.com/awslabs/aws-lambda-powertools-python/issues/897)) -* **event-sources:** handle claimsOverrideDetails set to null 
([#878](https://github.com/awslabs/aws-lambda-powertools-python/issues/878)) -* **idempotency:** include decorated fn name in hash ([#869](https://github.com/awslabs/aws-lambda-powertools-python/issues/869)) -* **metrics:** explicit type to single_metric ctx manager ([#865](https://github.com/awslabs/aws-lambda-powertools-python/issues/865)) -* **parameters:** appconfig transform and return types ([#877](https://github.com/awslabs/aws-lambda-powertools-python/issues/877)) -* **parser:** overload parse when using envelope ([#885](https://github.com/awslabs/aws-lambda-powertools-python/issues/885)) -* **parser:** kinesis sequence number is str, not int ([#907](https://github.com/awslabs/aws-lambda-powertools-python/issues/907)) -* **parser:** mypy support for payload type override as models ([#883](https://github.com/awslabs/aws-lambda-powertools-python/issues/883)) -* **tracer:** add warm start annotation (ColdStart=False) ([#851](https://github.com/awslabs/aws-lambda-powertools-python/issues/851)) - -## Documentation - -* external reference to cloudformation custom resource helper ([#914](https://github.com/awslabs/aws-lambda-powertools-python/issues/914)) -* add new public Slack invite -* disable search blur in non-prod env -* update Lambda Layers version -* **apigateway:** add new not_found feature ([#915](https://github.com/awslabs/aws-lambda-powertools-python/issues/915)) -* **apigateway:** fix sample layout provided ([#864](https://github.com/awslabs/aws-lambda-powertools-python/issues/864)) -* **appsync:** fix users.py typo to locations [#830](https://github.com/awslabs/aws-lambda-powertools-python/issues/830) -* **lambda_layer:** fix CDK layer syntax - -## Features - -* **apigateway:** add exception_handler support ([#898](https://github.com/awslabs/aws-lambda-powertools-python/issues/898)) -* **apigateway:** access parent api resolver from router ([#842](https://github.com/awslabs/aws-lambda-powertools-python/issues/842)) -* **batch:** new BatchProcessor for SQS, DynamoDB, Kinesis ([#886](https://github.com/awslabs/aws-lambda-powertools-python/issues/886)) -* **logger:** allow handler with custom kwargs signature ([#913](https://github.com/awslabs/aws-lambda-powertools-python/issues/913)) -* **tracer:** add service annotation when service is set ([#861](https://github.com/awslabs/aws-lambda-powertools-python/issues/861)) - -## Maintenance - -* correct pr label order -* minor housekeeping before release ([#912](https://github.com/awslabs/aws-lambda-powertools-python/issues/912)) -* bump to 1.23.0 -* **ci:** split latest docs workflow -* **deps:** bump fastjsonschema from 2.15.1 to 2.15.2 ([#891](https://github.com/awslabs/aws-lambda-powertools-python/issues/891)) -* **deps:** bump actions/setup-python from 2.2.2 to 2.3.0 ([#831](https://github.com/awslabs/aws-lambda-powertools-python/issues/831)) -* **deps:** bump aws-xray-sdk from 2.8.0 to 2.9.0 ([#876](https://github.com/awslabs/aws-lambda-powertools-python/issues/876)) -* **deps:** support arm64 when developing locally ([#862](https://github.com/awslabs/aws-lambda-powertools-python/issues/862)) -* **deps:** bump actions/setup-python from 2.3.0 to 2.3.1 ([#852](https://github.com/awslabs/aws-lambda-powertools-python/issues/852)) -* **deps-dev:** bump flake8 from 3.9.2 to 4.0.1 ([#789](https://github.com/awslabs/aws-lambda-powertools-python/issues/789)) -* **deps-dev:** bump black from 21.10b0 to 21.11b1 ([#839](https://github.com/awslabs/aws-lambda-powertools-python/issues/839)) -* **deps-dev:** bump black from 21.11b1 to 21.12b0 
([#872](https://github.com/awslabs/aws-lambda-powertools-python/issues/872)) -* **deps-dev:** bump mypy from 0.910 to 0.920 ([#903](https://github.com/awslabs/aws-lambda-powertools-python/issues/903)) - - - -## [v1.22.0] - 2021-11-17 -## Bug Fixes - -* change supported python version from 3.6.1 to 3.6.2, bump black ([#807](https://github.com/awslabs/aws-lambda-powertools-python/issues/807)) -* **ci:** comment custom publish version checker -* **ci:** skip sync master on docs hotfix -* **parser:** body/QS can be null or omitted in apigw v1/v2 ([#820](https://github.com/awslabs/aws-lambda-powertools-python/issues/820)) - -## Code Refactoring - -* **apigateway:** Add BaseRouter and duplicate route check ([#757](https://github.com/awslabs/aws-lambda-powertools-python/issues/757)) - -## Documentation - -* updated Lambda Layers definition & limitations. ([#775](https://github.com/awslabs/aws-lambda-powertools-python/issues/775)) -* Idiomatic tenet updated to Progressive -* use higher contrast font ([#822](https://github.com/awslabs/aws-lambda-powertools-python/issues/822)) -* use higher contrast font -* fix indentation of SAM snippets in install section ([#778](https://github.com/awslabs/aws-lambda-powertools-python/issues/778)) -* improve public lambda layer wording, clipboard buttons ([#762](https://github.com/awslabs/aws-lambda-powertools-python/issues/762)) -* add amplify-cli instructions for public layer ([#754](https://github.com/awslabs/aws-lambda-powertools-python/issues/754)) -* **api-gateway:** add support for new router feature ([#767](https://github.com/awslabs/aws-lambda-powertools-python/issues/767)) -* **apigateway:** re-add sample layout, add considerations ([#826](https://github.com/awslabs/aws-lambda-powertools-python/issues/826)) -* **appsync:** add new router feature ([#821](https://github.com/awslabs/aws-lambda-powertools-python/issues/821)) -* **idempotency:** add support for DynamoDB composite keys ([#808](https://github.com/awslabs/aws-lambda-powertools-python/issues/808)) -* **tenets:** update Idiomatic tenet to Progressive ([#823](https://github.com/awslabs/aws-lambda-powertools-python/issues/823)) - -## Features - -* **apigateway:** add Router to allow large routing composition ([#645](https://github.com/awslabs/aws-lambda-powertools-python/issues/645)) -* **appsync:** add Router to allow large resolver composition ([#776](https://github.com/awslabs/aws-lambda-powertools-python/issues/776)) -* **data-classes:** ActiveMQ and RabbitMQ support ([#770](https://github.com/awslabs/aws-lambda-powertools-python/issues/770)) -* **logger:** add ALB correlation ID support ([#816](https://github.com/awslabs/aws-lambda-powertools-python/issues/816)) - -## Maintenance - -* fix var expr -* remove Lambda Layer version tag -* bump to 1.22.0 -* conditional to publish docs only attempt 3 -* conditional to publish docs only attempt 2 -* conditional to publish docs only -* **deps:** bump boto3 from 1.18.58 to 1.18.59 ([#760](https://github.com/awslabs/aws-lambda-powertools-python/issues/760)) -* **deps:** bump boto3 from 1.18.56 to 1.18.58 ([#755](https://github.com/awslabs/aws-lambda-powertools-python/issues/755)) -* **deps:** bump urllib3 from 1.26.4 to 1.26.5 ([#787](https://github.com/awslabs/aws-lambda-powertools-python/issues/787)) -* **deps:** bump boto3 from 1.19.6 to 1.20.3 ([#809](https://github.com/awslabs/aws-lambda-powertools-python/issues/809)) -* **deps:** bump boto3 from 1.18.61 to 1.19.6 ([#783](https://github.com/awslabs/aws-lambda-powertools-python/issues/783)) -* 
**deps:** bump boto3 from 1.20.3 to 1.20.5 ([#817](https://github.com/awslabs/aws-lambda-powertools-python/issues/817)) -* **deps:** bump boto3 from 1.18.59 to 1.18.61 ([#766](https://github.com/awslabs/aws-lambda-powertools-python/issues/766)) -* **deps-dev:** bump coverage from 6.0.1 to 6.0.2 ([#764](https://github.com/awslabs/aws-lambda-powertools-python/issues/764)) -* **deps-dev:** bump pytest-asyncio from 0.15.1 to 0.16.0 ([#782](https://github.com/awslabs/aws-lambda-powertools-python/issues/782)) -* **deps-dev:** bump flake8-eradicate from 1.1.0 to 1.2.0 ([#784](https://github.com/awslabs/aws-lambda-powertools-python/issues/784)) -* **deps-dev:** bump flake8-isort from 4.0.0 to 4.1.1 ([#785](https://github.com/awslabs/aws-lambda-powertools-python/issues/785)) -* **deps-dev:** bump mkdocs-material from 7.3.2 to 7.3.3 ([#758](https://github.com/awslabs/aws-lambda-powertools-python/issues/758)) -* **deps-dev:** bump flake8-comprehensions from 3.6.1 to 3.7.0 ([#759](https://github.com/awslabs/aws-lambda-powertools-python/issues/759)) -* **deps-dev:** bump mkdocs-material from 7.3.3 to 7.3.5 ([#781](https://github.com/awslabs/aws-lambda-powertools-python/issues/781)) -* **deps-dev:** bump coverage from 6.0 to 6.0.1 ([#751](https://github.com/awslabs/aws-lambda-powertools-python/issues/751)) -* **deps-dev:** bump mkdocs-material from 7.3.5 to 7.3.6 ([#791](https://github.com/awslabs/aws-lambda-powertools-python/issues/791)) -* **deps-dev:** bump coverage from 6.0.2 to 6.1.2 ([#810](https://github.com/awslabs/aws-lambda-powertools-python/issues/810)) -* **deps-dev:** bump isort from 5.9.3 to 5.10.1 ([#811](https://github.com/awslabs/aws-lambda-powertools-python/issues/811)) - - - -## [v1.21.1] - 2021-10-07 -## Documentation - -* add new public layer ARNs ([#746](https://github.com/awslabs/aws-lambda-powertools-python/issues/746)) - -## Maintenance - -* include public layers changelog -* bump to 1.21.1 -* include regression in changelog -* ignore constants in test cov ([#745](https://github.com/awslabs/aws-lambda-powertools-python/issues/745)) -* ignore constants in tests cov -* add support for publishing fallback -* **deps:** bump boto3 from 1.18.54 to 1.18.56 ([#742](https://github.com/awslabs/aws-lambda-powertools-python/issues/742)) -* **deps-dev:** bump mkdocs-material from 7.3.1 to 7.3.2 ([#741](https://github.com/awslabs/aws-lambda-powertools-python/issues/741)) - -## Regression - * **metrics:** typing regression on log_metrics callable ([#744](https://github.com/awslabs/aws-lambda-powertools-python/issues/744)) - - - -## [v1.21.0] - 2021-10-05 -## Bug Fixes - -* **data-classes:** use correct asdict funciton ([#666](https://github.com/awslabs/aws-lambda-powertools-python/issues/666)) -* **feature-flags:** rules should evaluate with an AND op ([#724](https://github.com/awslabs/aws-lambda-powertools-python/issues/724)) -* **idempotency:** sorting keys before hashing ([#722](https://github.com/awslabs/aws-lambda-powertools-python/issues/722)) -* **idempotency:** sorting keys before hashing -* **logger:** push extra keys to the end ([#722](https://github.com/awslabs/aws-lambda-powertools-python/issues/722)) -* **mypy:** a few return types, type signatures, and untyped areas ([#718](https://github.com/awslabs/aws-lambda-powertools-python/issues/718)) - -## Code Refactoring - -* **data-classes:** clean up internal logic for APIGatewayAuthorizerResponse ([#643](https://github.com/awslabs/aws-lambda-powertools-python/issues/643)) - -## Documentation - -* Terraform reference for SAR Lambda 
Layer ([#716](https://github.com/awslabs/aws-lambda-powertools-python/issues/716)) -* add team behind it and email -* **event-handler:** document catch-all routes ([#705](https://github.com/awslabs/aws-lambda-powertools-python/issues/705)) -* **idempotency:** fix misleading idempotent examples ([#661](https://github.com/awslabs/aws-lambda-powertools-python/issues/661)) -* **jmespath:** clarify envelope terminology -* **parser:** fix incorrect import in root_validator example ([#735](https://github.com/awslabs/aws-lambda-powertools-python/issues/735)) - -## Features - -* expose jmespath powertools functions ([#736](https://github.com/awslabs/aws-lambda-powertools-python/issues/736)) -* add get_raw_configuration property in store; expose store -* boto3 sessions in batch, parameters & idempotency ([#717](https://github.com/awslabs/aws-lambda-powertools-python/issues/717)) -* **feature-flags:** Bring your own logger for debug ([#709](https://github.com/awslabs/aws-lambda-powertools-python/issues/709)) -* **feature-flags:** improve "IN/NOT_IN"; new rule actions ([#710](https://github.com/awslabs/aws-lambda-powertools-python/issues/710)) -* **feature-flags:** get_raw_configuration property in Store ([#720](https://github.com/awslabs/aws-lambda-powertools-python/issues/720)) -* **feature_flags:** Added inequality conditions ([#721](https://github.com/awslabs/aws-lambda-powertools-python/issues/721)) -* **idempotency:** makes customers unit testing easier ([#719](https://github.com/awslabs/aws-lambda-powertools-python/issues/719)) -* **validator:** include missing data elements from a validation error ([#686](https://github.com/awslabs/aws-lambda-powertools-python/issues/686)) - -## Maintenance - -* add python 3.9 support -* bump to 1.21.0 -* **deps:** bump boto3 from 1.18.41 to 1.18.49 ([#703](https://github.com/awslabs/aws-lambda-powertools-python/issues/703)) -* **deps:** bump boto3 from 1.18.32 to 1.18.38 ([#671](https://github.com/awslabs/aws-lambda-powertools-python/issues/671)) -* **deps:** bump boto3 from 1.18.38 to 1.18.41 ([#677](https://github.com/awslabs/aws-lambda-powertools-python/issues/677)) -* **deps:** bump boto3 from 1.18.51 to 1.18.54 ([#733](https://github.com/awslabs/aws-lambda-powertools-python/issues/733)) -* **deps:** bump boto3 from 1.18.49 to 1.18.51 ([#713](https://github.com/awslabs/aws-lambda-powertools-python/issues/713)) -* **deps:** bump codecov/codecov-action from 2.0.2 to 2.1.0 ([#675](https://github.com/awslabs/aws-lambda-powertools-python/issues/675)) -* **deps-dev:** bump flake8-bugbear from 21.9.1 to 21.9.2 ([#712](https://github.com/awslabs/aws-lambda-powertools-python/issues/712)) -* **deps-dev:** bump mkdocs-material from 7.3.0 to 7.3.1 ([#731](https://github.com/awslabs/aws-lambda-powertools-python/issues/731)) -* **deps-dev:** bump mkdocs-material from 7.2.8 to 7.3.0 ([#695](https://github.com/awslabs/aws-lambda-powertools-python/issues/695)) -* **deps-dev:** bump mkdocs-material from 7.2.6 to 7.2.8 ([#682](https://github.com/awslabs/aws-lambda-powertools-python/issues/682)) -* **deps-dev:** bump flake8-bugbear from 21.4.3 to 21.9.1 ([#676](https://github.com/awslabs/aws-lambda-powertools-python/issues/676)) -* **deps-dev:** bump coverage from 5.5 to 6.0 ([#732](https://github.com/awslabs/aws-lambda-powertools-python/issues/732)) -* **deps-dev:** bump radon from 4.5.2 to 5.1.0 ([#673](https://github.com/awslabs/aws-lambda-powertools-python/issues/673)) -* **deps-dev:** bump pytest-cov from 2.12.1 to 3.0.0 
([#730](https://github.com/awslabs/aws-lambda-powertools-python/issues/730)) -* **deps-dev:** bump xenon from 0.7.3 to 0.8.0 ([#669](https://github.com/awslabs/aws-lambda-powertools-python/issues/669)) - - - -## [v1.20.2] - 2021-09-02 -## Bug Fixes - -* Fix issue with strip_prefixes ([#647](https://github.com/awslabs/aws-lambda-powertools-python/issues/647)) - -## Maintenance - -* bump to 1.20.2 -* **deps:** bump boto3 from 1.18.26 to 1.18.32 ([#663](https://github.com/awslabs/aws-lambda-powertools-python/issues/663)) -* **deps-dev:** bump mkdocs-material from 7.2.4 to 7.2.6 ([#665](https://github.com/awslabs/aws-lambda-powertools-python/issues/665)) -* **deps-dev:** bump pytest from 6.2.4 to 6.2.5 ([#662](https://github.com/awslabs/aws-lambda-powertools-python/issues/662)) -* **license:** Add THIRD-PARTY-LICENSES ([#641](https://github.com/awslabs/aws-lambda-powertools-python/issues/641)) - - - -## [v1.20.1] - 2021-08-22 -## Bug Fixes - -* **idempotency:** sorting keys before hashing ([#639](https://github.com/awslabs/aws-lambda-powertools-python/issues/639)) - -## Maintenance - -* bump to 1.20.1 -* markdown linter fixes ([#636](https://github.com/awslabs/aws-lambda-powertools-python/issues/636)) -* setup codespaces ([#637](https://github.com/awslabs/aws-lambda-powertools-python/issues/637)) -* **license:** add third party license ([#635](https://github.com/awslabs/aws-lambda-powertools-python/issues/635)) - - - -## [v1.20.0] - 2021-08-21 -## Bug Fixes - -* **api-gateway:** HTTP API strip stage name from request path ([#622](https://github.com/awslabs/aws-lambda-powertools-python/issues/622)) -* **docs:** correct feature_flags link and json exmaples ([#605](https://github.com/awslabs/aws-lambda-powertools-python/issues/605)) - -## Code Refactoring - -* **event_handler:** match to match_results; 3.10 new keyword ([#616](https://github.com/awslabs/aws-lambda-powertools-python/issues/616)) - -## Documentation - -* **api-gateway:** add new API mapping support -* **data-class:** fix invalid syntax in new AppSync Authorizer -* **data-classes:** make authorizer concise; use enum ([#630](https://github.com/awslabs/aws-lambda-powertools-python/issues/630)) - -## Features - -* **data-classes:** authorizer for http api and rest api ([#620](https://github.com/awslabs/aws-lambda-powertools-python/issues/620)) -* **data-classes:** data_as_bytes prop KinesisStreamRecordPayload ([#628](https://github.com/awslabs/aws-lambda-powertools-python/issues/628)) -* **data-classes:** AppSync Lambda authorizer event ([#610](https://github.com/awslabs/aws-lambda-powertools-python/issues/610)) -* **event-handler:** prefixes to strip for custom mappings ([#579](https://github.com/awslabs/aws-lambda-powertools-python/issues/579)) -* **general:** support for Python 3.9 ([#626](https://github.com/awslabs/aws-lambda-powertools-python/issues/626)) -* **idempotency:** support for any synchronous function ([#625](https://github.com/awslabs/aws-lambda-powertools-python/issues/625)) - -## Maintenance - -* update changelog to reflect out-of-band commits -* bump to 1.20.0 -* update new changelog version tag -* **actions:** include new labels -* **api-docs:** enable allow_reuse to fix the docs ([#612](https://github.com/awslabs/aws-lambda-powertools-python/issues/612)) -* **deps:** bump boto3 from 1.18.25 to 1.18.26 ([#627](https://github.com/awslabs/aws-lambda-powertools-python/issues/627)) -* **deps:** bump boto3 from 1.18.24 to 1.18.25 ([#623](https://github.com/awslabs/aws-lambda-powertools-python/issues/623)) -* **deps:** 
bump boto3 from 1.18.22 to 1.18.24 ([#619](https://github.com/awslabs/aws-lambda-powertools-python/issues/619)) -* **deps:** bump boto3 from 1.18.21 to 1.18.22 ([#614](https://github.com/awslabs/aws-lambda-powertools-python/issues/614)) -* **deps:** bump boto3 from 1.18.17 to 1.18.21 ([#608](https://github.com/awslabs/aws-lambda-powertools-python/issues/608)) -* **deps-dev:** bump flake8-comprehensions from 3.6.0 to 3.6.1 ([#615](https://github.com/awslabs/aws-lambda-powertools-python/issues/615)) -* **deps-dev:** bump flake8-comprehensions from 3.5.0 to 3.6.0 ([#609](https://github.com/awslabs/aws-lambda-powertools-python/issues/609)) -* **deps-dev:** bump mkdocs-material from 7.2.3 to 7.2.4 ([#607](https://github.com/awslabs/aws-lambda-powertools-python/issues/607)) -* **docs:** correct markdown based on markdown lint ([#603](https://github.com/awslabs/aws-lambda-powertools-python/issues/603)) -* **shared:** fix cyclic import & refactor data extraction fn ([#613](https://github.com/awslabs/aws-lambda-powertools-python/issues/613)) - - - -## [v1.19.0] - 2021-08-11 -## Bug Fixes - -* **deps:** bump poetry to latest ([#592](https://github.com/awslabs/aws-lambda-powertools-python/issues/592)) -* **feature-flags:** bug handling multiple conditions ([#599](https://github.com/awslabs/aws-lambda-powertools-python/issues/599)) -* **feature-toggles:** correct cdk example ([#601](https://github.com/awslabs/aws-lambda-powertools-python/issues/601)) -* **parser:** apigw wss validation check_message_id; housekeeping ([#553](https://github.com/awslabs/aws-lambda-powertools-python/issues/553)) - -## Code Refactoring - -* **feature-flags:** add debug for all features evaluation" ([#590](https://github.com/awslabs/aws-lambda-powertools-python/issues/590)) -* **feature_flags:** optimize UX and maintenance ([#563](https://github.com/awslabs/aws-lambda-powertools-python/issues/563)) - -## Documentation - -* **event-handler:** new custom serializer option -* **feature-flags:** add guidance when to use vs env vars vs parameters -* **feature-flags:** fix sample feature name in evaluate -* **feature-flags:** create concrete documentation ([#594](https://github.com/awslabs/aws-lambda-powertools-python/issues/594)) -* **feature-toggles:** correct docs and typing ([#588](https://github.com/awslabs/aws-lambda-powertools-python/issues/588)) -* **feature_flags:** fix SAM infra, convert CDK to Python -* **parameters:** auto-transforming values based on suffix ([#573](https://github.com/awslabs/aws-lambda-powertools-python/issues/573)) -* **readme:** add code coverage badge ([#577](https://github.com/awslabs/aws-lambda-powertools-python/issues/577)) -* **tracer:** update wording that it auto-disables on non-Lambda env - -## Features - -* **api-gateway:** add support for custom serializer ([#568](https://github.com/awslabs/aws-lambda-powertools-python/issues/568)) -* **data-classes:** decode json_body if based64 encoded ([#560](https://github.com/awslabs/aws-lambda-powertools-python/issues/560)) -* **feature flags:** Add not_in action and rename contains to in ([#589](https://github.com/awslabs/aws-lambda-powertools-python/issues/589)) -* **params:** expose high level max_age, raise_on_transform_error ([#567](https://github.com/awslabs/aws-lambda-powertools-python/issues/567)) -* **tracer:** disable tracer when for non-Lambda envs ([#598](https://github.com/awslabs/aws-lambda-powertools-python/issues/598)) - -## Maintenance - -* only build docs on docs path -* update pypi description, keywords -* bump to 1.19.0 -* enable 
autolabel based on PR title -* include feature-flags docs hotfix -* **deps:** bump boto3 from 1.18.15 to 1.18.17 ([#597](https://github.com/awslabs/aws-lambda-powertools-python/issues/597)) -* **deps:** bump boto3 from 1.18.1 to 1.18.15 ([#591](https://github.com/awslabs/aws-lambda-powertools-python/issues/591)) -* **deps:** bump codecov/codecov-action from 2.0.1 to 2.0.2 ([#558](https://github.com/awslabs/aws-lambda-powertools-python/issues/558)) -* **deps-dev:** bump mkdocs-material from 7.2.1 to 7.2.2 ([#582](https://github.com/awslabs/aws-lambda-powertools-python/issues/582)) -* **deps-dev:** bump mkdocs-material from 7.2.2 to 7.2.3 ([#596](https://github.com/awslabs/aws-lambda-powertools-python/issues/596)) -* **deps-dev:** bump pdoc3 from 0.9.2 to 0.10.0 ([#584](https://github.com/awslabs/aws-lambda-powertools-python/issues/584)) -* **deps-dev:** bump isort from 5.9.2 to 5.9.3 ([#574](https://github.com/awslabs/aws-lambda-powertools-python/issues/574)) -* **deps-dev:** bump mkdocs-material from 7.2.0 to 7.2.1 ([#566](https://github.com/awslabs/aws-lambda-powertools-python/issues/566)) -* **deps-dev:** bump mkdocs-material from 7.1.11 to 7.2.0 ([#551](https://github.com/awslabs/aws-lambda-powertools-python/issues/551)) -* **deps-dev:** bump flake8-black from 0.2.1 to 0.2.3 ([#541](https://github.com/awslabs/aws-lambda-powertools-python/issues/541)) - - - -## [v1.18.1] - 2021-07-23 -## Bug Fixes - -* **api-gateway:** route regression non-word and unsafe URI chars ([#556](https://github.com/awslabs/aws-lambda-powertools-python/issues/556)) - -## Maintenance - -* bump 1.18.1 - - - -## [v1.18.0] - 2021-07-20 -## Bug Fixes - -* **api-gateway:** non-greedy route pattern regex ([#533](https://github.com/awslabs/aws-lambda-powertools-python/issues/533)) -* **api-gateway:** incorrect plain text mimetype [#506](https://github.com/awslabs/aws-lambda-powertools-python/issues/506) -* **data-classes:** include milliseconds in scalar types ([#504](https://github.com/awslabs/aws-lambda-powertools-python/issues/504)) -* **mypy:** fixes to resolve no implicit optional errors ([#521](https://github.com/awslabs/aws-lambda-powertools-python/issues/521)) -* **parser:** Make ApiGateway version, authorizer fields optional ([#532](https://github.com/awslabs/aws-lambda-powertools-python/issues/532)) -* **tracer:** mypy generic to preserve decorated method signature ([#529](https://github.com/awslabs/aws-lambda-powertools-python/issues/529)) - -## Code Refactoring - -* **feature-toggles:** Code coverage and housekeeping ([#530](https://github.com/awslabs/aws-lambda-powertools-python/issues/530)) - -## Documentation - -* **api-gateway:** document new HTTP service error exceptions ([#546](https://github.com/awslabs/aws-lambda-powertools-python/issues/546)) -* **logger:** document new get_correlation_id method ([#545](https://github.com/awslabs/aws-lambda-powertools-python/issues/545)) - -## Features - -* **api-gateway:** add debug mode ([#507](https://github.com/awslabs/aws-lambda-powertools-python/issues/507)) -* **api-gateway:** add common service errors ([#506](https://github.com/awslabs/aws-lambda-powertools-python/issues/506)) -* **event-handler:** Support AppSyncResolverEvent subclassing ([#526](https://github.com/awslabs/aws-lambda-powertools-python/issues/526)) -* **feat-toggle:** New simple feature toggles rule engine (WIP) ([#494](https://github.com/awslabs/aws-lambda-powertools-python/issues/494)) -* **logger:** add get_correlation_id method 
([#516](https://github.com/awslabs/aws-lambda-powertools-python/issues/516)) -* **mypy:** add mypy support to makefile ([#508](https://github.com/awslabs/aws-lambda-powertools-python/issues/508)) - -## Maintenance - -* bump 1.18.0 ([#547](https://github.com/awslabs/aws-lambda-powertools-python/issues/547)) -* **deps:** bump codecov/codecov-action from 1 to 2.0.1 ([#539](https://github.com/awslabs/aws-lambda-powertools-python/issues/539)) -* **deps:** bump boto3 from 1.18.0 to 1.18.1 ([#528](https://github.com/awslabs/aws-lambda-powertools-python/issues/528)) -* **deps:** bump boto3 from 1.17.110 to 1.18.0 ([#527](https://github.com/awslabs/aws-lambda-powertools-python/issues/527)) -* **deps:** bump boto3 from 1.17.102 to 1.17.110 ([#523](https://github.com/awslabs/aws-lambda-powertools-python/issues/523)) -* **deps-dev:** bump mkdocs-material from 7.1.10 to 7.1.11 ([#542](https://github.com/awslabs/aws-lambda-powertools-python/issues/542)) -* **deps-dev:** bump mkdocs-material from 7.1.9 to 7.1.10 ([#522](https://github.com/awslabs/aws-lambda-powertools-python/issues/522)) -* **deps-dev:** bump isort from 5.9.1 to 5.9.2 ([#514](https://github.com/awslabs/aws-lambda-powertools-python/issues/514)) -* **event-handler:** adjusts exception docstrings to not confuse AppSync customers - - - -## [v1.17.1] - 2021-07-02 -## Bug Fixes - -* **validator:** handle built-in custom formats correctly ([#498](https://github.com/awslabs/aws-lambda-powertools-python/issues/498)) - -## Documentation - -* add Layers example for Serverless framework & CDK ([#500](https://github.com/awslabs/aws-lambda-powertools-python/issues/500)) -* enable dark mode switch ([#471](https://github.com/awslabs/aws-lambda-powertools-python/issues/471)) -* **logger:** add FAQ for cross-account searches ([#501](https://github.com/awslabs/aws-lambda-powertools-python/issues/501)) -* **tracer:** additional scenario when to disable auto-capture ([#499](https://github.com/awslabs/aws-lambda-powertools-python/issues/499)) - -## Maintenance - -* bump 1.17.1 ([#502](https://github.com/awslabs/aws-lambda-powertools-python/issues/502)) -* **deps:** bump boto3 from 1.17.101 to 1.17.102 ([#493](https://github.com/awslabs/aws-lambda-powertools-python/issues/493)) -* **deps:** bump boto3 from 1.17.91 to 1.17.101 ([#490](https://github.com/awslabs/aws-lambda-powertools-python/issues/490)) -* **deps:** bump email-validator from 1.1.2 to 1.1.3 ([#478](https://github.com/awslabs/aws-lambda-powertools-python/issues/478)) -* **deps:** bump boto3 from 1.17.89 to 1.17.91 ([#473](https://github.com/awslabs/aws-lambda-powertools-python/issues/473)) -* **deps-dev:** bump flake8-eradicate from 1.0.0 to 1.1.0 ([#492](https://github.com/awslabs/aws-lambda-powertools-python/issues/492)) -* **deps-dev:** bump isort from 5.8.0 to 5.9.1 ([#487](https://github.com/awslabs/aws-lambda-powertools-python/issues/487)) -* **deps-dev:** bump mkdocs-material from 7.1.7 to 7.1.9 ([#491](https://github.com/awslabs/aws-lambda-powertools-python/issues/491)) - - - -## [v1.17.0] - 2021-06-08 -## Documentation - -* include new public roadmap ([#452](https://github.com/awslabs/aws-lambda-powertools-python/issues/452)) -* **data_classes:** fix missing dynamodb stream get_type/value -* **idempotency:** remove old todo - -## Features - -* **data-classes:** add AttributeValueType to DynamoDBStreamEvent ([#462](https://github.com/awslabs/aws-lambda-powertools-python/issues/462)) -* **data-classes:** decorator to instantiate data_classes and docs updates 
([#442](https://github.com/awslabs/aws-lambda-powertools-python/issues/442)) -* **logger:** add option to clear state per invocation ([#467](https://github.com/awslabs/aws-lambda-powertools-python/issues/467)) -* **parser:** add support for API Gateway HTTP API [#434](https://github.com/awslabs/aws-lambda-powertools-python/issues/434) ([#441](https://github.com/awslabs/aws-lambda-powertools-python/issues/441)) - -## Maintenance - -* bump xenon from 0.7.1 to 0.7.3 ([#446](https://github.com/awslabs/aws-lambda-powertools-python/issues/446)) -* fix changelog file redirection -* include dependencies label under maintenance -* ignore codecov upload -* reintroduce codecov token -* fix path for PR auto-labelling -* assited changelog pre-generation, auto-label PR ([#443](https://github.com/awslabs/aws-lambda-powertools-python/issues/443)) -* enable dependabot for dep upgrades ([#444](https://github.com/awslabs/aws-lambda-powertools-python/issues/444)) -* enable mergify ([#450](https://github.com/awslabs/aws-lambda-powertools-python/issues/450)) -* dependabot/mergify guardrail for major versions -* fix dependabot commit messages prefix -* fix dependabot unique set config -* bump mkdocs-material from 7.1.5 to 7.1.6 ([#451](https://github.com/awslabs/aws-lambda-powertools-python/issues/451)) -* bump version to 1.17.0 -* bump boto3 from 1.17.78 to 1.17.84 ([#449](https://github.com/awslabs/aws-lambda-powertools-python/issues/449)) -* update mergify to require approval on dependabot ([#456](https://github.com/awslabs/aws-lambda-powertools-python/issues/456)) -* bump actions/setup-python from 1 to 2.2.2 ([#445](https://github.com/awslabs/aws-lambda-powertools-python/issues/445)) -* trial boring cyborg automation -* **deps:** bump boto3 from 1.17.87 to 1.17.88 ([#463](https://github.com/awslabs/aws-lambda-powertools-python/issues/463)) -* **deps:** bump boto3 from 1.17.88 to 1.17.89 ([#466](https://github.com/awslabs/aws-lambda-powertools-python/issues/466)) -* **deps:** bump boto3 from 1.17.84 to 1.17.85 ([#455](https://github.com/awslabs/aws-lambda-powertools-python/issues/455)) -* **deps:** bump boto3 from 1.17.85 to 1.17.86 ([#458](https://github.com/awslabs/aws-lambda-powertools-python/issues/458)) -* **deps:** bump boto3 from 1.17.86 to 1.17.87 ([#459](https://github.com/awslabs/aws-lambda-powertools-python/issues/459)) -* **deps-dev:** bump mkdocs-material from 7.1.6 to 7.1.7 ([#464](https://github.com/awslabs/aws-lambda-powertools-python/issues/464)) -* **deps-dev:** bump pytest-cov from 2.12.0 to 2.12.1 ([#454](https://github.com/awslabs/aws-lambda-powertools-python/issues/454)) -* **mergify:** use job name to match GH Actions -* **mergify:** disable check for matrix jobs - - - -## [v1.16.1] - 2021-05-23 -## Features - -* **parser:** security issue in Pydantic [#436](https://github.com/awslabs/aws-lambda-powertools-python/issues/436) ([#437](https://github.com/awslabs/aws-lambda-powertools-python/issues/437)) - -## Maintenance - -* bump to 1.16.1 - - - -## [v1.16.0] - 2021-05-17 -## Features - -* **data-classes:** decode base64 encoded body ([#425](https://github.com/awslabs/aws-lambda-powertools-python/issues/425)) -* **data-classes:** support for code pipeline job event ([#416](https://github.com/awslabs/aws-lambda-powertools-python/issues/416)) - -## Maintenance - -* bump to 1.16.0 - - - -## [v1.15.1] - 2021-05-13 -## Bug Fixes - -* **docs:** Use updated names for ProxyEventType ([#424](https://github.com/awslabs/aws-lambda-powertools-python/issues/424)) - -## Documentation - -* update list 
of features -* **event_handler:** add missing note on trimmed responses - -## Maintenance - -* bump to 1.15.1 - - - -## [v1.15.0] - 2021-05-06 -## Bug Fixes - -* **deps:** Bump aws-xray-sdk from 2.6.0 to 2.8.0 ([#413](https://github.com/awslabs/aws-lambda-powertools-python/issues/413)) -* **docs:** workflow to include api ref in latest alias ([#408](https://github.com/awslabs/aws-lambda-powertools-python/issues/408)) -* **parser:** Improve types for parser.py ([#419](https://github.com/awslabs/aws-lambda-powertools-python/issues/419)) -* **validator:** event type annotation as any in validate fn ([#405](https://github.com/awslabs/aws-lambda-powertools-python/issues/405)) - -## Code Refactoring - -* simplify custom formatter for minor changes ([#417](https://github.com/awslabs/aws-lambda-powertools-python/issues/417)) -* **event-handler:** api gateway handler review changes ([#420](https://github.com/awslabs/aws-lambda-powertools-python/issues/420)) -* **event-handler:** Add ResponseBuilder and more docs ([#412](https://github.com/awslabs/aws-lambda-powertools-python/issues/412)) -* **logger:** BYOFormatter and Handler, UTC support, and more ([#404](https://github.com/awslabs/aws-lambda-powertools-python/issues/404)) - -## Documentation - -* **api_gateway:** new event handler for API Gateway and ALB ([#418](https://github.com/awslabs/aws-lambda-powertools-python/issues/418)) -* **event_handler:** fix closing brackets in CORS sample -* **event_handler:** remove beta flag from new HTTP utility -* **idempotency:** remove beta flag -* **logger:** improvements extensibility & new features ([#415](https://github.com/awslabs/aws-lambda-powertools-python/issues/415)) -* **parser:** fix table and heading syntax -* **tracer:** Fix line highlighting ([#395](https://github.com/awslabs/aws-lambda-powertools-python/issues/395)) - -## Features - -* add support to persist default dimensions ([#410](https://github.com/awslabs/aws-lambda-powertools-python/issues/410)) -* **event-handle:** allow for cors=None setting ([#421](https://github.com/awslabs/aws-lambda-powertools-python/issues/421)) -* **event-handler:** add http ProxyEvent handler ([#369](https://github.com/awslabs/aws-lambda-powertools-python/issues/369)) -* **parser:** Support for API GW v1 proxy schema & envelope ([#403](https://github.com/awslabs/aws-lambda-powertools-python/issues/403)) - -## Maintenance - -* bump to 1.15.0 ([#422](https://github.com/awslabs/aws-lambda-powertools-python/issues/422)) +* **parser:** Add missing fields for SESEvent ([#1027](https://github.com/awslabs/aws-lambda-powertools-python/issues/1027)) ([#1190](https://github.com/awslabs/aws-lambda-powertools-python/issues/1190)) @@ -2317,47 +1776,7 @@ * Merge pull request [#5](https://github.com/awslabs/aws-lambda-powertools-python/issues/5) from jfuss/feat/python38 -[Unreleased]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.28.0...HEAD -[v1.28.0]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.27.0...v1.28.0 -[v1.27.0]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.26.7...v1.27.0 -[v1.26.7]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.26.6...v1.26.7 -[v1.26.6]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.26.5...v1.26.6 -[v1.26.5]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.26.4...v1.26.5 -[v1.26.4]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.26.3...v1.26.4 -[v1.26.3]: 
https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.26.2...v1.26.3 -[v1.26.2]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.26.1...v1.26.2 -[v1.26.1]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.26.0...v1.26.1 -[v1.26.0]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.25.10...v1.26.0 -[v1.25.10]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.25.9...v1.25.10 -[v1.25.9]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.25.8...v1.25.9 -[v1.25.8]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.25.7...v1.25.8 -[v1.25.7]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.25.6...v1.25.7 -[v1.25.6]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.25.5...v1.25.6 -[v1.25.5]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.25.4...v1.25.5 -[v1.25.4]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.25.3...v1.25.4 -[v1.25.3]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.25.2...v1.25.3 -[v1.25.2]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.25.1...v1.25.2 -[v1.25.1]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.25.0...v1.25.1 -[v1.25.0]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.24.2...v1.25.0 -[v1.24.2]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.24.1...v1.24.2 -[v1.24.1]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.24.0...v1.24.1 -[v1.24.0]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.23.0...v1.24.0 -[v1.23.0]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.22.0...v1.23.0 -[v1.22.0]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.21.1...v1.22.0 -[v1.21.1]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.21.0...v1.21.1 -[v1.21.0]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.20.2...v1.21.0 -[v1.20.2]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.20.1...v1.20.2 -[v1.20.1]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.20.0...v1.20.1 -[v1.20.0]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.19.0...v1.20.0 -[v1.19.0]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.18.1...v1.19.0 -[v1.18.1]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.18.0...v1.18.1 -[v1.18.0]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.17.1...v1.18.0 -[v1.17.1]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.17.0...v1.17.1 -[v1.17.0]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.16.1...v1.17.0 -[v1.16.1]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.16.0...v1.16.1 -[v1.16.0]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.15.1...v1.16.0 -[v1.15.1]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.15.0...v1.15.1 -[v1.15.0]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.14.0...v1.15.0 +[Unreleased]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.14.0...HEAD [v1.14.0]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.13.0...v1.14.0 [v1.13.0]: https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.12.0...v1.13.0 [v1.12.0]: 
https://github.com/awslabs/aws-lambda-powertools-python/compare/v1.11.0...v1.12.0 From e1fdb21851c2b758327aa84716a018784cbfe128 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 6 Sep 2022 17:14:11 +0000 Subject: [PATCH 49/49] chore(deps): bump pydantic from 1.10.1 to 1.10.2 Bumps [pydantic](https://github.com/pydantic/pydantic) from 1.10.1 to 1.10.2. - [Release notes](https://github.com/pydantic/pydantic/releases) - [Changelog](https://github.com/pydantic/pydantic/blob/main/HISTORY.md) - [Commits](https://github.com/pydantic/pydantic/compare/v1.10.1...v1.10.2) --- updated-dependencies: - dependency-name: pydantic dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- poetry.lock | 128 ++++++++++++++++++++++++++++++---------------------- 1 file changed, 73 insertions(+), 55 deletions(-) diff --git a/poetry.lock b/poetry.lock index 8c9c03b80fd..ede4665455f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -21,28 +21,30 @@ tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900 tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] [[package]] -name = "aws-cdk-lib" -version = "2.40.0" -description = "Version 2 of the AWS Cloud Development Kit library" +name = "aws-cdk.aws-apigatewayv2-alpha" +version = "2.39.1a0" +description = "The CDK Construct Library for AWS::APIGatewayv2" category = "dev" optional = false python-versions = "~=3.7" [package.dependencies] +aws-cdk-lib = ">=2.39.1,<3.0.0" constructs = ">=10.0.0,<11.0.0" -jsii = ">=1.66.0,<2.0.0" +jsii = ">=1.65.1,<2.0.0" publication = ">=0.0.3" typeguard = ">=2.13.3,<2.14.0" [[package]] -name = "aws-cdk.aws-apigatewayv2-alpha" +name = "aws-cdk.aws-apigatewayv2-integrations-alpha" version = "2.39.1a0" -description = "The CDK Construct Library for AWS::APIGatewayv2" +description = "Integrations for AWS APIGateway V2" category = "dev" optional = false python-versions = "~=3.7" [package.dependencies] +"aws-cdk.aws-apigatewayv2-alpha" = "2.39.1.a0" aws-cdk-lib = ">=2.39.1,<3.0.0" constructs = ">=10.0.0,<11.0.0" jsii = ">=1.65.1,<2.0.0" @@ -50,18 +52,16 @@ publication = ">=0.0.3" typeguard = ">=2.13.3,<2.14.0" [[package]] -name = "aws-cdk.aws-apigatewayv2-integrations-alpha" -version = "2.39.1a0" -description = "Integrations for AWS APIGateway V2" +name = "aws-cdk-lib" +version = "2.40.0" +description = "Version 2 of the AWS Cloud Development Kit library" category = "dev" optional = false python-versions = "~=3.7" [package.dependencies] -aws-cdk-lib = ">=2.39.1,<3.0.0" -"aws-cdk.aws-apigatewayv2-alpha" = "2.39.1.a0" constructs = ">=10.0.0,<11.0.0" -jsii = ">=1.65.1,<2.0.0" +jsii = ">=1.66.0,<2.0.0" publication = ">=0.0.3" typeguard = ">=2.13.3,<2.14.0" @@ -94,7 +94,7 @@ stevedore = ">=1.20.0" [package.extras] test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)", "toml"] toml = ["toml"] -yaml = ["pyyaml"] +yaml = ["PyYAML"] [[package]] name = "black" @@ -411,6 +411,7 @@ python-versions = ">=3.6,<4.0" attrs = "*" eradicate = ">=2.0,<3.0" flake8 = ">=3.5,<6" +setuptools = "*" [[package]] name = "flake8-fixme" @@ -586,7 +587,7 @@ importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} MarkupSafe = ">=0.9.2" [package.extras] -babel = ["babel"] +babel = 
["Babel"] lingua = ["lingua"] testing = ["pytest"] @@ -957,7 +958,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pydantic" -version = "1.10.1" +version = "1.10.2" description = "Data validation and settings management using python type hints" category = "main" optional = true @@ -1241,6 +1242,19 @@ botocore = ">=1.12.36,<2.0a.0" [package.extras] crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"] +[[package]] +name = "setuptools" +version = "65.3.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mock", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + [[package]] name = "six" version = "1.16.0" @@ -1397,10 +1411,6 @@ attrs = [ {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, ] -aws-cdk-lib = [ - {file = "aws-cdk-lib-2.40.0.tar.gz", hash = "sha256:0a4c2c9ad95e126b4a157134f968c7d0bcd74b6bd91f208be72aa562952114ce"}, - {file = "aws_cdk_lib-2.40.0-py3-none-any.whl", hash = "sha256:376c64aefe5971c121c6098ab7fd7efa7a61a9caa8f1b9deeeb87c1a5a768318"}, -] "aws-cdk.aws-apigatewayv2-alpha" = [ {file = "aws-cdk.aws-apigatewayv2-alpha-2.39.1a0.tar.gz", hash = "sha256:2a506e8e9015f1cf15f951b4dbc09ffee17d96aa77491b84ca1ab4b790388bdc"}, {file = "aws_cdk.aws_apigatewayv2_alpha-2.39.1a0-py3-none-any.whl", hash = "sha256:00ec8ee0c777f3dba81a40553e649aac3f707484af07c90d0f369ceb78512164"}, @@ -1409,6 +1419,10 @@ aws-cdk-lib = [ {file = "aws-cdk.aws-apigatewayv2-integrations-alpha-2.39.1a0.tar.gz", hash = "sha256:67f7e38214466bd15438301828c0b210b08fc16ecf35781210cdda4eae3151e2"}, {file = "aws_cdk.aws_apigatewayv2_integrations_alpha-2.39.1a0-py3-none-any.whl", hash = "sha256:29a46bad1fe1fd8d9c2356686636a0e83db9e4b6b24d8765f5024fc2988f8661"}, ] +aws-cdk-lib = [ + {file = "aws-cdk-lib-2.40.0.tar.gz", hash = "sha256:0a4c2c9ad95e126b4a157134f968c7d0bcd74b6bd91f208be72aa562952114ce"}, + {file = "aws_cdk_lib-2.40.0-py3-none-any.whl", hash = "sha256:376c64aefe5971c121c6098ab7fd7efa7a61a9caa8f1b9deeeb87c1a5a768318"}, +] aws-xray-sdk = [ {file = "aws-xray-sdk-2.10.0.tar.gz", hash = "sha256:9b14924fd0628cf92936055864655354003f0b1acc3e1c3ffde6403d0799dd7a"}, {file = "aws_xray_sdk-2.10.0-py2.py3-none-any.whl", hash = "sha256:7551e81a796e1a5471ebe84844c40e8edf7c218db33506d046fec61f7495eda4"}, @@ -1831,42 +1845,42 @@ pycodestyle = [ {file = "pycodestyle-2.7.0.tar.gz", hash = 
"sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, ] pydantic = [ - {file = "pydantic-1.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:221166d99726238f71adc4fa9f3e94063a10787574b966f86a774559e709ac5a"}, - {file = "pydantic-1.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a90e85d95fd968cd7cae122e0d3e0e1f6613bc88c1ff3fe838ac9785ea4b1c4c"}, - {file = "pydantic-1.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2157aaf5718c648eaec9e654a34179ae42ffc363dc3ad058538a4f3ecbd9341"}, - {file = "pydantic-1.10.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6142246fc9adb51cadaeb84fb52a86f3adad4c6a7b0938a5dd0b1356b0088217"}, - {file = "pydantic-1.10.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:60dad97a09b6f44690c05467a4f397b62bfc2c839ac39102819d6979abc2be0d"}, - {file = "pydantic-1.10.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d6f5bcb59d33ec46621dae76e714c53035087666cac80c81c9047a84f3ff93d0"}, - {file = "pydantic-1.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:522906820cd60e63c7960ba83078bf2d2ad2dd0870bf68248039bcb1ec3eb0a4"}, - {file = "pydantic-1.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d545c89d88bdd5559db17aeb5a61a26799903e4bd76114779b3bf1456690f6ce"}, - {file = "pydantic-1.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ad2374b5b3b771dcc6e2f6e0d56632ab63b90e9808b7a73ad865397fcdb4b2cd"}, - {file = "pydantic-1.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90e02f61b7354ed330f294a437d0bffac9e21a5d46cb4cc3c89d220e497db7ac"}, - {file = "pydantic-1.10.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc5ffe7bd0b4778fa5b7a5f825c52d6cfea3ae2d9b52b05b9b1d97e36dee23a8"}, - {file = "pydantic-1.10.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7acb7b66ffd2bc046eaff0063df84c83fc3826722d5272adaeadf6252e17f691"}, - {file = "pydantic-1.10.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7e6786ed5faa559dea5a77f6d2de9a08d18130de9344533535d945f34bdcd42e"}, - {file = "pydantic-1.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:c7bf8ff1d18186eb0cbe42bd9bfb4cbf7fde1fd01b8608925458990c21f202f0"}, - {file = "pydantic-1.10.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:14a5babda137a294df7ad5f220986d79bbb87fdeb332c6ded61ce19da7f5f3bf"}, - {file = "pydantic-1.10.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5659cb9c6b3d27fc0067025c4f5a205f5e838232a4a929b412781117c2343d44"}, - {file = "pydantic-1.10.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c8d70fb91b03c32d2e857b071a22a5225e6b625ca82bd2cc8dd729d88e0bd200"}, - {file = "pydantic-1.10.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9a93be313e40f12c6f2cb84533b226bbe23d0774872e38d83415e6890215e3a6"}, - {file = "pydantic-1.10.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d55aeb01bb7bd7c7e1bd904668a4a2ffcbb1c248e7ae9eb40a272fd7e67dd98b"}, - {file = "pydantic-1.10.1-cp37-cp37m-win_amd64.whl", hash = "sha256:43d41b6f13706488e854729955ba8f740e6ec375cd16b72b81dc24b9d84f0d15"}, - {file = "pydantic-1.10.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f31ffe0e38805a0e6410330f78147bb89193b136d7a5f79cae60d3e849b520a6"}, - {file = "pydantic-1.10.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8eee69eda7674977b079a21e7bf825b59d8bf15145300e8034ed3eb239ac444f"}, - {file = 
"pydantic-1.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f927bff6c319fc92e0a2cbeb2609b5c1cd562862f4b54ec905e353282b7c8b1"}, - {file = "pydantic-1.10.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb1bc3f8fef6ba36977108505e90558911e7fbccb4e930805d5dd90891b56ff4"}, - {file = "pydantic-1.10.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:96ab6ce1346d14c6e581a69c333bdd1b492df9cf85ad31ad77a8aa42180b7e09"}, - {file = "pydantic-1.10.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:444cf220a12134da1cd42fe4f45edff622139e10177ce3d8ef2b4f41db1291b2"}, - {file = "pydantic-1.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:dbfbff83565b4514dd8cebc8b8c81a12247e89427ff997ad0a9da7b2b1065c12"}, - {file = "pydantic-1.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5327406f4bfd5aee784e7ad2a6a5fdd7171c19905bf34cb1994a1ba73a87c468"}, - {file = "pydantic-1.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1072eae28bf034a311764c130784e8065201a90edbca10f495c906737b3bd642"}, - {file = "pydantic-1.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce901335667a68dfbc10dd2ee6c0d676b89210d754441c2469fbc37baf7ee2ed"}, - {file = "pydantic-1.10.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54d6465cd2112441305faf5143a491b40de07a203116b5755a2108e36b25308d"}, - {file = "pydantic-1.10.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2b5e5e7a0ec96704099e271911a1049321ba1afda92920df0769898a7e9a1298"}, - {file = "pydantic-1.10.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ae43704358304da45c1c3dd7056f173c618b252f91594bcb6d6f6b4c6c284dee"}, - {file = "pydantic-1.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:2d7da49229ffb1049779a5a6c1c50a26da164bd053cf8ee9042197dc08a98259"}, - {file = "pydantic-1.10.1-py3-none-any.whl", hash = "sha256:f8b10e59c035ff3dcc9791619d6e6c5141e0fa5cbe264e19e267b8d523b210bf"}, - {file = "pydantic-1.10.1.tar.gz", hash = "sha256:d41bb80347a8a2d51fbd6f1748b42aca14541315878447ba159617544712f770"}, + {file = "pydantic-1.10.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bb6ad4489af1bac6955d38ebcb95079a836af31e4c4f74aba1ca05bb9f6027bd"}, + {file = "pydantic-1.10.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a1f5a63a6dfe19d719b1b6e6106561869d2efaca6167f84f5ab9347887d78b98"}, + {file = "pydantic-1.10.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:352aedb1d71b8b0736c6d56ad2bd34c6982720644b0624462059ab29bd6e5912"}, + {file = "pydantic-1.10.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19b3b9ccf97af2b7519c42032441a891a5e05c68368f40865a90eb88833c2559"}, + {file = "pydantic-1.10.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e9069e1b01525a96e6ff49e25876d90d5a563bc31c658289a8772ae186552236"}, + {file = "pydantic-1.10.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:355639d9afc76bcb9b0c3000ddcd08472ae75318a6eb67a15866b87e2efa168c"}, + {file = "pydantic-1.10.2-cp310-cp310-win_amd64.whl", hash = "sha256:ae544c47bec47a86bc7d350f965d8b15540e27e5aa4f55170ac6a75e5f73b644"}, + {file = "pydantic-1.10.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a4c805731c33a8db4b6ace45ce440c4ef5336e712508b4d9e1aafa617dc9907f"}, + {file = "pydantic-1.10.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d49f3db871575e0426b12e2f32fdb25e579dea16486a26e5a0474af87cb1ab0a"}, + {file = 
"pydantic-1.10.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37c90345ec7dd2f1bcef82ce49b6235b40f282b94d3eec47e801baf864d15525"}, + {file = "pydantic-1.10.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b5ba54d026c2bd2cb769d3468885f23f43710f651688e91f5fb1edcf0ee9283"}, + {file = "pydantic-1.10.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:05e00dbebbe810b33c7a7362f231893183bcc4251f3f2ff991c31d5c08240c42"}, + {file = "pydantic-1.10.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2d0567e60eb01bccda3a4df01df677adf6b437958d35c12a3ac3e0f078b0ee52"}, + {file = "pydantic-1.10.2-cp311-cp311-win_amd64.whl", hash = "sha256:c6f981882aea41e021f72779ce2a4e87267458cc4d39ea990729e21ef18f0f8c"}, + {file = "pydantic-1.10.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c4aac8e7103bf598373208f6299fa9a5cfd1fc571f2d40bf1dd1955a63d6eeb5"}, + {file = "pydantic-1.10.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a7b66c3f499108b448f3f004801fcd7d7165fb4200acb03f1c2402da73ce4c"}, + {file = "pydantic-1.10.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bedf309630209e78582ffacda64a21f96f3ed2e51fbf3962d4d488e503420254"}, + {file = "pydantic-1.10.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9300fcbebf85f6339a02c6994b2eb3ff1b9c8c14f502058b5bf349d42447dcf5"}, + {file = "pydantic-1.10.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:216f3bcbf19c726b1cc22b099dd409aa371f55c08800bcea4c44c8f74b73478d"}, + {file = "pydantic-1.10.2-cp37-cp37m-win_amd64.whl", hash = "sha256:dd3f9a40c16daf323cf913593083698caee97df2804aa36c4b3175d5ac1b92a2"}, + {file = "pydantic-1.10.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b97890e56a694486f772d36efd2ba31612739bc6f3caeee50e9e7e3ebd2fdd13"}, + {file = "pydantic-1.10.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9cabf4a7f05a776e7793e72793cd92cc865ea0e83a819f9ae4ecccb1b8aa6116"}, + {file = "pydantic-1.10.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06094d18dd5e6f2bbf93efa54991c3240964bb663b87729ac340eb5014310624"}, + {file = "pydantic-1.10.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc78cc83110d2f275ec1970e7a831f4e371ee92405332ebfe9860a715f8336e1"}, + {file = "pydantic-1.10.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ee433e274268a4b0c8fde7ad9d58ecba12b069a033ecc4645bb6303c062d2e9"}, + {file = "pydantic-1.10.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7c2abc4393dea97a4ccbb4ec7d8658d4e22c4765b7b9b9445588f16c71ad9965"}, + {file = "pydantic-1.10.2-cp38-cp38-win_amd64.whl", hash = "sha256:0b959f4d8211fc964772b595ebb25f7652da3f22322c007b6fed26846a40685e"}, + {file = "pydantic-1.10.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c33602f93bfb67779f9c507e4d69451664524389546bacfe1bee13cae6dc7488"}, + {file = "pydantic-1.10.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5760e164b807a48a8f25f8aa1a6d857e6ce62e7ec83ea5d5c5a802eac81bad41"}, + {file = "pydantic-1.10.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6eb843dcc411b6a2237a694f5e1d649fc66c6064d02b204a7e9d194dff81eb4b"}, + {file = "pydantic-1.10.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b8795290deaae348c4eba0cebb196e1c6b98bdbe7f50b2d0d9a4a99716342fe"}, + {file = "pydantic-1.10.2-cp39-cp39-musllinux_1_1_i686.whl", 
hash = "sha256:e0bedafe4bc165ad0a56ac0bd7695df25c50f76961da29c050712596cf092d6d"}, + {file = "pydantic-1.10.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2e05aed07fa02231dbf03d0adb1be1d79cabb09025dd45aa094aa8b4e7b9dcda"}, + {file = "pydantic-1.10.2-cp39-cp39-win_amd64.whl", hash = "sha256:c1ba1afb396148bbc70e9eaa8c06c1716fdddabaf86e7027c5988bae2a829ab6"}, + {file = "pydantic-1.10.2-py3-none-any.whl", hash = "sha256:1b6ee725bd6e83ec78b1aa32c5b1fa67a3a65badddde3976bca5fe4568f27709"}, + {file = "pydantic-1.10.2.tar.gz", hash = "sha256:91b8e218852ef6007c2b98cd861601c6a09f1aa32bbbb74fab5b1c33d4a1e410"}, ] pyflakes = [ {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, @@ -2057,6 +2071,10 @@ s3transfer = [ {file = "s3transfer-0.6.0-py3-none-any.whl", hash = "sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd"}, {file = "s3transfer-0.6.0.tar.gz", hash = "sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947"}, ] +setuptools = [ + {file = "setuptools-65.3.0-py3-none-any.whl", hash = "sha256:2e24e0bec025f035a2e72cdd1961119f557d78ad331bb00ff82efb2ab8da8e82"}, + {file = "setuptools-65.3.0.tar.gz", hash = "sha256:7732871f4f7fa58fb6bdcaeadb0161b2bd046c85905dbaa066bdcbcc81953b57"}, +] six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},