diff --git a/aws_lambda_powertools/utilities/advanced_parser/__init__.py b/aws_lambda_powertools/utilities/advanced_parser/__init__.py deleted file mode 100644 index 017b5086bb0..00000000000 --- a/aws_lambda_powertools/utilities/advanced_parser/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -"""Advanced parser utility -""" -from .envelopes import Envelope, InvalidEnvelopeError, parse_envelope -from .parser import parser - -__all__ = ["InvalidEnvelopeError", "Envelope", "parse_envelope", "parser"] diff --git a/aws_lambda_powertools/utilities/advanced_parser/envelopes/__init__.py b/aws_lambda_powertools/utilities/advanced_parser/envelopes/__init__.py deleted file mode 100644 index 5fa4c396ba1..00000000000 --- a/aws_lambda_powertools/utilities/advanced_parser/envelopes/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .envelopes import Envelope, InvalidEnvelopeError, parse_envelope - -__all__ = ["InvalidEnvelopeError", "Envelope", "parse_envelope"] diff --git a/aws_lambda_powertools/utilities/advanced_parser/envelopes/base.py b/aws_lambda_powertools/utilities/advanced_parser/envelopes/base.py deleted file mode 100644 index d972bfb3872..00000000000 --- a/aws_lambda_powertools/utilities/advanced_parser/envelopes/base.py +++ /dev/null @@ -1,40 +0,0 @@ -import logging -from abc import ABC, abstractmethod -from typing import Any, Dict - -from pydantic import BaseModel, ValidationError - -logger = logging.getLogger(__name__) - - -class BaseEnvelope(ABC): - @staticmethod - def _parse_user_dict_schema(user_event: Dict[str, Any], schema: BaseModel) -> Any: - if user_event is None: - return None - logger.debug("parsing user dictionary schema") - try: - return schema(**user_event) - except (ValidationError, TypeError): - logger.exception("Validation exception while extracting user custom schema") - raise - - @staticmethod - def _parse_user_json_string_schema(user_event: str, schema: BaseModel) -> Any: - if user_event is None: - return None - # this is used in cases where the underlying schema is not a Dict that can be parsed as baseModel - # but a plain string i.e SQS has plain string payload - if schema == str: - logger.debug("input is string, returning") - return user_event - logger.debug("trying to parse as json encoded string") - try: - return schema.parse_raw(user_event) - except (ValidationError, TypeError): - logger.exception("Validation exception while extracting user custom schema") - raise - - @abstractmethod - def parse(self, event: Dict[str, Any], schema: BaseModel): - return NotImplemented diff --git a/aws_lambda_powertools/utilities/advanced_parser/envelopes/dynamodb.py b/aws_lambda_powertools/utilities/advanced_parser/envelopes/dynamodb.py deleted file mode 100644 index 81b9de02315..00000000000 --- a/aws_lambda_powertools/utilities/advanced_parser/envelopes/dynamodb.py +++ /dev/null @@ -1,31 +0,0 @@ -import logging -from typing import Any, Dict, List - -from pydantic import BaseModel, ValidationError -from typing_extensions import Literal - -from aws_lambda_powertools.utilities.advanced_parser.envelopes.base import BaseEnvelope -from aws_lambda_powertools.utilities.advanced_parser.schemas import DynamoDBSchema - -logger = logging.getLogger(__name__) - - -# returns a List of dictionaries which each contains two keys, "NewImage" and "OldImage". -# The values are the parsed schema models. The images' values can also be None. -# Length of the list is the record's amount in the original event. 
-class DynamoDBEnvelope(BaseEnvelope): - def parse(self, event: Dict[str, Any], schema: BaseModel) -> List[Dict[Literal["NewImage", "OldImage"], BaseModel]]: - try: - parsed_envelope = DynamoDBSchema(**event) - except (ValidationError, TypeError): - logger.exception("Validation exception received from input dynamodb stream event") - raise - output = [] - for record in parsed_envelope.Records: - output.append( - { - "NewImage": self._parse_user_dict_schema(record.dynamodb.NewImage, schema), - "OldImage": self._parse_user_dict_schema(record.dynamodb.OldImage, schema), - } - ) - return output diff --git a/aws_lambda_powertools/utilities/advanced_parser/envelopes/envelopes.py b/aws_lambda_powertools/utilities/advanced_parser/envelopes/envelopes.py deleted file mode 100644 index 332c1eadef0..00000000000 --- a/aws_lambda_powertools/utilities/advanced_parser/envelopes/envelopes.py +++ /dev/null @@ -1,42 +0,0 @@ -import logging -from enum import Enum -from typing import Any, Dict - -from pydantic import BaseModel - -from aws_lambda_powertools.utilities.advanced_parser.envelopes.base import BaseEnvelope -from aws_lambda_powertools.utilities.advanced_parser.envelopes.dynamodb import DynamoDBEnvelope -from aws_lambda_powertools.utilities.advanced_parser.envelopes.event_bridge import EventBridgeEnvelope -from aws_lambda_powertools.utilities.advanced_parser.envelopes.sqs import SqsEnvelope - -logger = logging.getLogger(__name__) - - -"""Built-in envelopes""" - - -class Envelope(str, Enum): - SQS = "sqs" - EVENTBRIDGE = "eventbridge" - DYNAMODB_STREAM = "dynamodb_stream" - - -class InvalidEnvelopeError(Exception): - """Input envelope is not one of the Envelope enum values""" - - -# enum to BaseEnvelope handler class -__ENVELOPE_MAPPING = { - Envelope.SQS: SqsEnvelope, - Envelope.DYNAMODB_STREAM: DynamoDBEnvelope, - Envelope.EVENTBRIDGE: EventBridgeEnvelope, -} - - -def parse_envelope(event: Dict[str, Any], envelope: Envelope, schema: BaseModel): - envelope_handler: BaseEnvelope = __ENVELOPE_MAPPING.get(envelope) - if envelope_handler is None: - logger.exception("envelope must be an instance of Envelope enum") - raise InvalidEnvelopeError("envelope must be an instance of Envelope enum") - logger.debug(f"Parsing and validating event schema, envelope={str(envelope.value)}") - return envelope_handler().parse(event=event, schema=schema) diff --git a/aws_lambda_powertools/utilities/advanced_parser/envelopes/event_bridge.py b/aws_lambda_powertools/utilities/advanced_parser/envelopes/event_bridge.py deleted file mode 100644 index 00052d41da0..00000000000 --- a/aws_lambda_powertools/utilities/advanced_parser/envelopes/event_bridge.py +++ /dev/null @@ -1,20 +0,0 @@ -import logging -from typing import Any, Dict - -from pydantic import BaseModel, ValidationError - -from aws_lambda_powertools.utilities.advanced_parser.envelopes.base import BaseEnvelope -from aws_lambda_powertools.utilities.advanced_parser.schemas import EventBridgeSchema - -logger = logging.getLogger(__name__) - - -# returns a parsed BaseModel object according to schema type -class EventBridgeEnvelope(BaseEnvelope): - def parse(self, event: Dict[str, Any], schema: BaseModel) -> BaseModel: - try: - parsed_envelope = EventBridgeSchema(**event) - except (ValidationError, TypeError): - logger.exception("Validation exception received from input eventbridge event") - raise - return self._parse_user_dict_schema(parsed_envelope.detail, schema) diff --git a/aws_lambda_powertools/utilities/advanced_parser/envelopes/sqs.py 
b/aws_lambda_powertools/utilities/advanced_parser/envelopes/sqs.py deleted file mode 100644 index 8ef2e685c4f..00000000000 --- a/aws_lambda_powertools/utilities/advanced_parser/envelopes/sqs.py +++ /dev/null @@ -1,27 +0,0 @@ -import logging -from typing import Any, Dict, List, Union - -from pydantic import BaseModel, ValidationError - -from aws_lambda_powertools.utilities.advanced_parser.envelopes.base import BaseEnvelope -from aws_lambda_powertools.utilities.advanced_parser.schemas import SqsSchema - -logger = logging.getLogger(__name__) - - -# returns a list of parsed schemas of type BaseModel or plain string. -# The record's body parameter is a string. However, it can also be a JSON encoded string which -# can then be parsed into a BaseModel object. -# Note that all records will be parsed the same way so if schema is str, -# all the items in the list will be parsed as str and npt as JSON (and vice versa). -class SqsEnvelope(BaseEnvelope): - def parse(self, event: Dict[str, Any], schema: Union[BaseModel, str]) -> List[Union[BaseModel, str]]: - try: - parsed_envelope = SqsSchema(**event) - except (ValidationError, TypeError): - logger.exception("Validation exception received from input sqs event") - raise - output = [] - for record in parsed_envelope.Records: - output.append(self._parse_user_json_string_schema(record.body, schema)) - return output diff --git a/aws_lambda_powertools/utilities/advanced_parser/parser.py b/aws_lambda_powertools/utilities/advanced_parser/parser.py deleted file mode 100644 index b501d0a5146..00000000000 --- a/aws_lambda_powertools/utilities/advanced_parser/parser.py +++ /dev/null @@ -1,68 +0,0 @@ -import logging -from typing import Any, Callable, Dict, Optional - -from pydantic import BaseModel, ValidationError - -from aws_lambda_powertools.middleware_factory import lambda_handler_decorator -from aws_lambda_powertools.utilities.advanced_parser.envelopes import Envelope, parse_envelope - -logger = logging.getLogger(__name__) - - -@lambda_handler_decorator -def parser( - handler: Callable[[Dict, Any], Any], - event: Dict[str, Any], - context: Dict[str, Any], - schema: BaseModel, - envelope: Optional[Envelope] = None, -) -> Any: - """Decorator to conduct advanced parsing & validation for lambda handlers events - - As Lambda follows (event, context) signature we can remove some of the boilerplate - and also capture any exception any Lambda function throws as metadata. - event will be the parsed and passed as a BaseModel pydantic class of the input type "schema" - to the lambda handler. - event will be extracted from the envelope in case envelope is not None. - In case envelope is None, the complete event is parsed to match the schema parameter BaseModel definition. - In case envelope is not None, first the event is parsed as the envelope's schema definition, and the user - message is extracted and parsed again as the schema parameter's definition. - - Example - ------- - **Lambda function using validation decorator** - - @parser(schema=MyBusiness, envelope=envelopes.EVENTBRIDGE) - def handler(event: MyBusiness , context: LambdaContext): - ... - - Parameters - ---------- - handler: input for lambda_handler_decorator, wraps the handler lambda - event: AWS event dictionary - context: AWS lambda context - schema: pydantic BaseModel class. This is the user data schema that will replace the event. - event parameter will be parsed and a new schema object will be created from it. 
- envelope: what envelope to extract the schema from, can be any AWS service that is currently - supported in the envelopes module. Can be None. - - Raises - ------ - err - TypeError - in case event is None - pydantic.ValidationError - event fails validation, either of the envelope - """ - lambda_handler_name = handler.__name__ - parsed_event = None - if envelope is None: - try: - logger.debug("Parsing and validating event schema, no envelope is used") - parsed_event = schema(**event) - except (ValidationError, TypeError): - logger.exception("Validation exception received from input event") - raise - else: - parsed_event = parse_envelope(event, envelope, schema) - - logger.debug(f"Calling handler {lambda_handler_name}") - return handler(parsed_event, context) diff --git a/aws_lambda_powertools/utilities/advanced_parser/schemas/__init__.py b/aws_lambda_powertools/utilities/advanced_parser/schemas/__init__.py deleted file mode 100644 index ac470a16c94..00000000000 --- a/aws_lambda_powertools/utilities/advanced_parser/schemas/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -from .dynamodb import DynamoDBSchema, DynamoRecordSchema, DynamoScheme -from .event_bridge import EventBridgeSchema -from .sqs import SqsRecordSchema, SqsSchema - -__all__ = [ - "DynamoDBSchema", - "EventBridgeSchema", - "DynamoScheme", - "DynamoRecordSchema", - "SqsSchema", - "SqsRecordSchema", -] diff --git a/aws_lambda_powertools/utilities/advanced_parser/schemas/dynamodb.py b/aws_lambda_powertools/utilities/advanced_parser/schemas/dynamodb.py deleted file mode 100644 index 0c4e95fc9bc..00000000000 --- a/aws_lambda_powertools/utilities/advanced_parser/schemas/dynamodb.py +++ /dev/null @@ -1,45 +0,0 @@ -from datetime import date -from typing import Any, Dict, List, Optional - -from pydantic import BaseModel, root_validator -from typing_extensions import Literal - - -class DynamoScheme(BaseModel): - ApproximateCreationDateTime: Optional[date] - Keys: Dict[str, Dict[str, Any]] - NewImage: Optional[Dict[str, Any]] - OldImage: Optional[Dict[str, Any]] - SequenceNumber: str - SizeBytes: int - StreamViewType: Literal["NEW_AND_OLD_IMAGES", "KEYS_ONLY", "NEW_IMAGE", "OLD_IMAGE"] - - # since both images are optional, they can both be None. 
However, at least one must - # exist in a legal schema of NEW_AND_OLD_IMAGES type - @root_validator - def check_one_image_exists(cls, values): - new_img, old_img = values.get("NewImage"), values.get("OldImage") - stream_type = values.get("StreamViewType") - if stream_type == "NEW_AND_OLD_IMAGES" and not new_img and not old_img: - raise TypeError("DynamoDB streams schema failed validation, missing both new & old stream images") - return values - - -class UserIdentity(BaseModel): - type: Literal["Service"] # noqa: VNE003, A003 - principalId: Literal["dynamodb.amazonaws.com"] - - -class DynamoRecordSchema(BaseModel): - eventID: str - eventName: Literal["INSERT", "MODIFY", "REMOVE"] - eventVersion: float - eventSource: Literal["aws:dynamodb"] - awsRegion: str - eventSourceARN: str - dynamodb: DynamoScheme - userIdentity: Optional[UserIdentity] - - -class DynamoDBSchema(BaseModel): - Records: List[DynamoRecordSchema] diff --git a/aws_lambda_powertools/utilities/advanced_parser/schemas/sqs.py b/aws_lambda_powertools/utilities/advanced_parser/schemas/sqs.py deleted file mode 100644 index 862236281f2..00000000000 --- a/aws_lambda_powertools/utilities/advanced_parser/schemas/sqs.py +++ /dev/null @@ -1,65 +0,0 @@ -import re -from datetime import datetime -from typing import Dict, List, Optional - -from pydantic import BaseModel, root_validator, validator -from typing_extensions import Literal - - -class SqsAttributesSchema(BaseModel): - ApproximateReceiveCount: str - ApproximateFirstReceiveTimestamp: datetime - MessageDeduplicationId: Optional[str] - MessageGroupId: Optional[str] - SenderId: str - SentTimestamp: datetime - SequenceNumber: Optional[str] - AWSTraceHeader: Optional[str] - - -class SqsMsgAttributeSchema(BaseModel): - stringValue: Optional[str] - binaryValue: Optional[str] - stringListValues: List[str] = [] - binaryListValues: List[str] = [] - dataType: str - - # Amazon SQS supports the logical data types String, Number, and Binary with optional custom data type - # labels with the format .custom-data-type. 
- # https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-message-metadata.html#sqs-message-attributes - @validator("dataType") - def valid_type(cls, v): # noqa: VNE001 - pattern = re.compile("Number.*|String.*|Binary.*") - if not pattern.match(v): - raise TypeError("data type is invalid") - return v - - # validate that dataType and value are not None and match - @root_validator - def check_str_and_binary_values(cls, values): - binary_val, str_val = values.get("binaryValue", ""), values.get("stringValue", "") - data_type = values.get("dataType") - if not str_val and not binary_val: - raise TypeError("both binaryValue and stringValue are missing") - if data_type.startswith("Binary") and not binary_val: - raise TypeError("binaryValue is missing") - if (data_type.startswith("String") or data_type.startswith("Number")) and not str_val: - raise TypeError("stringValue is missing") - return values - - -class SqsRecordSchema(BaseModel): - messageId: str - receiptHandle: str - body: str - attributes: SqsAttributesSchema - messageAttributes: Dict[str, SqsMsgAttributeSchema] - md5OfBody: str - md5OfMessageAttributes: Optional[str] - eventSource: Literal["aws:sqs"] - eventSourceARN: str - awsRegion: str - - -class SqsSchema(BaseModel): - Records: List[SqsRecordSchema] diff --git a/aws_lambda_powertools/utilities/parser/__init__.py b/aws_lambda_powertools/utilities/parser/__init__.py new file mode 100644 index 00000000000..62aa4bd73d7 --- /dev/null +++ b/aws_lambda_powertools/utilities/parser/__init__.py @@ -0,0 +1,17 @@ +"""Advanced event_parser utility +""" +from . import envelopes +from .envelopes import BaseEnvelope +from .exceptions import ModelValidationError +from .parser import event_parser +from .pydantic import BaseModel, root_validator, validator + +__all__ = [ + "event_parser", + "envelopes", + "BaseEnvelope", + "BaseModel", + "validator", + "root_validator", + "ModelValidationError", +] diff --git a/aws_lambda_powertools/utilities/parser/envelopes/__init__.py b/aws_lambda_powertools/utilities/parser/envelopes/__init__.py new file mode 100644 index 00000000000..766021a3f92 --- /dev/null +++ b/aws_lambda_powertools/utilities/parser/envelopes/__init__.py @@ -0,0 +1,6 @@ +from .base import BaseEnvelope +from .dynamodb import DynamoDBEnvelope +from .event_bridge import EventBridgeEnvelope +from .sqs import SqsEnvelope + +__all__ = ["DynamoDBEnvelope", "EventBridgeEnvelope", "SqsEnvelope", "BaseEnvelope"] diff --git a/aws_lambda_powertools/utilities/parser/envelopes/base.py b/aws_lambda_powertools/utilities/parser/envelopes/base.py new file mode 100644 index 00000000000..baf6cd33420 --- /dev/null +++ b/aws_lambda_powertools/utilities/parser/envelopes/base.py @@ -0,0 +1,58 @@ +import logging +from abc import ABC, abstractmethod +from typing import Any, Dict, Union + +from pydantic import BaseModel + +logger = logging.getLogger(__name__) + + +class BaseEnvelope(ABC): + """ABC implementation for creating a supported Envelope""" + + @staticmethod + def _parse(data: Union[Dict[str, Any], str], model: BaseModel) -> Any: + """Parses envelope data against model provided + + Parameters + ---------- + data : Dict + Data to be parsed and validated + model + Data model to parse and validate data against + + Returns + ------- + Any + Parsed data + """ + if data is None: + logger.debug("Skipping parsing as event is None") + return data + + logger.debug("parsing event against model") + if isinstance(data, str): + logger.debug("parsing event as string") + return model.parse_raw(data) + 
+ return model.parse_obj(data) + + @abstractmethod + def parse(self, data: Dict[str, Any], model: BaseModel): + """Implementation to parse data against envelope model, then against the data model + + NOTE: Call `_parse` method to fully parse data with model provided. + + Example + ------- + + **EventBridge envelope implementation example** + + def parse(...): + # 1. parses data against envelope model + parsed_envelope = EventBridgeModel(**data) + + # 2. parses portion of data within the envelope against model + return self._parse(data=parsed_envelope.detail, model=data_model) + """ + return NotImplemented # pragma: no cover diff --git a/aws_lambda_powertools/utilities/parser/envelopes/dynamodb.py b/aws_lambda_powertools/utilities/parser/envelopes/dynamodb.py new file mode 100644 index 00000000000..ef166a5c48f --- /dev/null +++ b/aws_lambda_powertools/utilities/parser/envelopes/dynamodb.py @@ -0,0 +1,45 @@ +import logging +from typing import Any, Dict, List + +from pydantic import BaseModel +from typing_extensions import Literal + +from ..models import DynamoDBStreamModel +from .base import BaseEnvelope + +logger = logging.getLogger(__name__) + + +class DynamoDBEnvelope(BaseEnvelope): + """ DynamoDB Stream Envelope to extract data within NewImage/OldImage + + Note: Values are the parsed models. Images' values can also be None, and + length of the list is the record's amount in the original event. + """ + + def parse(self, data: Dict[str, Any], model: BaseModel) -> List[Dict[Literal["NewImage", "OldImage"], BaseModel]]: + """Parses DynamoDB Stream records found in either NewImage and OldImage with model provided + + Parameters + ---------- + data : Dict + Lambda event to be parsed + model : BaseModel + Data model provided to parse after extracting data using envelope + + Returns + ------- + List + List of records parsed with model provided + """ + parsed_envelope = DynamoDBStreamModel(**data) + output = [] + for record in parsed_envelope.Records: + output.append( + { + "NewImage": self._parse(record.dynamodb.NewImage, model), + "OldImage": self._parse(record.dynamodb.OldImage, model), + } + ) + # noinspection PyTypeChecker + return output diff --git a/aws_lambda_powertools/utilities/parser/envelopes/event_bridge.py b/aws_lambda_powertools/utilities/parser/envelopes/event_bridge.py new file mode 100644 index 00000000000..8b91266e848 --- /dev/null +++ b/aws_lambda_powertools/utilities/parser/envelopes/event_bridge.py @@ -0,0 +1,31 @@ +import logging +from typing import Any, Dict + +from pydantic import BaseModel + +from ..models import EventBridgeModel +from .base import BaseEnvelope + +logger = logging.getLogger(__name__) + + +class EventBridgeEnvelope(BaseEnvelope): + """EventBridge envelope to extract data within detail key""" + + def parse(self, data: Dict[str, Any], model: BaseModel) -> BaseModel: + """Parses data found with model provided + + Parameters + ---------- + data : Dict + Lambda event to be parsed + model : BaseModel + Data model provided to parse after extracting data using envelope + + Returns + ------- + Any + Parsed detail payload with model provided + """ + parsed_envelope = EventBridgeModel(**data) + return self._parse(data=parsed_envelope.detail, model=model) diff --git a/aws_lambda_powertools/utilities/parser/envelopes/sqs.py b/aws_lambda_powertools/utilities/parser/envelopes/sqs.py new file mode 100644 index 00000000000..7bf326206f3 --- /dev/null +++ b/aws_lambda_powertools/utilities/parser/envelopes/sqs.py @@ -0,0 +1,41 @@ +import logging +from typing import Any, 
Dict, List, Union + +from pydantic import BaseModel + +from ..models import SqsModel +from .base import BaseEnvelope + +logger = logging.getLogger(__name__) + + +class SqsEnvelope(BaseEnvelope): + """SQS Envelope to extract array of Records + + The record's body parameter is a string, though it can also be a JSON encoded string. + Regardless of its type it'll be parsed into a BaseModel object. + + Note: Records will be parsed the same way so if model is str, + all items in the list will be parsed as str and not as JSON (and vice versa) + """ + + def parse(self, data: Dict[str, Any], model: Union[BaseModel, str]) -> List[Union[BaseModel, str]]: + """Parses records found with model provided + + Parameters + ---------- + data : Dict + Lambda event to be parsed + model : BaseModel + Data model provided to parse after extracting data using envelope + + Returns + ------- + List + List of records parsed with model provided + """ + parsed_envelope = SqsModel(**data) + output = [] + for record in parsed_envelope.Records: + output.append(self._parse(record.body, model)) + return output diff --git a/aws_lambda_powertools/utilities/parser/exceptions.py b/aws_lambda_powertools/utilities/parser/exceptions.py new file mode 100644 index 00000000000..93e259df371 --- /dev/null +++ b/aws_lambda_powertools/utilities/parser/exceptions.py @@ -0,0 +1,10 @@ +class InvalidEnvelopeError(Exception): + """Input envelope is not callable or does not implement BaseEnvelope""" + + +class ModelValidationError(Exception): + """Input data does not conform with model""" + + +class InvalidModelTypeError(Exception): + """Input data model does not implement BaseModel""" diff --git a/aws_lambda_powertools/utilities/parser/models/__init__.py b/aws_lambda_powertools/utilities/parser/models/__init__.py new file mode 100644 index 00000000000..e58a678e959 --- /dev/null +++ b/aws_lambda_powertools/utilities/parser/models/__init__.py @@ -0,0 +1,12 @@ +from .dynamodb import DynamoDBStreamChangedRecordModel, DynamoDBStreamModel, DynamoDBStreamRecordModel +from .event_bridge import EventBridgeModel +from .sqs import SqsModel, SqsRecordModel + +__all__ = [ + "DynamoDBStreamModel", + "EventBridgeModel", + "DynamoDBStreamChangedRecordModel", + "DynamoDBStreamRecordModel", + "SqsModel", + "SqsRecordModel", +] diff --git a/aws_lambda_powertools/utilities/parser/models/dynamodb.py b/aws_lambda_powertools/utilities/parser/models/dynamodb.py new file mode 100644 index 00000000000..7bcf68845cc --- /dev/null +++ b/aws_lambda_powertools/utilities/parser/models/dynamodb.py @@ -0,0 +1,46 @@ +from datetime import date +from typing import Any, Dict, List, Optional + +from pydantic import BaseModel +from typing_extensions import Literal + + +class DynamoDBStreamChangedRecordModel(BaseModel): + ApproximateCreationDateTime: Optional[date] + Keys: Dict[str, Dict[str, Any]] + NewImage: Optional[Dict[str, Any]] + OldImage: Optional[Dict[str, Any]] + SequenceNumber: str + SizeBytes: int + StreamViewType: Literal["NEW_AND_OLD_IMAGES", "KEYS_ONLY", "NEW_IMAGE", "OLD_IMAGE"] + + # context on why it's commented: https://github.com/awslabs/aws-lambda-powertools-python/pull/118 + # since both images are optional, they can both be None.
However, at least one must + # exist in a legal model of NEW_AND_OLD_IMAGES type + # @root_validator + # def check_one_image_exists(cls, values): # noqa: E800 + # new_img, old_img = values.get("NewImage"), values.get("OldImage") # noqa: E800 + # stream_type = values.get("StreamViewType") # noqa: E800 + # if stream_type == "NEW_AND_OLD_IMAGES" and not new_img and not old_img: # noqa: E800 + # raise TypeError("DynamoDB streams model failed validation, missing both new & old stream images") # noqa: E800,E501 + # return values # noqa: E800 + + +class UserIdentity(BaseModel): + type: Literal["Service"] # noqa: VNE003, A003 + principalId: Literal["dynamodb.amazonaws.com"] + + +class DynamoDBStreamRecordModel(BaseModel): + eventID: str + eventName: Literal["INSERT", "MODIFY", "REMOVE"] + eventVersion: float + eventSource: Literal["aws:dynamodb"] + awsRegion: str + eventSourceARN: str + dynamodb: DynamoDBStreamChangedRecordModel + userIdentity: Optional[UserIdentity] + + +class DynamoDBStreamModel(BaseModel): + Records: List[DynamoDBStreamRecordModel] diff --git a/aws_lambda_powertools/utilities/advanced_parser/schemas/event_bridge.py b/aws_lambda_powertools/utilities/parser/models/event_bridge.py similarity index 75% rename from aws_lambda_powertools/utilities/advanced_parser/schemas/event_bridge.py rename to aws_lambda_powertools/utilities/parser/models/event_bridge.py index c5e319ac28e..ab621d5da9f 100644 --- a/aws_lambda_powertools/utilities/advanced_parser/schemas/event_bridge.py +++ b/aws_lambda_powertools/utilities/parser/models/event_bridge.py @@ -4,7 +4,7 @@ from pydantic import BaseModel, Field -class EventBridgeSchema(BaseModel): +class EventBridgeModel(BaseModel): version: str id: str # noqa: A003,VNE003 source: str @@ -12,5 +12,5 @@ class EventBridgeSchema(BaseModel): time: datetime region: str resources: List[str] - detailtype: str = Field(None, alias="detail-type") + detail_type: str = Field(None, alias="detail-type") detail: Dict[str, Any] diff --git a/aws_lambda_powertools/utilities/parser/models/sqs.py b/aws_lambda_powertools/utilities/parser/models/sqs.py new file mode 100644 index 00000000000..fd708020492 --- /dev/null +++ b/aws_lambda_powertools/utilities/parser/models/sqs.py @@ -0,0 +1,65 @@ +from datetime import datetime +from typing import Dict, List, Optional + +from pydantic import BaseModel +from typing_extensions import Literal + + +class SqsAttributesModel(BaseModel): + ApproximateReceiveCount: str + ApproximateFirstReceiveTimestamp: datetime + MessageDeduplicationId: Optional[str] + MessageGroupId: Optional[str] + SenderId: str + SentTimestamp: datetime + SequenceNumber: Optional[str] + AWSTraceHeader: Optional[str] + + +class SqsMsgAttributeModel(BaseModel): + stringValue: Optional[str] + binaryValue: Optional[str] + stringListValues: List[str] = [] + binaryListValues: List[str] = [] + dataType: str + + # context on why it's commented: https://github.com/awslabs/aws-lambda-powertools-python/pull/118 + # Amazon SQS supports the logical data types String, Number, and Binary with optional custom data type + # labels with the format .custom-data-type. 
+ # https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-message-metadata.html#sqs-message-attributes + # @validator("dataType") + # def valid_type(cls, v): # noqa: VNE001,E800 # noqa: E800 + # pattern = re.compile("Number.*|String.*|Binary.*") # noqa: E800 + # if not pattern.match(v): # noqa: E800 + # raise TypeError("data type is invalid") # noqa: E800 + # return v # noqa: E800 + # + # # validate that dataType and value are not None and match + # @root_validator + # def check_str_and_binary_values(cls, values): # noqa: E800 + # binary_val, str_val = values.get("binaryValue", ""), values.get("stringValue", "") # noqa: E800 + # data_type = values.get("dataType") # noqa: E800 + # if not str_val and not binary_val: # noqa: E800 + # raise TypeError("both binaryValue and stringValue are missing") # noqa: E800 + # if data_type.startswith("Binary") and not binary_val: # noqa: E800 + # raise TypeError("binaryValue is missing") # noqa: E800 + # if (data_type.startswith("String") or data_type.startswith("Number")) and not str_val: # noqa: E800 + # raise TypeError("stringValue is missing") # noqa: E800 + # return values # noqa: E800 + + +class SqsRecordModel(BaseModel): + messageId: str + receiptHandle: str + body: str + attributes: SqsAttributesModel + messageAttributes: Dict[str, SqsMsgAttributeModel] + md5OfBody: str + md5OfMessageAttributes: Optional[str] + eventSource: Literal["aws:sqs"] + eventSourceARN: str + awsRegion: str + + +class SqsModel(BaseModel): + Records: List[SqsRecordModel] diff --git a/aws_lambda_powertools/utilities/parser/parser.py b/aws_lambda_powertools/utilities/parser/parser.py new file mode 100644 index 00000000000..a58ee90f4e9 --- /dev/null +++ b/aws_lambda_powertools/utilities/parser/parser.py @@ -0,0 +1,156 @@ +import logging +from typing import Any, Callable, Dict, Optional + +from pydantic import BaseModel, ValidationError + +from ...middleware_factory import lambda_handler_decorator +from ..typing import LambdaContext +from .envelopes.base import BaseEnvelope +from .exceptions import InvalidEnvelopeError, InvalidModelTypeError, ModelValidationError + +logger = logging.getLogger(__name__) + + +@lambda_handler_decorator +def event_parser( + handler: Callable[[Dict, Any], Any], + event: Dict[str, Any], + context: LambdaContext, + model: BaseModel, + envelope: Optional[BaseEnvelope] = None, +) -> Any: + """Lambda handler decorator to parse & validate events using Pydantic models + + It requires a model that implements Pydantic BaseModel to parse & validate the event. + + When an envelope is given, it'll use the following logic: + + 1. Parse the event against the envelope model first e.g. EnvelopeModel(**event) + 2. Envelope will extract a given key to be parsed against the model e.g. event.detail + + This is useful when you need to a) confirm the event wrapper structure, and + b) selectively extract a portion of your payload for parsing & validation. + + NOTE: If envelope is omitted, the complete event is parsed to match the model parameter BaseModel definition. + + Example + ------- + **Lambda handler decorator to parse & validate event** + + class Order(BaseModel): + id: int + description: str + ... + + @event_parser(model=Order) + def handler(event: Order, context: LambdaContext): + ... + + **Lambda handler decorator to parse & validate event - using built-in envelope** + + class Order(BaseModel): + id: int + description: str + ...
+ + @event_parser(model=Order, envelope=envelopes.EventBridgeEnvelope) + def handler(event: Order, context: LambdaContext): + ... + + Parameters + ---------- + handler: Callable + Method to annotate on + event: Dict + Lambda event to be parsed & validated + context: LambdaContext + Lambda context object + model: BaseModel + Your data model that will replace the event. + envelope: BaseEnvelope + Optional envelope to extract the model from + + Raises + ------ + ModelValidationError + When input event does not conform with model provided + InvalidModelTypeError + When model given does not implement BaseModel + InvalidEnvelopeError + When envelope given does not implement BaseEnvelope + """ + parsed_event = parse(event=event, model=model, envelope=envelope) + logger.debug(f"Calling handler {handler.__name__}") + return handler(parsed_event, context) + + +def parse(event: Dict[str, Any], model: BaseModel, envelope: Optional[BaseEnvelope] = None) -> Any: + """Standalone function to parse & validate events using Pydantic models + + Typically used when you need fine-grained control over error handling compared to the event_parser decorator. + + Example + ------- + + **Standalone function to parse & validate event** + + from aws_lambda_powertools.utilities.parser.exceptions import ModelValidationError + + class Order(BaseModel): + id: int + description: str + ... + + def handler(event: Order, context: LambdaContext): + try: + parse(event=event, model=Order) + except ModelValidationError: + ... + + **Standalone function to parse & validate event - using built-in envelope** + + class Order(BaseModel): + id: int + description: str + ... + + def handler(event: Order, context: LambdaContext): + try: + parse(event=event, model=Order, envelope=envelopes.EventBridgeEnvelope) + except ModelValidationError: + ... + + Parameters + ---------- + event: Dict + Lambda event to be parsed & validated + model: BaseModel + Your data model that will replace the event + envelope: BaseEnvelope + Optional envelope to extract the model from + + Raises + ------ + ModelValidationError + When input event does not conform with model provided + InvalidModelTypeError + When model given does not implement BaseModel + InvalidEnvelopeError + When envelope given does not implement BaseEnvelope + """ + if envelope and callable(envelope): + try: + logger.debug(f"Parsing and validating event model with envelope={envelope}") + return envelope().parse(data=event, model=model) + except AttributeError: + raise InvalidEnvelopeError(f"Envelope must implement BaseEnvelope, envelope={envelope}") + except (ValidationError, TypeError) as e: + raise ModelValidationError(f"Input event does not conform with model, envelope={envelope}") from e + + try: + logger.debug("Parsing and validating event model; no envelope used") + return model.parse_obj(event) + except (ValidationError, TypeError) as e: + raise ModelValidationError("Input event does not conform with model") from e + except AttributeError: + raise InvalidModelTypeError("Input model must implement BaseModel") diff --git a/aws_lambda_powertools/utilities/parser/pydantic.py b/aws_lambda_powertools/utilities/parser/pydantic.py new file mode 100644 index 00000000000..d2551928979 --- /dev/null +++ b/aws_lambda_powertools/utilities/parser/pydantic.py @@ -0,0 +1,8 @@ +# Pydantic has many utilities that some advanced customers typically use. +# Importing what's currently in the docs would likely miss something.
+# As Pydantic export new types, new utilities, we will have to keep up +# with a project that's not used in our core functionalities. +# For this reason, we're relying on Pydantic's __all__ attr to allow customers +# to use `from aws_lambda_powertools.utilities.parser.pydantic import ` + +from pydantic import * # noqa: F403,F401 diff --git a/poetry.lock b/poetry.lock index 94bf8b20b7c..b255194ddc5 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,41 +1,40 @@ [[package]] -category = "dev" -description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." name = "appdirs" +version = "1.4.4" +description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +category = "dev" optional = false python-versions = "*" -version = "1.4.4" [[package]] -category = "dev" -description = "Atomic file writes." -marker = "sys_platform == \"win32\"" name = "atomicwrites" +version = "1.4.0" +description = "Atomic file writes." +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "1.4.0" [[package]] -category = "dev" -description = "Classes Without Boilerplate" name = "attrs" +version = "20.2.0" +description = "Classes Without Boilerplate" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "20.2.0" [package.extras] -dev = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "sphinx", "sphinx-rtd-theme", "pre-commit"] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "sphinx", "sphinx-rtd-theme", "pre-commit"] docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] -tests = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"] -tests_no_zope = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"] +tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six"] [[package]] -category = "main" -description = "The AWS X-Ray SDK for Python (the SDK) enables Python developers to record and emit information from within their applications to the AWS X-Ray service." name = "aws-xray-sdk" +version = "2.6.0" +description = "The AWS X-Ray SDK for Python (the SDK) enables Python developers to record and emit information from within their applications to the AWS X-Ray service." +category = "main" optional = false python-versions = "*" -version = "2.6.0" [package.dependencies] botocore = ">=1.11.3" @@ -44,27 +43,27 @@ jsonpickle = "*" wrapt = "*" [[package]] -category = "dev" -description = "Security oriented static analyser for python code." name = "bandit" +version = "1.6.2" +description = "Security oriented static analyser for python code." +category = "dev" optional = false python-versions = "*" -version = "1.6.2" [package.dependencies] +colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} GitPython = ">=1.0.1" PyYAML = ">=3.13" -colorama = ">=0.3.9" six = ">=1.10.0" stevedore = ">=1.20.0" [[package]] -category = "dev" -description = "The uncompromising code formatter." name = "black" +version = "19.10b0" +description = "The uncompromising code formatter." 
+category = "dev" optional = false python-versions = ">=3.6" -version = "19.10b0" [package.dependencies] appdirs = "*" @@ -79,159 +78,149 @@ typed-ast = ">=1.4.0" d = ["aiohttp (>=3.3.2)", "aiohttp-cors"] [[package]] -category = "main" -description = "The AWS SDK for Python" name = "boto3" +version = "1.15.16" +description = "The AWS SDK for Python" +category = "main" optional = false python-versions = "*" -version = "1.15.5" [package.dependencies] -botocore = ">=1.18.5,<1.19.0" +botocore = ">=1.18.16,<1.19.0" jmespath = ">=0.7.1,<1.0.0" s3transfer = ">=0.3.0,<0.4.0" [[package]] -category = "main" -description = "Low-level, data-driven core of boto 3." name = "botocore" +version = "1.18.16" +description = "Low-level, data-driven core of boto 3." +category = "main" optional = false python-versions = "*" -version = "1.18.5" [package.dependencies] jmespath = ">=0.7.1,<1.0.0" python-dateutil = ">=2.1,<3.0.0" - -[package.dependencies.urllib3] -python = "<3.4.0 || >=3.5.0" -version = ">=1.20,<1.26" +urllib3 = {version = ">=1.20,<1.26", markers = "python_version != \"3.4\""} [[package]] -category = "dev" -description = "Python package for providing Mozilla's CA Bundle." name = "certifi" +version = "2020.6.20" +description = "Python package for providing Mozilla's CA Bundle." +category = "dev" optional = false python-versions = "*" -version = "2020.6.20" [[package]] -category = "dev" -description = "Universal encoding detector for Python 2 and 3" name = "chardet" +version = "3.0.4" +description = "Universal encoding detector for Python 2 and 3" +category = "dev" optional = false python-versions = "*" -version = "3.0.4" [[package]] -category = "dev" -description = "Composable command line interface toolkit" name = "click" +version = "7.1.2" +description = "Composable command line interface toolkit" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "7.1.2" [[package]] -category = "dev" -description = "Cross-platform colored terminal text." -marker = "sys_platform == \"win32\" or platform_system == \"Windows\" or python_version > \"3.4\"" name = "colorama" +version = "0.4.3" +description = "Cross-platform colored terminal text." +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "0.4.3" [[package]] -category = "dev" -description = "Code coverage measurement for Python" name = "coverage" +version = "5.3" +description = "Code coverage measurement for Python" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" -version = "5.3" [package.dependencies] -[package.dependencies.toml] -optional = true -version = "*" +toml = {version = "*", optional = true, markers = "extra == \"toml\""} [package.extras] toml = ["toml"] [[package]] -category = "main" -description = "A backport of the dataclasses module for Python 3.6" -marker = "python_version < \"3.7\"" name = "dataclasses" +version = "0.7" +description = "A backport of the dataclasses module for Python 3.6" +category = "main" optional = true -python-versions = "*" -version = "0.6" +python-versions = ">=3.6, <3.7" [[package]] -category = "dev" -description = "Removes commented-out code." name = "eradicate" +version = "1.0" +description = "Removes commented-out code." 
+category = "dev" optional = false python-versions = "*" -version = "1.0" [[package]] -category = "main" -description = "Fastest Python implementation of JSON schema" name = "fastjsonschema" +version = "2.14.5" +description = "Fastest Python implementation of JSON schema" +category = "main" optional = false python-versions = "*" -version = "2.14.5" [package.extras] devel = ["colorama", "jsonschema", "json-spec", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] [[package]] -category = "dev" -description = "the modular source code checker: pep8 pyflakes and co" name = "flake8" +version = "3.8.4" +description = "the modular source code checker: pep8 pyflakes and co" +category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" -version = "3.8.3" [package.dependencies] +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} mccabe = ">=0.6.0,<0.7.0" pycodestyle = ">=2.6.0a1,<2.7.0" pyflakes = ">=2.2.0,<2.3.0" -[package.dependencies.importlib-metadata] -python = "<3.8" -version = "*" - [[package]] -category = "dev" -description = "flake8 plugin to call black as a code style validator" name = "flake8-black" +version = "0.1.2" +description = "flake8 plugin to call black as a code style validator" +category = "dev" optional = false python-versions = "*" -version = "0.1.2" [package.dependencies] black = ">=19.3b0" flake8 = ">=3.0.0" [[package]] -category = "dev" -description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." name = "flake8-bugbear" +version = "20.1.4" +description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." +category = "dev" optional = false python-versions = ">=3.6" -version = "20.1.4" [package.dependencies] attrs = ">=19.2.0" flake8 = ">=3.0.0" [[package]] -category = "dev" -description = "Check for python builtins being used as variables or parameters." name = "flake8-builtins" +version = "1.5.3" +description = "Check for python builtins being used as variables or parameters." +category = "dev" optional = false python-versions = "*" -version = "1.5.3" [package.dependencies] flake8 = "*" @@ -240,39 +229,36 @@ flake8 = "*" test = ["coverage", "coveralls", "mock", "pytest", "pytest-cov"] [[package]] -category = "dev" -description = "A flake8 plugin to help you write better list/set/dict comprehensions." name = "flake8-comprehensions" +version = "3.2.3" +description = "A flake8 plugin to help you write better list/set/dict comprehensions." 
+category = "dev" optional = false python-versions = ">=3.5" -version = "3.2.3" [package.dependencies] flake8 = ">=3.0,<3.2.0 || >3.2.0,<4" - -[package.dependencies.importlib-metadata] -python = "<3.8" -version = "*" +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} [[package]] -category = "dev" -description = "ipdb/pdb statement checker plugin for flake8" name = "flake8-debugger" +version = "3.2.1" +description = "ipdb/pdb statement checker plugin for flake8" +category = "dev" optional = false python-versions = "*" -version = "3.2.1" [package.dependencies] flake8 = ">=1.5" pycodestyle = "*" [[package]] -category = "dev" -description = "Flake8 plugin to find commented out code" name = "flake8-eradicate" +version = "0.3.0" +description = "Flake8 plugin to find commented out code" +category = "dev" optional = false python-versions = ">=3.6,<4.0" -version = "0.3.0" [package.dependencies] attrs = "*" @@ -280,99 +266,93 @@ eradicate = ">=1.0,<2.0" flake8 = ">=3.5,<4.0" [[package]] -category = "dev" -description = "Check for FIXME, TODO and other temporary developer notes. Plugin for flake8." name = "flake8-fixme" +version = "1.1.1" +description = "Check for FIXME, TODO and other temporary developer notes. Plugin for flake8." +category = "dev" optional = false python-versions = "*" -version = "1.1.1" [[package]] -category = "dev" -description = "flake8 plugin that integrates isort ." name = "flake8-isort" +version = "2.9.1" +description = "flake8 plugin that integrates isort ." +category = "dev" optional = false python-versions = "*" -version = "2.9.1" [package.dependencies] flake8 = ">=3.2.1" +isort = {version = ">=4.3.5", extras = ["pyproject"]} testfixtures = "*" -[package.dependencies.isort] -extras = ["pyproject"] -version = ">=4.3.5" - [package.extras] test = ["pytest"] [[package]] -category = "dev" -description = "Polyfill package for Flake8 plugins" name = "flake8-polyfill" +version = "1.0.2" +description = "Polyfill package for Flake8 plugins" +category = "dev" optional = false python-versions = "*" -version = "1.0.2" [package.dependencies] flake8 = "*" [[package]] -category = "dev" -description = "A flake8 extension that helps to make more readable variables names" name = "flake8-variables-names" +version = "0.0.3" +description = "A flake8 extension that helps to make more readable variables names" +category = "dev" optional = false python-versions = "*" -version = "0.0.3" - -[package.dependencies] -setuptools = "*" [[package]] -category = "main" -description = "Clean single-source support for Python 3 and 2" name = "future" +version = "0.18.2" +description = "Clean single-source support for Python 3 and 2" +category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -version = "0.18.2" [[package]] -category = "dev" -description = "Git Object Database" name = "gitdb" +version = "4.0.5" +description = "Git Object Database" +category = "dev" optional = false python-versions = ">=3.4" -version = "4.0.5" [package.dependencies] smmap = ">=3.0.1,<4" [[package]] -category = "dev" -description = "Python Git Library" name = "gitpython" +version = "3.1.9" +description = "Python Git Library" +category = "dev" optional = false python-versions = ">=3.4" -version = "3.1.8" [package.dependencies] gitdb = ">=4.0.1,<5" [[package]] -category = "dev" -description = "Internationalized Domain Names in Applications (IDNA)" name = "idna" +version = "2.10" +description = "Internationalized Domain Names in Applications (IDNA)" +category = "dev" optional = 
false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "2.10" [[package]] -category = "main" -description = "Read metadata from Python packages" name = "importlib-metadata" +version = "2.0.0" +description = "Read metadata from Python packages" +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" -version = "2.0.0" [package.dependencies] zipp = ">=0.5" @@ -382,12 +362,15 @@ docs = ["sphinx", "rst.linker"] testing = ["packaging", "pep517", "importlib-resources (>=1.3)"] [[package]] -category = "dev" -description = "A Python utility / library to sort Python imports." name = "isort" +version = "4.3.21" +description = "A Python utility / library to sort Python imports." +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "4.3.21" + +[package.dependencies] +toml = {version = "*", optional = true, markers = "extra == \"pyproject\""} [package.extras] pipfile = ["pipreqs", "requirementslib"] @@ -396,20 +379,20 @@ requirements = ["pipreqs", "pip-api"] xdg_home = ["appdirs (>=1.4.0)"] [[package]] -category = "main" -description = "JSON Matching Expressions" name = "jmespath" +version = "0.10.0" +description = "JSON Matching Expressions" +category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -version = "0.10.0" [[package]] -category = "main" -description = "Python library for serializing any arbitrary object graph into JSON" name = "jsonpickle" +version = "1.4.1" +description = "Python library for serializing any arbitrary object graph into JSON" +category = "main" optional = false python-versions = ">=2.7" -version = "1.4.1" [package.dependencies] importlib-metadata = "*" @@ -420,12 +403,12 @@ testing = ["coverage (<5)", "pytest (>=3.5,<3.7.3 || >3.7.3)", "pytest-checkdocs "testing.libs" = ["demjson", "simplejson", "ujson", "yajl"] [[package]] -category = "dev" -description = "A super-fast templating language that borrows the best ideas from the existing templating languages." name = "mako" +version = "1.1.3" +description = "A super-fast templating language that borrows the best ideas from the existing templating languages." +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "1.1.3" [package.dependencies] MarkupSafe = ">=0.9.2" @@ -435,12 +418,12 @@ babel = ["babel"] lingua = ["lingua"] [[package]] -category = "dev" -description = "Create Python CLI apps with little to no effort at all!" name = "mando" +version = "0.6.4" +description = "Create Python CLI apps with little to no effort at all!" +category = "dev" optional = false python-versions = "*" -version = "0.6.4" [package.dependencies] six = "*" @@ -449,129 +432,123 @@ six = "*" restructuredText = ["rst2ansi"] [[package]] -category = "dev" -description = "Python implementation of Markdown." name = "markdown" +version = "3.3" +description = "Python implementation of Markdown." +category = "dev" optional = false -python-versions = ">=3.5" -version = "3.2.2" +python-versions = ">=3.6" [package.dependencies] -[package.dependencies.importlib-metadata] -python = "<3.8" -version = "*" +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} [package.extras] testing = ["coverage", "pyyaml"] [[package]] -category = "dev" -description = "Safely add untrusted strings to HTML/XML markup." name = "markupsafe" +version = "1.1.1" +description = "Safely add untrusted strings to HTML/XML markup." 
+category = "dev" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" -version = "1.1.1" [[package]] -category = "dev" -description = "McCabe checker, plugin for flake8" name = "mccabe" +version = "0.6.1" +description = "McCabe checker, plugin for flake8" +category = "dev" optional = false python-versions = "*" -version = "0.6.1" [[package]] -category = "dev" -description = "More routines for operating on iterables, beyond itertools" name = "more-itertools" +version = "8.5.0" +description = "More routines for operating on iterables, beyond itertools" +category = "dev" optional = false python-versions = ">=3.5" -version = "8.5.0" [[package]] -category = "dev" -description = "Core utilities for Python packages" name = "packaging" +version = "20.4" +description = "Core utilities for Python packages" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "20.4" [package.dependencies] pyparsing = ">=2.0.2" six = "*" [[package]] -category = "dev" -description = "Utility library for gitignore style pattern matching of file paths." name = "pathspec" +version = "0.8.0" +description = "Utility library for gitignore style pattern matching of file paths." +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "0.8.0" [[package]] -category = "dev" -description = "Python Build Reasonableness" name = "pbr" +version = "5.5.0" +description = "Python Build Reasonableness" +category = "dev" optional = false python-versions = ">=2.6" -version = "5.5.0" [[package]] -category = "dev" -description = "Auto-generate API documentation for Python projects." name = "pdoc3" +version = "0.7.5" +description = "Auto-generate API documentation for Python projects." 
+category = "dev" optional = false python-versions = ">= 3.5" -version = "0.7.5" [package.dependencies] mako = "*" markdown = ">=3.0" [[package]] -category = "dev" -description = "plugin and hook calling mechanisms for python" name = "pluggy" +version = "0.13.1" +description = "plugin and hook calling mechanisms for python" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "0.13.1" [package.dependencies] -[package.dependencies.importlib-metadata] -python = "<3.8" -version = ">=0.12" +importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} [package.extras] dev = ["pre-commit", "tox"] [[package]] -category = "dev" -description = "library with cross-python path, ini-parsing, io, code, log facilities" name = "py" +version = "1.9.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "1.9.0" [[package]] -category = "dev" -description = "Python style guide checker" name = "pycodestyle" +version = "2.6.0" +description = "Python style guide checker" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "2.6.0" [[package]] -category = "main" -description = "Data validation and settings management using python 3.6 type hinting" name = "pydantic" +version = "1.6.1" +description = "Data validation and settings management using python 3.6 type hinting" +category = "main" optional = true python-versions = ">=3.6" -version = "1.6.1" [package.dependencies] -[package.dependencies.dataclasses] -python = "<3.7" -version = ">=0.6" +dataclasses = {version = ">=0.6", markers = "python_version < \"3.7\""} [package.extras] dotenv = ["python-dotenv (>=0.10.4)"] @@ -579,54 +556,51 @@ email = ["email-validator (>=1.0.3)"] typing_extensions = ["typing-extensions (>=3.7.2)"] [[package]] -category = "dev" -description = "passive checker of Python programs" name = "pyflakes" +version = "2.2.0" +description = "passive checker of Python programs" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "2.2.0" [[package]] -category = "dev" -description = "Python parsing module" name = "pyparsing" +version = "2.4.7" +description = "Python parsing module" +category = "dev" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -version = "2.4.7" [[package]] -category = "dev" -description = "pytest: simple powerful testing with Python" name = "pytest" +version = "5.4.3" +description = "pytest: simple powerful testing with Python" +category = "dev" optional = false python-versions = ">=3.5" -version = "5.4.3" [package.dependencies] -atomicwrites = ">=1.0" +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} attrs = ">=17.4.0" -colorama = "*" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} more-itertools = ">=4.0.0" packaging = "*" pluggy = ">=0.12,<1.0" py = ">=1.5.0" wcwidth = "*" -[package.dependencies.importlib-metadata] -python = "<3.8" -version = ">=0.12" - [package.extras] checkqa-mypy = ["mypy (v0.761)"] testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] [[package]] -category = "dev" -description = "Pytest support for asyncio." name = "pytest-asyncio" +version = "0.12.0" +description = "Pytest support for asyncio." 
+category = "dev" optional = false python-versions = ">= 3.5" -version = "0.12.0" [package.dependencies] pytest = ">=5.4.0" @@ -635,12 +609,12 @@ pytest = ">=5.4.0" testing = ["async_generator (>=1.3)", "coverage", "hypothesis (>=5.7.1)"] [[package]] -category = "dev" -description = "Pytest plugin for measuring coverage." name = "pytest-cov" +version = "2.10.1" +description = "Pytest plugin for measuring coverage." +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "2.10.1" [package.dependencies] coverage = ">=4.4" @@ -650,12 +624,12 @@ pytest = ">=4.6" testing = ["fields", "hunter", "process-tests (2.0.2)", "six", "pytest-xdist", "virtualenv"] [[package]] -category = "dev" -description = "Thin-wrapper around the mock package for easier use with py.test" name = "pytest-mock" +version = "2.0.0" +description = "Thin-wrapper around the mock package for easier use with py.test" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "2.0.0" [package.dependencies] pytest = ">=2.7" @@ -664,58 +638,56 @@ pytest = ">=2.7" dev = ["pre-commit", "tox"] [[package]] -category = "main" -description = "Extensions to the standard Python datetime module" name = "python-dateutil" +version = "2.8.1" +description = "Extensions to the standard Python datetime module" +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -version = "2.8.1" [package.dependencies] six = ">=1.5" [[package]] -category = "dev" -description = "YAML parser and emitter for Python" name = "pyyaml" +version = "5.3.1" +description = "YAML parser and emitter for Python" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "5.3.1" [[package]] -category = "dev" -description = "Code Metrics in Python" name = "radon" +version = "4.3.2" +description = "Code Metrics in Python" +category = "dev" optional = false python-versions = "*" -version = "4.3.2" [package.dependencies] +colorama = {version = ">=0.4.1", markers = "python_version > \"3.4\""} +flake8-polyfill = {version = "*", optional = true, markers = "extra == \"flake8\""} future = "*" mando = ">=0.6,<0.7" -[package.dependencies.colorama] -python = ">=3.5" -version = ">=0.4.1" - [package.extras] flake8 = ["flake8-polyfill"] [[package]] -category = "dev" -description = "Alternative regular expression module, to replace re." name = "regex" +version = "2020.10.11" +description = "Alternative regular expression module, to replace re." +category = "dev" optional = false python-versions = "*" -version = "2020.7.14" [[package]] -category = "dev" -description = "Python HTTP for Humans." name = "requests" +version = "2.24.0" +description = "Python HTTP for Humans." 
+category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "2.24.0" [package.dependencies] certifi = ">=2017.4.17" @@ -728,54 +700,51 @@ security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)"] socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7)", "win-inet-pton"] [[package]] -category = "main" -description = "An Amazon S3 Transfer Manager" name = "s3transfer" +version = "0.3.3" +description = "An Amazon S3 Transfer Manager" +category = "main" optional = false python-versions = "*" -version = "0.3.3" [package.dependencies] botocore = ">=1.12.36,<2.0a.0" [[package]] -category = "main" -description = "Python 2 and 3 compatibility utilities" name = "six" +version = "1.15.0" +description = "Python 2 and 3 compatibility utilities" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -version = "1.15.0" [[package]] -category = "dev" -description = "A pure Python implementation of a sliding window memory map manager" name = "smmap" +version = "3.0.4" +description = "A pure Python implementation of a sliding window memory map manager" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "3.0.4" [[package]] -category = "dev" -description = "Manage dynamic plugins for Python applications" name = "stevedore" +version = "3.2.2" +description = "Manage dynamic plugins for Python applications" +category = "dev" optional = false python-versions = ">=3.6" -version = "3.2.2" [package.dependencies] +importlib-metadata = {version = ">=1.7.0", markers = "python_version < \"3.8\""} pbr = ">=2.0.0,<2.1.0 || >2.1.0" -[package.dependencies.importlib-metadata] -python = "<3.8" -version = ">=1.7.0" - [[package]] -category = "dev" -description = "A collection of helpers and mock objects for unit tests and doc tests." name = "testfixtures" +version = "6.15.0" +description = "A collection of helpers and mock objects for unit tests and doc tests." +category = "dev" optional = false python-versions = "*" -version = "6.14.2" [package.extras] build = ["setuptools-git", "wheel", "twine"] @@ -783,36 +752,36 @@ docs = ["sphinx", "zope.component", "sybil", "twisted", "mock", "django (<2)", " test = ["pytest (>=3.6)", "pytest-cov", "pytest-django", "zope.component", "sybil", "twisted", "mock", "django (<2)", "django"] [[package]] -category = "dev" -description = "Python Library for Tom's Obvious, Minimal Language" name = "toml" +version = "0.10.1" +description = "Python Library for Tom's Obvious, Minimal Language" +category = "dev" optional = false python-versions = "*" -version = "0.10.1" [[package]] -category = "dev" -description = "a fork of Python 2 and 3 ast modules with type comment support" name = "typed-ast" +version = "1.4.1" +description = "a fork of Python 2 and 3 ast modules with type comment support" +category = "dev" optional = false python-versions = "*" -version = "1.4.1" [[package]] -category = "main" -description = "Backported and Experimental Type Hints for Python 3.5+" name = "typing-extensions" +version = "3.7.4.3" +description = "Backported and Experimental Type Hints for Python 3.5+" +category = "main" optional = true python-versions = "*" -version = "3.7.4.3" [[package]] -category = "main" -description = "HTTP library with thread-safe connection pooling, file post, and more." name = "urllib3" +version = "1.25.10" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" -version = "1.25.10" [package.extras] brotli = ["brotlipy (>=0.6.0)"] @@ -820,45 +789,41 @@ secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "pyOpenSSL (>=0 socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7,<2.0)"] [[package]] -category = "dev" -description = "Measures the displayed width of unicode strings in a terminal" name = "wcwidth" +version = "0.2.5" +description = "Measures the displayed width of unicode strings in a terminal" +category = "dev" optional = false python-versions = "*" -version = "0.2.5" [[package]] -category = "main" -description = "Module for decorators, wrappers and monkey patching." name = "wrapt" +version = "1.12.1" +description = "Module for decorators, wrappers and monkey patching." +category = "main" optional = false python-versions = "*" -version = "1.12.1" [[package]] -category = "dev" -description = "Monitor code metrics for Python on your CI server" name = "xenon" +version = "0.7.1" +description = "Monitor code metrics for Python on your CI server" +category = "dev" optional = false python-versions = "*" -version = "0.7.1" [package.dependencies] PyYAML = ">=4.2b1,<6.0" +radon = {version = ">=4,<5", extras = ["flake8"]} requests = ">=2.0,<3.0" -[package.dependencies.radon] -extras = ["flake8"] -version = ">=4,<5" - [[package]] -category = "main" -description = "Backport of pathlib-compatible object wrapper for zip files" -marker = "python_version < \"3.8\"" name = "zipp" +version = "3.3.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +category = "main" optional = false python-versions = ">=3.6" -version = "3.2.0" [package.extras] docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"] @@ -868,9 +833,9 @@ testing = ["pytest (>=3.5,<3.7.3 || >3.7.3)", "pytest-checkdocs (>=1.2.3)", "pyt pydantic = ["pydantic", "typing_extensions"] [metadata] -content-hash = "f2207b4e243108a8b2b2eee5a56f648519d2ce8cb893f4e3c8fb346a44374eaa" -lock-version = "1.0" +lock-version = "1.1" python-versions = "^3.6" +content-hash = "e18b9f99b7876adb78623fd8b2acb9a6f76a5e427c30d0c9ec7ebb5786bc4a52" [metadata.files] appdirs = [ @@ -898,12 +863,12 @@ black = [ {file = "black-19.10b0.tar.gz", hash = "sha256:c2edb73a08e9e0e6f65a0e6af18b059b8b1cdd5bef997d7a0b181df93dc81539"}, ] boto3 = [ - {file = "boto3-1.15.5-py2.py3-none-any.whl", hash = "sha256:0c464a7de522f88b581ca0d41ffa71e9be5e17fbb0456c275421f65b7c5f6a55"}, - {file = "boto3-1.15.5.tar.gz", hash = "sha256:0fce548e19d6db8e11fd0e2ae7809e1e3282080636b4062b2452bfa20e4f0233"}, + {file = "boto3-1.15.16-py2.py3-none-any.whl", hash = "sha256:557320fe8b65cfc85953e6a63d2328e8efec95bf4ec383b92fa2d01119209716"}, + {file = "boto3-1.15.16.tar.gz", hash = "sha256:454a8dfb7b367a058c7967ef6b4e2a192c318f10761769fd1003cf7f2f5a7db9"}, ] botocore = [ - {file = "botocore-1.18.5-py2.py3-none-any.whl", hash = "sha256:e3bf44fba058f6df16006b94a67650418a080a525c82521abb3cb516a4cba362"}, - {file = "botocore-1.18.5.tar.gz", hash = "sha256:7ce7a05b98ffb3170396960273383e8aade9be6026d5a762f5f40969d5d6b761"}, + {file = "botocore-1.18.16-py2.py3-none-any.whl", hash = "sha256:e586e4d6eddbca31e6447a25df9972329ea3de64b1fb0eb17e7ab0c9b91f7720"}, + {file = "botocore-1.18.16.tar.gz", hash = "sha256:f0616d2c719691b94470307cee8adf89ceb1657b7b6f9aa1bf61f9de5543dbbb"}, ] certifi = [ {file = "certifi-2020.6.20-py2.py3-none-any.whl", hash = "sha256:8fc0819f1f30ba15bdb34cceffb9ef04d99f420f68eb75d901e9560b8749fc41"}, @@ 
-958,8 +923,8 @@ coverage = [ {file = "coverage-5.3.tar.gz", hash = "sha256:280baa8ec489c4f542f8940f9c4c2181f0306a8ee1a54eceba071a449fb870a0"}, ] dataclasses = [ - {file = "dataclasses-0.6-py3-none-any.whl", hash = "sha256:454a69d788c7fda44efd71e259be79577822f5e3f53f029a22d08004e951dc9f"}, - {file = "dataclasses-0.6.tar.gz", hash = "sha256:6988bd2b895eef432d562370bb707d540f32f7360ab13da45340101bc2307d84"}, + {file = "dataclasses-0.7-py3-none-any.whl", hash = "sha256:3459118f7ede7c8bea0fe795bff7c6c2ce287d01dd226202f7c9ebc0610a7836"}, + {file = "dataclasses-0.7.tar.gz", hash = "sha256:494a6dcae3b8bcf80848eea2ef64c0cc5cd307ffc263e17cdf42f3e5420808e6"}, ] eradicate = [ {file = "eradicate-1.0.tar.gz", hash = "sha256:4ffda82aae6fd49dfffa777a857cb758d77502a1f2e0f54c9ac5155a39d2d01a"}, @@ -969,8 +934,8 @@ fastjsonschema = [ {file = "fastjsonschema-2.14.5.tar.gz", hash = "sha256:afbc235655f06356e46caa80190512e4d9222abfaca856041be5a74c665fa094"}, ] flake8 = [ - {file = "flake8-3.8.3-py2.py3-none-any.whl", hash = "sha256:15e351d19611c887e482fb960eae4d44845013cc142d42896e9862f775d8cf5c"}, - {file = "flake8-3.8.3.tar.gz", hash = "sha256:f04b9fcbac03b0a3e58c0ab3a0ecc462e023a9faf046d57794184028123aa208"}, + {file = "flake8-3.8.4-py2.py3-none-any.whl", hash = "sha256:749dbbd6bfd0cf1318af27bf97a14e28e5ff548ef8e5b1566ccfb25a11e7c839"}, + {file = "flake8-3.8.4.tar.gz", hash = "sha256:aadae8761ec651813c24be05c6f7b4680857ef6afaae4651a4eccaef97ce6c3b"}, ] flake8-black = [ {file = "flake8-black-0.1.2.tar.gz", hash = "sha256:b79d8d868bd42dc2c1f27469b92a984ecab3579ad285a8708ea5f19bf6c1f3a2"}, @@ -1017,8 +982,8 @@ gitdb = [ {file = "gitdb-4.0.5.tar.gz", hash = "sha256:c9e1f2d0db7ddb9a704c2a0217be31214e91a4fe1dea1efad19ae42ba0c285c9"}, ] gitpython = [ - {file = "GitPython-3.1.8-py3-none-any.whl", hash = "sha256:1858f4fd089abe92ae465f01d5aaaf55e937eca565fb2c1fce35a51b5f85c910"}, - {file = "GitPython-3.1.8.tar.gz", hash = "sha256:080bf8e2cf1a2b907634761c2eaefbe83b69930c94c66ad11b65a8252959f912"}, + {file = "GitPython-3.1.9-py3-none-any.whl", hash = "sha256:138016d519bf4dd55b22c682c904ed2fd0235c3612b2f8f65ce218ff358deed8"}, + {file = "GitPython-3.1.9.tar.gz", hash = "sha256:a03f728b49ce9597a6655793207c6ab0da55519368ff5961e4a74ae475b9fa8e"}, ] idna = [ {file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"}, @@ -1049,8 +1014,8 @@ mando = [ {file = "mando-0.6.4.tar.gz", hash = "sha256:79feb19dc0f097daa64a1243db578e7674909b75f88ac2220f1c065c10a0d960"}, ] markdown = [ - {file = "Markdown-3.2.2-py3-none-any.whl", hash = "sha256:c467cd6233885534bf0fe96e62e3cf46cfc1605112356c4f9981512b8174de59"}, - {file = "Markdown-3.2.2.tar.gz", hash = "sha256:1fafe3f1ecabfb514a5285fca634a53c1b32a81cb0feb154264d55bf2ff22c17"}, + {file = "Markdown-3.3-py3-none-any.whl", hash = "sha256:fbb1ba54ca41e8991dc5a561d9c6f752f5e4546f8750e56413ea50f2385761d3"}, + {file = "Markdown-3.3.tar.gz", hash = "sha256:4f4172a4e989b97f96860fa434b89895069c576e2b537c4b4eed265266a7affc"}, ] markupsafe = [ {file = "MarkupSafe-1.1.1-cp27-cp27m-macosx_10_6_intel.whl", hash = "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161"}, @@ -1186,27 +1151,33 @@ radon = [ {file = "radon-4.3.2.tar.gz", hash = "sha256:758b3ab345aa86e95f642713612a57da7c7da6d552c4dbfbe397a67601ace7dd"}, ] regex = [ - {file = "regex-2020.7.14-cp27-cp27m-win32.whl", hash = "sha256:e46d13f38cfcbb79bfdb2964b0fe12561fe633caf964a77a5f8d4e45fe5d2ef7"}, - {file = "regex-2020.7.14-cp27-cp27m-win_amd64.whl", hash = 
"sha256:6961548bba529cac7c07af2fd4d527c5b91bb8fe18995fed6044ac22b3d14644"}, - {file = "regex-2020.7.14-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c50a724d136ec10d920661f1442e4a8b010a4fe5aebd65e0c2241ea41dbe93dc"}, - {file = "regex-2020.7.14-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:8a51f2c6d1f884e98846a0a9021ff6861bdb98457879f412fdc2b42d14494067"}, - {file = "regex-2020.7.14-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:9c568495e35599625f7b999774e29e8d6b01a6fb684d77dee1f56d41b11b40cd"}, - {file = "regex-2020.7.14-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:51178c738d559a2d1071ce0b0f56e57eb315bcf8f7d4cf127674b533e3101f88"}, - {file = "regex-2020.7.14-cp36-cp36m-win32.whl", hash = "sha256:9eddaafb3c48e0900690c1727fba226c4804b8e6127ea409689c3bb492d06de4"}, - {file = "regex-2020.7.14-cp36-cp36m-win_amd64.whl", hash = "sha256:14a53646369157baa0499513f96091eb70382eb50b2c82393d17d7ec81b7b85f"}, - {file = "regex-2020.7.14-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:1269fef3167bb52631ad4fa7dd27bf635d5a0790b8e6222065d42e91bede4162"}, - {file = "regex-2020.7.14-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:d0a5095d52b90ff38592bbdc2644f17c6d495762edf47d876049cfd2968fbccf"}, - {file = "regex-2020.7.14-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:4c037fd14c5f4e308b8370b447b469ca10e69427966527edcab07f52d88388f7"}, - {file = "regex-2020.7.14-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:bc3d98f621898b4a9bc7fecc00513eec8f40b5b83913d74ccb445f037d58cd89"}, - {file = "regex-2020.7.14-cp37-cp37m-win32.whl", hash = "sha256:46bac5ca10fb748d6c55843a931855e2727a7a22584f302dd9bb1506e69f83f6"}, - {file = "regex-2020.7.14-cp37-cp37m-win_amd64.whl", hash = "sha256:0dc64ee3f33cd7899f79a8d788abfbec168410be356ed9bd30bbd3f0a23a7204"}, - {file = "regex-2020.7.14-cp38-cp38-manylinux1_i686.whl", hash = "sha256:5ea81ea3dbd6767873c611687141ec7b06ed8bab43f68fad5b7be184a920dc99"}, - {file = "regex-2020.7.14-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:bbb332d45b32df41200380fff14712cb6093b61bd142272a10b16778c418e98e"}, - {file = "regex-2020.7.14-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:c11d6033115dc4887c456565303f540c44197f4fc1a2bfb192224a301534888e"}, - {file = "regex-2020.7.14-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:75aaa27aa521a182824d89e5ab0a1d16ca207318a6b65042b046053cfc8ed07a"}, - {file = "regex-2020.7.14-cp38-cp38-win32.whl", hash = "sha256:d6cff2276e502b86a25fd10c2a96973fdb45c7a977dca2138d661417f3728341"}, - {file = "regex-2020.7.14-cp38-cp38-win_amd64.whl", hash = "sha256:7a2dd66d2d4df34fa82c9dc85657c5e019b87932019947faece7983f2089a840"}, - {file = "regex-2020.7.14.tar.gz", hash = "sha256:3a3af27a8d23143c49a3420efe5b3f8cf1a48c6fc8bc6856b03f638abc1833bb"}, + {file = "regex-2020.10.11-cp27-cp27m-win32.whl", hash = "sha256:4f5c0fe46fb79a7adf766b365cae56cafbf352c27358fda811e4a1dc8216d0db"}, + {file = "regex-2020.10.11-cp27-cp27m-win_amd64.whl", hash = "sha256:39a5ef30bca911f5a8a3d4476f5713ed4d66e313d9fb6755b32bec8a2e519635"}, + {file = "regex-2020.10.11-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:7c4fc5a8ec91a2254bb459db27dbd9e16bba1dabff638f425d736888d34aaefa"}, + {file = "regex-2020.10.11-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:d537e270b3e6bfaea4f49eaf267984bfb3628c86670e9ad2a257358d3b8f0955"}, + {file = "regex-2020.10.11-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:a8240df4957a5b0e641998a5d78b3c4ea762c845d8cb8997bf820626826fde9a"}, + {file = "regex-2020.10.11-cp36-cp36m-manylinux2010_x86_64.whl", hash = 
"sha256:4302153abb96859beb2c778cc4662607a34175065fc2f33a21f49eb3fbd1ccd3"}, + {file = "regex-2020.10.11-cp36-cp36m-win32.whl", hash = "sha256:c077c9d04a040dba001cf62b3aff08fd85be86bccf2c51a770c77377662a2d55"}, + {file = "regex-2020.10.11-cp36-cp36m-win_amd64.whl", hash = "sha256:46ab6070b0d2cb85700b8863b3f5504c7f75d8af44289e9562195fe02a8dd72d"}, + {file = "regex-2020.10.11-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:d629d750ebe75a88184db98f759633b0a7772c2e6f4da529f0027b4a402c0e2f"}, + {file = "regex-2020.10.11-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:8e7ef296b84d44425760fe813cabd7afbb48c8dd62023018b338bbd9d7d6f2f0"}, + {file = "regex-2020.10.11-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:e490f08897cb44e54bddf5c6e27deca9b58c4076849f32aaa7a0b9f1730f2c20"}, + {file = "regex-2020.10.11-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:850339226aa4fec04916386577674bb9d69abe0048f5d1a99f91b0004bfdcc01"}, + {file = "regex-2020.10.11-cp37-cp37m-win32.whl", hash = "sha256:60c4f64d9a326fe48e8738c3dbc068e1edc41ff7895a9e3723840deec4bc1c28"}, + {file = "regex-2020.10.11-cp37-cp37m-win_amd64.whl", hash = "sha256:8ba3efdd60bfee1aa784dbcea175eb442d059b576934c9d099e381e5a9f48930"}, + {file = "regex-2020.10.11-cp38-cp38-manylinux1_i686.whl", hash = "sha256:2308491b3e6c530a3bb38a8a4bb1dc5fd32cbf1e11ca623f2172ba17a81acef1"}, + {file = "regex-2020.10.11-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:b8806649983a1c78874ec7e04393ef076805740f6319e87a56f91f1767960212"}, + {file = "regex-2020.10.11-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:a2a31ee8a354fa3036d12804730e1e20d58bc4e250365ead34b9c30bbe9908c3"}, + {file = "regex-2020.10.11-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:d9d53518eeed12190744d366ec4a3f39b99d7daa705abca95f87dd8b442df4ad"}, + {file = "regex-2020.10.11-cp38-cp38-win32.whl", hash = "sha256:3d5a8d007116021cf65355ada47bf405656c4b3b9a988493d26688275fde1f1c"}, + {file = "regex-2020.10.11-cp38-cp38-win_amd64.whl", hash = "sha256:f579caecbbca291b0fcc7d473664c8c08635da2f9b1567c22ea32311c86ef68c"}, + {file = "regex-2020.10.11-cp39-cp39-manylinux1_i686.whl", hash = "sha256:8c8c42aa5d3ac9a49829c4b28a81bebfa0378996f9e0ca5b5ab8a36870c3e5ee"}, + {file = "regex-2020.10.11-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:c529ba90c1775697a65b46c83d47a2d3de70f24d96da5d41d05a761c73b063af"}, + {file = "regex-2020.10.11-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:6cf527ec2f3565248408b61dd36e380d799c2a1047eab04e13a2b0c15dd9c767"}, + {file = "regex-2020.10.11-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:671c51d352cfb146e48baee82b1ee8d6ffe357c292f5e13300cdc5c00867ebfc"}, + {file = "regex-2020.10.11-cp39-cp39-win32.whl", hash = "sha256:a63907332531a499b8cdfd18953febb5a4c525e9e7ca4ac147423b917244b260"}, + {file = "regex-2020.10.11-cp39-cp39-win_amd64.whl", hash = "sha256:1a16afbfadaadc1397353f9b32e19a65dc1d1804c80ad73a14f435348ca017ad"}, + {file = "regex-2020.10.11.tar.gz", hash = "sha256:463e770c48da76a8da82b8d4a48a541f314e0df91cbb6d873a341dbe578efafd"}, ] requests = [ {file = "requests-2.24.0-py2.py3-none-any.whl", hash = "sha256:fe75cc94a9443b9246fc7049224f75604b113c36acb93f87b80ed42c44cbb898"}, @@ -1229,8 +1200,8 @@ stevedore = [ {file = "stevedore-3.2.2.tar.gz", hash = "sha256:f845868b3a3a77a2489d226568abe7328b5c2d4f6a011cc759dfa99144a521f0"}, ] testfixtures = [ - {file = "testfixtures-6.14.2-py2.py3-none-any.whl", hash = "sha256:816557888877f498081c1b5c572049b4a2ddffedb77401308ff4cdc1bb9147b7"}, - {file = "testfixtures-6.14.2.tar.gz", hash = 
"sha256:14d9907390f5f9c7189b3d511b64f34f1072d07cc13b604a57e1bb79029376e3"}, + {file = "testfixtures-6.15.0-py2.py3-none-any.whl", hash = "sha256:e17f4f526fc90b0ac9bc7f8ca62b7dec17d9faf3d721f56bda4f0fd94d02f85a"}, + {file = "testfixtures-6.15.0.tar.gz", hash = "sha256:409f77cfbdad822d12a8ce5c4aa8fb4d0bb38073f4a5444fede3702716a2cec2"}, ] toml = [ {file = "toml-0.10.1-py2.py3-none-any.whl", hash = "sha256:bda89d5935c2eac546d648028b9901107a595863cb36bae0c73ac804a9b4ce88"}, @@ -1280,6 +1251,6 @@ xenon = [ {file = "xenon-0.7.1.tar.gz", hash = "sha256:38bf283135f0636355ecf6054b6f37226af12faab152161bda1a4f9e4dc5b701"}, ] zipp = [ - {file = "zipp-3.2.0-py3-none-any.whl", hash = "sha256:43f4fa8d8bb313e65d8323a3952ef8756bf40f9a5c3ea7334be23ee4ec8278b6"}, - {file = "zipp-3.2.0.tar.gz", hash = "sha256:b52f22895f4cfce194bc8172f3819ee8de7540aa6d873535a8668b730b8b411f"}, + {file = "zipp-3.3.0-py3-none-any.whl", hash = "sha256:eed8ec0b8d1416b2ca33516a37a08892442f3954dee131e92cfd92d8fe3e7066"}, + {file = "zipp-3.3.0.tar.gz", hash = "sha256:64ad89efee774d1897a58607895d80789c59778ea02185dd846ac38394a8642b"}, ] diff --git a/pyproject.toml b/pyproject.toml index 61bc92ab26a..e18cfe3e211 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -38,7 +38,6 @@ flake8-debugger = "^3.2.1" flake8-fixme = "^1.1.1" flake8-isort = "^2.8.0" flake8-variables-names = "^0.0.3" -flake8_polyfill = "^1.0.2" isort = "^4.3.21" pytest-cov = "^2.8.1" pytest-mock = "^2.0.0" diff --git a/tests/functional/parser/conftest.py b/tests/functional/parser/conftest.py new file mode 100644 index 00000000000..27fd4b2d1f6 --- /dev/null +++ b/tests/functional/parser/conftest.py @@ -0,0 +1,46 @@ +from typing import Any, Dict + +import pytest +from pydantic import BaseModel, ValidationError + +from aws_lambda_powertools.utilities.parser import BaseEnvelope, ModelValidationError + + +@pytest.fixture +def dummy_event(): + return {"payload": {"message": "hello world"}} + + +@pytest.fixture +def dummy_schema(): + """Wanted payload structure""" + + class MyDummyModel(BaseModel): + message: str + + return MyDummyModel + + +@pytest.fixture +def dummy_envelope_schema(): + """Event wrapper structure""" + + class MyDummyEnvelopeSchema(BaseModel): + payload: Dict + + return MyDummyEnvelopeSchema + + +@pytest.fixture +def dummy_envelope(dummy_envelope_schema): + class MyDummyEnvelope(BaseEnvelope): + """Unwrap dummy event within payload key""" + + def parse(self, data: Dict[str, Any], model: BaseModel): + try: + parsed_enveloped = dummy_envelope_schema(**data) + except (ValidationError, TypeError) as e: + raise ModelValidationError("Dummy input does not conform with schema") from e + return self._parse(data=parsed_enveloped.payload, model=model) + + return MyDummyEnvelope diff --git a/tests/functional/parser/schemas.py b/tests/functional/parser/schemas.py index 3667601e630..47614cb95d8 100644 --- a/tests/functional/parser/schemas.py +++ b/tests/functional/parser/schemas.py @@ -3,13 +3,13 @@ from pydantic import BaseModel from typing_extensions import Literal -from aws_lambda_powertools.utilities.advanced_parser.schemas import ( - DynamoDBSchema, - DynamoRecordSchema, - DynamoScheme, - EventBridgeSchema, - SqsRecordSchema, - SqsSchema, +from aws_lambda_powertools.utilities.parser.models import ( + DynamoDBStreamChangedRecordModel, + DynamoDBStreamModel, + DynamoDBStreamRecordModel, + EventBridgeModel, + SqsModel, + SqsRecordModel, ) @@ -18,17 +18,17 @@ class MyDynamoBusiness(BaseModel): Id: Dict[Literal["N"], int] -class MyDynamoScheme(DynamoScheme): +class 
MyDynamoScheme(DynamoDBStreamChangedRecordModel): NewImage: Optional[MyDynamoBusiness] OldImage: Optional[MyDynamoBusiness] -class MyDynamoRecordSchema(DynamoRecordSchema): +class MyDynamoDBStreamRecordModel(DynamoDBStreamRecordModel): dynamodb: MyDynamoScheme -class MyAdvancedDynamoBusiness(DynamoDBSchema): - Records: List[MyDynamoRecordSchema] +class MyAdvancedDynamoBusiness(DynamoDBStreamModel): + Records: List[MyDynamoDBStreamRecordModel] class MyEventbridgeBusiness(BaseModel): @@ -36,7 +36,7 @@ class MyEventbridgeBusiness(BaseModel): state: str -class MyAdvancedEventbridgeBusiness(EventBridgeSchema): +class MyAdvancedEventbridgeBusiness(EventBridgeModel): detail: MyEventbridgeBusiness @@ -45,9 +45,9 @@ class MySqsBusiness(BaseModel): username: str -class MyAdvancedSqsRecordSchema(SqsRecordSchema): +class MyAdvancedSqsRecordModel(SqsRecordModel): body: str -class MyAdvancedSqsBusiness(SqsSchema): - Records: List[MyAdvancedSqsRecordSchema] +class MyAdvancedSqsBusiness(SqsModel): + Records: List[MyAdvancedSqsRecordModel] diff --git a/tests/functional/parser/test_dynamodb.py b/tests/functional/parser/test_dynamodb.py index 42e22cb45e7..ac5ebab40c3 100644 --- a/tests/functional/parser/test_dynamodb.py +++ b/tests/functional/parser/test_dynamodb.py @@ -1,16 +1,14 @@ from typing import Any, Dict, List import pytest -from pydantic.error_wrappers import ValidationError -from aws_lambda_powertools.utilities.advanced_parser.envelopes.envelopes import Envelope -from aws_lambda_powertools.utilities.advanced_parser.parser import parser +from aws_lambda_powertools.utilities.parser import envelopes, event_parser, exceptions from aws_lambda_powertools.utilities.typing import LambdaContext from tests.functional.parser.schemas import MyAdvancedDynamoBusiness, MyDynamoBusiness from tests.functional.parser.utils import load_event -@parser(schema=MyDynamoBusiness, envelope=Envelope.DYNAMODB_STREAM) +@event_parser(model=MyDynamoBusiness, envelope=envelopes.DynamoDBEnvelope) def handle_dynamodb(event: List[Dict[str, MyDynamoBusiness]], _: LambdaContext): assert len(event) == 2 assert event[0]["OldImage"] is None @@ -22,7 +20,7 @@ def handle_dynamodb(event: List[Dict[str, MyDynamoBusiness]], _: LambdaContext): assert event[1]["NewImage"].Id["N"] == 101 -@parser(schema=MyAdvancedDynamoBusiness) +@event_parser(model=MyAdvancedDynamoBusiness) def handle_dynamodb_no_envelope(event: MyAdvancedDynamoBusiness, _: LambdaContext): records = event.Records record = records[0] @@ -59,41 +57,13 @@ def test_dynamo_db_stream_trigger_event_no_envelope(): handle_dynamodb_no_envelope(event_dict, LambdaContext()) -def test_validate_event_does_not_conform_with_schema_no_envelope(): +def test_validate_event_does_not_conform_with_model_no_envelope(): event_dict: Any = {"hello": "s"} - with pytest.raises(ValidationError): + with pytest.raises(exceptions.ModelValidationError): handle_dynamodb_no_envelope(event_dict, LambdaContext()) -def test_validate_event_does_not_conform_with_schema(): +def test_validate_event_does_not_conform_with_model(): event_dict: Any = {"hello": "s"} - with pytest.raises(ValidationError): + with pytest.raises(exceptions.ModelValidationError): handle_dynamodb(event_dict, LambdaContext()) - - -def test_validate_event_neither_image_exists_with_schema(): - event_dict: Any = { - "Records": [ - { - "eventID": "1", - "eventName": "INSERT", - "eventVersion": "1.0", - "eventSourceARN": "eventsource_arn", - "awsRegion": "us-west-2", - "eventSource": "aws:dynamodb", - "dynamodb": { - "StreamViewType": 
"NEW_AND_OLD_IMAGES", - "SequenceNumber": "111", - "SizeBytes": 26, - "Keys": {"Id": {"N": "101"}}, - }, - } - ] - } - with pytest.raises(ValidationError) as exc_info: - handle_dynamodb(event_dict, LambdaContext()) - - validation_error: ValidationError = exc_info.value - assert len(validation_error.errors()) == 1 - error = validation_error.errors()[0] - assert error["msg"] == "DynamoDB streams schema failed validation, missing both new & old stream images" diff --git a/tests/functional/parser/test_eventbridge.py b/tests/functional/parser/test_eventbridge.py index 92122605886..07387e9ba0a 100644 --- a/tests/functional/parser/test_eventbridge.py +++ b/tests/functional/parser/test_eventbridge.py @@ -1,22 +1,20 @@ from typing import Any import pytest -from pydantic import ValidationError -from aws_lambda_powertools.utilities.advanced_parser.envelopes.envelopes import Envelope -from aws_lambda_powertools.utilities.advanced_parser.parser import parser +from aws_lambda_powertools.utilities.parser import envelopes, event_parser, exceptions from aws_lambda_powertools.utilities.typing import LambdaContext from tests.functional.parser.schemas import MyAdvancedEventbridgeBusiness, MyEventbridgeBusiness from tests.functional.parser.utils import load_event -@parser(schema=MyEventbridgeBusiness, envelope=Envelope.EVENTBRIDGE) +@event_parser(model=MyEventbridgeBusiness, envelope=envelopes.EventBridgeEnvelope) def handle_eventbridge(event: MyEventbridgeBusiness, _: LambdaContext): assert event.instance_id == "i-1234567890abcdef0" assert event.state == "terminated" -@parser(schema=MyAdvancedEventbridgeBusiness) +@event_parser(model=MyAdvancedEventbridgeBusiness) def handle_eventbridge_no_envelope(event: MyAdvancedEventbridgeBusiness, _: LambdaContext): assert event.detail.instance_id == "i-1234567890abcdef0" assert event.detail.state == "terminated" @@ -28,7 +26,7 @@ def handle_eventbridge_no_envelope(event: MyAdvancedEventbridgeBusiness, _: Lamb assert event.region == "us-west-1" assert event.resources == ["arn:aws:ec2:us-west-1:123456789012:instance/i-1234567890abcdef0"] assert event.source == "aws.ec2" - assert event.detailtype == "EC2 Instance State-change Notification" + assert event.detail_type == "EC2 Instance State-change Notification" def test_handle_eventbridge_trigger_event(): @@ -36,7 +34,7 @@ def test_handle_eventbridge_trigger_event(): handle_eventbridge(event_dict, LambdaContext()) -def test_validate_event_does_not_conform_with_user_dict_schema(): +def test_validate_event_does_not_conform_with_user_dict_model(): event_dict: Any = { "version": "0", "id": "6a7e8feb-b491-4cf7-a9f1-bf3703467718", @@ -48,7 +46,7 @@ def test_validate_event_does_not_conform_with_user_dict_schema(): "resources": ["arn:aws:ec2:us-west-1:123456789012:instance/i-1234567890abcdef0"], "detail": {}, } - with pytest.raises(ValidationError) as e: + with pytest.raises(exceptions.ModelValidationError) as e: handle_eventbridge(event_dict, LambdaContext()) print(e.exconly()) @@ -56,3 +54,8 @@ def test_validate_event_does_not_conform_with_user_dict_schema(): def test_handle_eventbridge_trigger_event_no_envelope(): event_dict = load_event("eventBridgeEvent.json") handle_eventbridge_no_envelope(event_dict, LambdaContext()) + + +def test_handle_invalid_event_with_eventbridge_envelope(): + with pytest.raises(exceptions.ModelValidationError): + handle_eventbridge(event={}, context=LambdaContext()) diff --git a/tests/functional/parser/test_parser.py b/tests/functional/parser/test_parser.py new file mode 100644 index 
00000000000..162b52ee439 --- /dev/null +++ b/tests/functional/parser/test_parser.py @@ -0,0 +1,57 @@ +from typing import Dict + +import pytest + +from aws_lambda_powertools.utilities.parser import event_parser, exceptions +from aws_lambda_powertools.utilities.typing import LambdaContext + + +@pytest.mark.parametrize("invalid_value", [None, bool(), [], (), object]) +def test_parser_unsupported_event(dummy_schema, invalid_value): + @event_parser(model=dummy_schema) + def handle_no_envelope(event: Dict, _: LambdaContext): + return event + + with pytest.raises(exceptions.ModelValidationError): + handle_no_envelope(event=invalid_value, context=LambdaContext()) + + +@pytest.mark.parametrize( + "invalid_envelope,expected", [(True, ""), (["dummy"], ""), (object, exceptions.InvalidEnvelopeError)] +) +def test_parser_invalid_envelope_type(dummy_event, dummy_schema, invalid_envelope, expected): + @event_parser(model=dummy_schema, envelope=invalid_envelope) + def handle_no_envelope(event: Dict, _: LambdaContext): + return event + + if hasattr(expected, "__cause__"): + with pytest.raises(expected): + handle_no_envelope(event=dummy_event["payload"], context=LambdaContext()) + else: + handle_no_envelope(event=dummy_event["payload"], context=LambdaContext()) + + +def test_parser_schema_with_envelope(dummy_event, dummy_schema, dummy_envelope): + @event_parser(model=dummy_schema, envelope=dummy_envelope) + def handle_no_envelope(event: Dict, _: LambdaContext): + return event + + handle_no_envelope(dummy_event, LambdaContext()) + + +def test_parser_schema_no_envelope(dummy_event, dummy_schema): + @event_parser(model=dummy_schema) + def handle_no_envelope(event: Dict, _: LambdaContext): + return event + + handle_no_envelope(dummy_event["payload"], LambdaContext()) + + +@pytest.mark.parametrize("invalid_schema", [None, str, bool(), [], (), object]) +def test_parser_with_invalid_schema_type(dummy_event, invalid_schema): + @event_parser(model=invalid_schema) + def handle_no_envelope(event: Dict, _: LambdaContext): + return event + + with pytest.raises(exceptions.InvalidModelTypeError): + handle_no_envelope(event=dummy_event, context=LambdaContext()) diff --git a/tests/functional/parser/test_sqs.py b/tests/functional/parser/test_sqs.py index da1363f758a..2ee992e2fa1 100644 --- a/tests/functional/parser/test_sqs.py +++ b/tests/functional/parser/test_sqs.py @@ -1,29 +1,15 @@ from typing import Any, List import pytest -from pydantic import ValidationError -from aws_lambda_powertools.utilities.advanced_parser.envelopes.envelopes import Envelope -from aws_lambda_powertools.utilities.advanced_parser.parser import parser +from aws_lambda_powertools.utilities.parser import envelopes, event_parser, exceptions from aws_lambda_powertools.utilities.typing import LambdaContext from tests.functional.parser.schemas import MyAdvancedSqsBusiness, MySqsBusiness from tests.functional.parser.utils import load_event from tests.functional.validator.conftest import sqs_event # noqa: F401 -@parser(schema=str, envelope=Envelope.SQS) -def handle_sqs_str_body(event: List[str], _: LambdaContext): - assert len(event) == 2 - assert event[0] == "Test message." - assert event[1] == "Test message2." 
- - -def test_handle_sqs_trigger_event_str_body(): - event_dict = load_event("sqsEvent.json") - handle_sqs_str_body(event_dict, LambdaContext()) - - -@parser(schema=MySqsBusiness, envelope=Envelope.SQS) +@event_parser(model=MySqsBusiness, envelope=envelopes.SqsEnvelope) def handle_sqs_json_body(event: List[MySqsBusiness], _: LambdaContext): assert len(event) == 1 assert event[0].message == "hello world" @@ -34,14 +20,14 @@ def test_handle_sqs_trigger_event_json_body(sqs_event): # noqa: F811 handle_sqs_json_body(sqs_event, LambdaContext()) -def test_validate_event_does_not_conform_with_schema(): +def test_validate_event_does_not_conform_with_model(): event: Any = {"invalid": "event"} - with pytest.raises(ValidationError): + with pytest.raises(exceptions.ModelValidationError): handle_sqs_json_body(event, LambdaContext()) -def test_validate_event_does_not_conform_user_json_string_with_schema(): +def test_validate_event_does_not_conform_user_json_string_with_model(): event: Any = { "Records": [ { @@ -65,11 +51,11 @@ def test_validate_event_does_not_conform_user_json_string_with_schema(): ] } - with pytest.raises(ValidationError): + with pytest.raises(exceptions.ModelValidationError): handle_sqs_json_body(event, LambdaContext()) -@parser(schema=MyAdvancedSqsBusiness) +@event_parser(model=MyAdvancedSqsBusiness) def handle_sqs_no_envelope(event: MyAdvancedSqsBusiness, _: LambdaContext): records = event.Records record = records[0]
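
Illustrative sketch (not part of the patch above): the test changes in this diff exercise the renamed parser API — event_parser in place of the old parser decorator, envelope classes such as envelopes.EventBridgeEnvelope in place of the Envelope enum, and exceptions.ModelValidationError in place of pydantic's ValidationError. A minimal handler built only from names that appear in these tests might look as follows; the Ec2StateChange model name is hypothetical and mirrors MyEventbridgeBusiness from tests/functional/parser/schemas.py.

from pydantic import BaseModel

from aws_lambda_powertools.utilities.parser import envelopes, event_parser, exceptions
from aws_lambda_powertools.utilities.typing import LambdaContext


class Ec2StateChange(BaseModel):
    # Same fields as MyEventbridgeBusiness in the functional tests
    instance_id: str
    state: str


# The decorator validates the raw EventBridge payload, unwraps its "detail"
# key via the envelope, and passes the parsed model to the handler; an event
# that does not conform raises exceptions.ModelValidationError before the
# handler body runs (see test_handle_invalid_event_with_eventbridge_envelope).
@event_parser(model=Ec2StateChange, envelope=envelopes.EventBridgeEnvelope)
def handler(event: Ec2StateChange, context: LambdaContext):
    return event.state
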