diff --git a/aws_lambda_powertools/utilities/parser/models/__init__.py b/aws_lambda_powertools/utilities/parser/models/__init__.py
index 45230b8c79e..923d5d057c3 100644
--- a/aws_lambda_powertools/utilities/parser/models/__init__.py
+++ b/aws_lambda_powertools/utilities/parser/models/__init__.py
@@ -4,9 +4,29 @@
from .event_bridge import EventBridgeModel
from .kinesis import KinesisDataStreamModel, KinesisDataStreamRecord, KinesisDataStreamRecordPayload
from .s3 import S3Model, S3RecordModel
-from .ses import SesModel, SesRecordModel
+from .s3_object_event import (
+ S3ObjectConfiguration,
+ S3ObjectContext,
+ S3ObjectLambdaEvent,
+ S3ObjectSessionAttributes,
+ S3ObjectSessionContext,
+ S3ObjectSessionIssuer,
+ S3ObjectUserIdentity,
+ S3ObjectUserRequest,
+)
+from .ses import (
+ SesMail,
+ SesMailCommonHeaders,
+ SesMailHeaders,
+ SesMessage,
+ SesModel,
+ SesReceipt,
+ SesReceiptAction,
+ SesReceiptVerdict,
+ SesRecordModel,
+)
from .sns import SnsModel, SnsNotificationModel, SnsRecordModel
-from .sqs import SqsModel, SqsRecordModel
+from .sqs import SqsAttributesModel, SqsModel, SqsMsgAttributeModel, SqsRecordModel
__all__ = [
"CloudWatchLogsData",
@@ -20,16 +40,33 @@
"EventBridgeModel",
"DynamoDBStreamChangedRecordModel",
"DynamoDBStreamRecordModel",
"KinesisDataStreamModel",
"KinesisDataStreamRecord",
"KinesisDataStreamRecordPayload",
"S3Model",
"S3RecordModel",
+ "S3ObjectLambdaEvent",
+ "S3ObjectUserIdentity",
+ "S3ObjectSessionContext",
+ "S3ObjectSessionAttributes",
+ "S3ObjectSessionIssuer",
+ "S3ObjectUserRequest",
+ "S3ObjectConfiguration",
+ "S3ObjectContext",
"SesModel",
"SesRecordModel",
+ "SesMessage",
+ "SesMail",
+ "SesMailCommonHeaders",
+ "SesMailHeaders",
+ "SesReceipt",
+ "SesReceiptAction",
+ "SesReceiptVerdict",
"SnsModel",
"SnsNotificationModel",
"SnsRecordModel",
"SqsModel",
"SqsRecordModel",
+ "SqsMsgAttributeModel",
+ "SqsAttributesModel",
]
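With these re-exports in place, the newly added submodels can be imported straight from the models package. A minimal, illustrative sketch (field names come from the `s3_object_event.py` module added below; the values are invented):

```python
# Illustrative only: the submodels listed in __all__ above become importable
# directly from the parser's models package.
from aws_lambda_powertools.utilities.parser.models import (
    S3ObjectLambdaEvent,        # full event model
    S3ObjectSessionAttributes,  # nested submodel
    SqsMsgAttributeModel,       # nested submodel
)

# Submodels can be validated on their own, e.g. when assembling test fixtures.
attrs = S3ObjectSessionAttributes(creationDate="2021-05-15T12:00:00Z", mfaAuthenticated=False)
assert attrs.mfaAuthenticated is False
```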
diff --git a/aws_lambda_powertools/utilities/parser/models/s3_object_event.py b/aws_lambda_powertools/utilities/parser/models/s3_object_event.py
new file mode 100644
index 00000000000..1fc10672746
--- /dev/null
+++ b/aws_lambda_powertools/utilities/parser/models/s3_object_event.py
@@ -0,0 +1,57 @@
+from typing import Dict, Optional
+
+from pydantic import BaseModel, HttpUrl
+
+
+class S3ObjectContext(BaseModel):
+ inputS3Url: HttpUrl
+ outputRoute: str
+ outputToken: str
+
+
+class S3ObjectConfiguration(BaseModel):
+ accessPointArn: str
+ supportingAccessPointArn: str
+ payload: str
+
+
+class S3ObjectUserRequest(BaseModel):
+ url: str
+ headers: Dict[str, str]
+
+
+class S3ObjectSessionIssuer(BaseModel):
+ type: str # noqa: A003, VNE003
+ userName: Optional[str]
+ principalId: str
+ arn: str
+ accountId: str
+
+
+class S3ObjectSessionAttributes(BaseModel):
+ creationDate: str
+ mfaAuthenticated: bool
+
+
+class S3ObjectSessionContext(BaseModel):
+ sessionIssuer: S3ObjectSessionIssuer
+ attributes: S3ObjectSessionAttributes
+
+
+class S3ObjectUserIdentity(BaseModel):
+    type: str  # noqa: A003, VNE003
+ accountId: str
+ accessKeyId: str
+ userName: Optional[str]
+ principalId: str
+ arn: str
+ sessionContext: Optional[S3ObjectSessionContext]
+
+
+class S3ObjectLambdaEvent(BaseModel):
+ xAmzRequestId: str
+ getObjectContext: S3ObjectContext
+ configuration: S3ObjectConfiguration
+ userRequest: S3ObjectUserRequest
+ userIdentity: S3ObjectUserIdentity
+ protocolVersion: str
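As a usage sketch (not part of this change), the new model can be exercised with the parser's standalone `parse` function; every value in `sample_event` below is invented purely to satisfy the field types declared above:

```python
# Hedged sketch: exercising S3ObjectLambdaEvent with the standalone parse()
# function. All values below are made up for illustration only.
from aws_lambda_powertools.utilities.parser import parse
from aws_lambda_powertools.utilities.parser.models import S3ObjectLambdaEvent

sample_event = {
    "xAmzRequestId": "1a2b3c4d",
    "getObjectContext": {
        "inputS3Url": "https://s3-access-point.example.com/object?token=abc",
        "outputRoute": "io-example-route",
        "outputToken": "io-example-token",
    },
    "configuration": {
        "accessPointArn": "arn:aws:s3-object-lambda:us-east-1:123456789012:accesspoint/example",
        "supportingAccessPointArn": "arn:aws:s3:us-east-1:123456789012:accesspoint/example",
        "payload": "{}",
    },
    "userRequest": {
        "url": "https://example.s3-object-lambda.us-east-1.amazonaws.com/example.txt",
        "headers": {"Host": "example.s3-object-lambda.us-east-1.amazonaws.com"},
    },
    "userIdentity": {
        "type": "IAMUser",
        "accountId": "123456789012",
        "accessKeyId": "AKIAIOSFODNN7EXAMPLE",
        "principalId": "AIDAEXAMPLE",
        "arn": "arn:aws:iam::123456789012:user/example-user",
        # userName and sessionContext are Optional and can be omitted
    },
    "protocolVersion": "1.00",
}

event = parse(event=sample_event, model=S3ObjectLambdaEvent)
assert str(event.getObjectContext.inputS3Url).startswith("https://")
```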
diff --git a/docs/utilities/parser.md b/docs/utilities/parser.md
index 1c2605adc08..a718c0bd4db 100644
--- a/docs/utilities/parser.md
+++ b/docs/utilities/parser.md
@@ -149,17 +149,18 @@ Use this standalone function when you want more control over the data validation
Parser comes with the following built-in models:
-Model name | Description
-------------------------------------------------- | ----------------------------------------------------------------------------------------------------------
-**DynamoDBStreamModel** | Lambda Event Source payload for Amazon DynamoDB Streams
-**EventBridgeModel** | Lambda Event Source payload for Amazon EventBridge
-**SqsModel** | Lambda Event Source payload for Amazon SQS
-**AlbModel** | Lambda Event Source payload for Amazon Application Load Balancer
-**CloudwatchLogsModel** | Lambda Event Source payload for Amazon CloudWatch Logs
-**S3Model** | Lambda Event Source payload for Amazon S3
-**KinesisDataStreamModel** | Lambda Event Source payload for Amazon Kinesis Data Streams
-**SesModel** | Lambda Event Source payload for Amazon Simple Email Service
-**SnsModel** | Lambda Event Source payload for Amazon Simple Notification Service
+| Model name | Description |
+| -------------------------- | ------------------------------------------------------------------ |
+| **DynamoDBStreamModel** | Lambda Event Source payload for Amazon DynamoDB Streams |
+| **EventBridgeModel** | Lambda Event Source payload for Amazon EventBridge |
+| **SqsModel** | Lambda Event Source payload for Amazon SQS |
+| **AlbModel** | Lambda Event Source payload for Amazon Application Load Balancer |
+| **CloudwatchLogsModel** | Lambda Event Source payload for Amazon CloudWatch Logs |
+| **S3Model** | Lambda Event Source payload for Amazon S3 |
+| **S3ObjectLambdaEvent** | Lambda Event Source payload for Amazon S3 Object Lambda |
+| **KinesisDataStreamModel** | Lambda Event Source payload for Amazon Kinesis Data Streams |
+| **SesModel** | Lambda Event Source payload for Amazon Simple Email Service |
+| **SnsModel** | Lambda Event Source payload for Amazon Simple Notification Service |
### extending built-in models
@@ -293,15 +294,15 @@ Here's an example of parsing a model found in an event coming from EventBridge,
Parser comes with the following built-in envelopes, where `Model` in the return section is your given model.
-Envelope name | Behaviour | Return
-------------------------------------------------- | ---------------------------------------------------------------------------------------------------------- | ------------------------------------
-**DynamoDBStreamEnvelope** | 1. Parses data using `DynamoDBStreamModel`. <br/> 2. Parses records in `NewImage` and `OldImage` keys using your model. <br/> 3. Returns a list with a dictionary containing `NewImage` and `OldImage` keys | `List[Dict[str, Optional[Model]]]`
-**EventBridgeEnvelope** | 1. Parses data using `EventBridgeModel`. <br/> 2. Parses `detail` key using your model and returns it. | `Model`
-**SqsEnvelope** | 1. Parses data using `SqsModel`. <br/> 2. Parses records in `body` key using your model and return them in a list. | `List[Model]`
-**CloudWatchLogsEnvelope** | 1. Parses data using `CloudwatchLogsModel` which will base64 decode and decompress it. <br/> 2. Parses records in `message` key using your model and return them in a list. | `List[Model]`
-**KinesisDataStreamEnvelope** | 1. Parses data using `KinesisDataStreamModel` which will base64 decode it. <br/> 2. Parses records in in `Records` key using your model and returns them in a list. | `List[Model]`
-**SnsEnvelope** | 1. Parses data using `SnsModel`. <br/> 2. Parses records in `body` key using your model and return them in a list. | `List[Model]`
-**SnsSqsEnvelope** | 1. Parses data using `SqsModel`. <br/> 2. Parses SNS records in `body` key using `SnsNotificationModel`. <br/> 3. Parses data in `Message` key using your model and return them in a list. | `List[Model]`
+| Envelope name | Behaviour | Return |
+| ----------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------- |
+| **DynamoDBStreamEnvelope** | 1. Parses data using `DynamoDBStreamModel`. <br/> 2. Parses records in `NewImage` and `OldImage` keys using your model. <br/> 3. Returns a list with a dictionary containing `NewImage` and `OldImage` keys | `List[Dict[str, Optional[Model]]]` |
+| **EventBridgeEnvelope** | 1. Parses data using `EventBridgeModel`. <br/> 2. Parses `detail` key using your model and returns it. | `Model` |
+| **SqsEnvelope** | 1. Parses data using `SqsModel`. <br/> 2. Parses records in `body` key using your model and returns them in a list. | `List[Model]` |
+| **CloudWatchLogsEnvelope** | 1. Parses data using `CloudwatchLogsModel` which will base64 decode and decompress it. <br/> 2. Parses records in `message` key using your model and returns them in a list. | `List[Model]` |
+| **KinesisDataStreamEnvelope** | 1. Parses data using `KinesisDataStreamModel` which will base64 decode it. <br/> 2. Parses records in `Records` key using your model and returns them in a list. | `List[Model]` |
+| **SnsEnvelope** | 1. Parses data using `SnsModel`. <br/> 2. Parses records in `body` key using your model and returns them in a list. | `List[Model]` |
+| **SnsSqsEnvelope** | 1. Parses data using `SqsModel`. <br/> 2. Parses SNS records in `body` key using `SnsNotificationModel`. <br/> 3. Parses data in `Message` key using your model and returns them in a list. | `List[Model]` |
### bringing your own envelope
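To make the envelope behaviour in the table above concrete, here is a hedged sketch using `EventBridgeEnvelope` with a user-defined `Order` model; both the model and the event values are illustrative:

```python
# Hedged sketch of the envelope flow: parse() validates the outer EventBridge
# payload with EventBridgeModel, then applies the user model to the "detail" key.
from aws_lambda_powertools.utilities.parser import BaseModel, envelopes, parse


class Order(BaseModel):  # illustrative user model
    order_id: str
    amount: float


eventbridge_event = {
    "version": "0",
    "id": "abcd-1234",
    "detail-type": "OrderCreated",
    "source": "my.orders.app",
    "account": "123456789012",
    "time": "2021-05-15T12:00:00Z",
    "region": "us-east-1",
    "resources": [],
    "detail": {"order_id": "1234", "amount": 9.99},
}

order = parse(event=eventbridge_event, model=Order, envelope=envelopes.EventBridgeEnvelope)
assert order.order_id == "1234"
```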
diff --git a/tests/functional/parser/test_s3_object_event.py b/tests/functional/parser/test_s3_object_event.py
new file mode 100644
index 00000000000..da015338cf4
--- /dev/null
+++ b/tests/functional/parser/test_s3_object_event.py
@@ -0,0 +1,67 @@
+from aws_lambda_powertools.utilities.parser import event_parser
+from aws_lambda_powertools.utilities.parser.models import S3ObjectLambdaEvent
+from aws_lambda_powertools.utilities.typing import LambdaContext
+from tests.functional.parser.utils import load_event
+
+
+@event_parser(model=S3ObjectLambdaEvent)
+def handle_s3_object_event_iam(event: S3ObjectLambdaEvent, _: LambdaContext):
+ return event
+
+
+def test_s3_object_event():
+ event = load_event("s3ObjectEventIAMUser.json")
+ parsed_event: S3ObjectLambdaEvent = handle_s3_object_event_iam(event, LambdaContext())
+ assert parsed_event.xAmzRequestId == event["xAmzRequestId"]
+ assert parsed_event.getObjectContext is not None
+ object_context = parsed_event.getObjectContext
+ assert str(object_context.inputS3Url) == event["getObjectContext"]["inputS3Url"]
+ assert object_context.outputRoute == event["getObjectContext"]["outputRoute"]
+ assert object_context.outputToken == event["getObjectContext"]["outputToken"]
+ assert parsed_event.configuration is not None
+ configuration = parsed_event.configuration
+ assert configuration.accessPointArn == event["configuration"]["accessPointArn"]
+ assert configuration.supportingAccessPointArn == event["configuration"]["supportingAccessPointArn"]
+ assert configuration.payload == event["configuration"]["payload"]
+ assert parsed_event.userRequest is not None
+ user_request = parsed_event.userRequest
+ assert user_request.url == event["userRequest"]["url"]
+ assert user_request.headers == event["userRequest"]["headers"]
+ assert user_request.headers["Accept-Encoding"] == "identity"
+ assert parsed_event.userIdentity is not None
+ user_identity = parsed_event.userIdentity
+ assert user_identity.type == event["userIdentity"]["type"]
+ assert user_identity.principalId == event["userIdentity"]["principalId"]
+ assert user_identity.arn == event["userIdentity"]["arn"]
+ assert user_identity.accountId == event["userIdentity"]["accountId"]
+ assert user_identity.accessKeyId == event["userIdentity"]["accessKeyId"]
+ assert user_identity.userName == event["userIdentity"]["userName"]
+ assert user_identity.sessionContext is None
+ assert parsed_event.protocolVersion == event["protocolVersion"]
+
+
+@event_parser(model=S3ObjectLambdaEvent)
+def handle_s3_object_event_temp_creds(event: S3ObjectLambdaEvent, _: LambdaContext):
+ return event
+
+
+def test_s3_object_event_temp_credentials():
+ event = load_event("s3ObjectEventTempCredentials.json")
+ parsed_event: S3ObjectLambdaEvent = handle_s3_object_event_temp_creds(event, LambdaContext())
+ assert parsed_event.xAmzRequestId == event["xAmzRequestId"]
+ session_context = parsed_event.userIdentity.sessionContext
+ assert session_context is not None
+ session_issuer = session_context.sessionIssuer
+ assert session_issuer is not None
+ assert session_issuer.type == event["userIdentity"]["sessionContext"]["sessionIssuer"]["type"]
+ assert session_issuer.userName == event["userIdentity"]["sessionContext"]["sessionIssuer"]["userName"]
+ assert session_issuer.principalId == event["userIdentity"]["sessionContext"]["sessionIssuer"]["principalId"]
+ assert session_issuer.arn == event["userIdentity"]["sessionContext"]["sessionIssuer"]["arn"]
+ assert session_issuer.accountId == event["userIdentity"]["sessionContext"]["sessionIssuer"]["accountId"]
+ session_attributes = session_context.attributes
+ assert session_attributes is not None
+ assert (
+ str(session_attributes.mfaAuthenticated).lower()
+ == event["userIdentity"]["sessionContext"]["attributes"]["mfaAuthenticated"]
+ )
+ assert session_attributes.creationDate == event["userIdentity"]["sessionContext"]["attributes"]["creationDate"]
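The temp-credentials test depends on the fixture carrying a `sessionContext` block. A hedged sketch of that fragment's shape (not the real `s3ObjectEventTempCredentials.json`; field names come from the models added in this diff, and `mfaAuthenticated` is kept as a lowercase string exactly as the final assertion expects):

```python
# Not the real fixture: an illustrative fragment showing the shape the
# sessionContext assertions above depend on.
from aws_lambda_powertools.utilities.parser.models import S3ObjectSessionContext

session_context_fragment = {
    "sessionIssuer": {
        "type": "Role",
        "userName": "example-role-session",
        "principalId": "AROAEXAMPLEID",
        "arn": "arn:aws:iam::123456789012:role/example-role",
        "accountId": "123456789012",
    },
    "attributes": {
        "creationDate": "2021-05-15T12:00:00Z",
        # kept as a lowercase string in the JSON fixture; pydantic coerces it
        # to bool, hence the str(...).lower() comparison in the test above
        "mfaAuthenticated": "false",
    },
}

session_context = S3ObjectSessionContext(**session_context_fragment)
assert session_context.attributes.mfaAuthenticated is False
```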