diff --git a/aws_lambda_powertools/utilities/parser/models/s3.py b/aws_lambda_powertools/utilities/parser/models/s3.py
index c4e7576e1ea..77617832c29 100644
--- a/aws_lambda_powertools/utilities/parser/models/s3.py
+++ b/aws_lambda_powertools/utilities/parser/models/s3.py
@@ -1,5 +1,5 @@
 from datetime import datetime
-from typing import List, Literal, Optional
+from typing import List, Literal, Optional, Union
 
 from pydantic import BaseModel, model_validator
 from pydantic.fields import Field
@@ -23,7 +23,7 @@ class S3Identity(BaseModel):
 
 
 class S3RequestParameters(BaseModel):
-    sourceIPAddress: IPvAnyNetwork
+    sourceIPAddress: Union[IPvAnyNetwork, Literal["s3.amazonaws.com"]]
 
 
 class S3ResponseElements(BaseModel):
@@ -45,7 +45,7 @@ class S3Object(BaseModel):
     key: str
     size: Optional[NonNegativeFloat] = None
     eTag: Optional[str] = None
-    sequencer: str
+    sequencer: Optional[str] = None
     versionId: Optional[str] = None
diff --git a/tests/events/s3EventLifecycleTransition.json b/tests/events/s3EventLifecycleTransition.json
new file mode 100644
index 00000000000..9974ebf8c18
--- /dev/null
+++ b/tests/events/s3EventLifecycleTransition.json
@@ -0,0 +1,43 @@
+{
+  "Records": [
+    {
+      "eventVersion": "2.3",
+      "eventSource": "aws:s3",
+      "awsRegion": "us-east-1",
+      "eventTime": "2019-09-03T19:37:27.192Z",
+      "eventName": "LifecycleTransition",
+      "userIdentity": {
+        "principalId": "s3.amazonaws.com"
+      },
+      "requestParameters": {
+        "sourceIPAddress": "s3.amazonaws.com"
+      },
+      "responseElements": {
+        "x-amz-request-id": "D82B88E5F771F645",
+        "x-amz-id-2": "vlR7PnpV2Ce81l0PRw6jlUpck7Jo5ZsQjryTjKlc5aLWGVHPZLj5NeC6qMa0emYBDXOo6QBU0Wo="
+      },
+      "s3": {
+        "s3SchemaVersion": "1.0",
+        "configurationId": "828aa6fc-f7b5-4305-8584-487c791949c1",
+        "bucket": {
+          "name": "lambda-artifacts-deafc19498e3f2df",
+          "ownerIdentity": {
+            "principalId": "A3I5XTEXAMAI3E"
+          },
+          "arn": "arn:aws:s3:::lambda-artifacts-deafc19498e3f2df"
+        },
+        "object": {
+          "key": "/path/to/file.parquet",
+          "size": 12345,
+          "eTag": "abcdef1232423423",
+          "versionId": "SomeThingThere"
+        }
+      },
+      "lifecycleEventData": {
+        "transitionEventData": {
+          "destinationStorageClass": "INTELLIGENT_TIERING"
+        }
+      }
+    }
+  ]
+}
diff --git a/tests/unit/parser/_pydantic/test_s3.py b/tests/unit/parser/_pydantic/test_s3.py
index 1586f32d28e..7a9beb990c4 100644
--- a/tests/unit/parser/_pydantic/test_s3.py
+++ b/tests/unit/parser/_pydantic/test_s3.py
@@ -157,3 +157,44 @@ def test_s3_none_etag_value_failed_validation():
     raw_event["Records"][0]["s3"]["object"]["eTag"] = None
     with pytest.raises(ValidationError):
         S3Model(**raw_event)
+
+
+def test_s3_trigger_event_lifecycle_transition():
+    raw_event = load_event("s3EventLifecycleTransition.json")
+    parsed_event: S3Model = S3Model(**raw_event)
+
+    records = list(parsed_event.Records)
+    assert len(records) == 1
+
+    record: S3RecordModel = records[0]
+    raw_record = raw_event["Records"][0]
+    assert record.eventVersion == raw_record["eventVersion"]
+    assert record.eventSource == raw_record["eventSource"]
+    assert record.awsRegion == raw_record["awsRegion"]
+    convert_time = int(round(record.eventTime.timestamp() * 1000))
+    assert convert_time == 1567539447192
+    assert record.eventName == raw_record["eventName"]
+    assert record.glacierEventData is None
+
+    user_identity = record.userIdentity
+    assert user_identity.principalId == raw_record["userIdentity"]["principalId"]
+
+    request_parameters = record.requestParameters
+    assert str(request_parameters.sourceIPAddress) == "s3.amazonaws.com"
+    assert record.responseElements.x_amz_request_id == raw_record["responseElements"]["x-amz-request-id"]
+    assert record.responseElements.x_amz_id_2 == raw_record["responseElements"]["x-amz-id-2"]
+
+    s3 = record.s3
+    raw_s3 = raw_record["s3"]
+    assert s3.s3SchemaVersion == raw_s3["s3SchemaVersion"]
+    assert s3.configurationId == raw_s3["configurationId"]
+    assert s3.object.key == raw_s3["object"]["key"]
+    assert s3.object.size == 12345
+    assert s3.object.eTag == "abcdef1232423423"
+    assert s3.object.versionId == "SomeThingThere"
+
+    bucket = s3.bucket
+    raw_bucket = raw_s3["bucket"]
+    assert bucket.name == raw_bucket["name"]
+    assert bucket.ownerIdentity.principalId == raw_bucket["ownerIdentity"]["principalId"]
+    assert bucket.arn == raw_bucket["arn"]
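
For context, here is a minimal sketch (not part of the diff) of what the patch enables. Before it, S3RequestParameters.sourceIPAddress accepted only IPvAnyNetwork and S3Object.sequencer was required, so an S3 LifecycleTransition notification failed validation. The sketch assumes a Powertools build with the changes above applied and reuses the values from the new s3EventLifecycleTransition.json fixture:

    # Minimal sketch: parse a LifecycleTransition notification with the
    # relaxed S3Model. Values mirror s3EventLifecycleTransition.json above.
    from aws_lambda_powertools.utilities.parser.models import S3Model

    event = {
        "Records": [
            {
                "eventVersion": "2.3",
                "eventSource": "aws:s3",
                "awsRegion": "us-east-1",
                "eventTime": "2019-09-03T19:37:27.192Z",
                "eventName": "LifecycleTransition",
                "userIdentity": {"principalId": "s3.amazonaws.com"},
                # S3 itself initiates lifecycle transitions, so the "caller IP"
                # is the literal string "s3.amazonaws.com" -- the reason for
                # widening the field to Union[IPvAnyNetwork, Literal["s3.amazonaws.com"]].
                "requestParameters": {"sourceIPAddress": "s3.amazonaws.com"},
                "responseElements": {
                    "x-amz-request-id": "D82B88E5F771F645",
                    "x-amz-id-2": "vlR7PnpV2Ce81l0PRw6jlUpck7Jo5ZsQjryTjKlc5aLWGVHPZLj5NeC6qMa0emYBDXOo6QBU0Wo=",
                },
                "s3": {
                    "s3SchemaVersion": "1.0",
                    "configurationId": "828aa6fc-f7b5-4305-8584-487c791949c1",
                    "bucket": {
                        "name": "lambda-artifacts-deafc19498e3f2df",
                        "ownerIdentity": {"principalId": "A3I5XTEXAMAI3E"},
                        "arn": "arn:aws:s3:::lambda-artifacts-deafc19498e3f2df",
                    },
                    # Note: no "sequencer" key. Lifecycle notifications omit it,
                    # which the Optional[str] change above now tolerates.
                    "object": {
                        "key": "/path/to/file.parquet",
                        "size": 12345,
                        "eTag": "abcdef1232423423",
                    },
                },
            },
        ],
    }

    record = S3Model(**event).Records[0]
    assert str(record.requestParameters.sourceIPAddress) == "s3.amazonaws.com"
    assert record.s3.object.sequencer is None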