Skip to content

Commit e8cff5c

Browse files
Merge branch 'develop' into dependabot/pip/develop/types-python-dateutil-2.9.0.20260124
2 parents 8b113c4 + f08935d commit e8cff5c

File tree

6 files changed

+260
-14
lines changed

6 files changed

+260
-14
lines changed

aws_lambda_powertools/utilities/data_classes/s3_event.py

Lines changed: 33 additions & 5 deletions
Original file line number | Diff line number | Diff line change
@@ -215,9 +215,14 @@ def bucket(self) -> S3Bucket:
215215

216216
@property
217217
def get_object(self) -> S3Object:
218-
"""Get the `object` property as an S3Object"""
219-
# Note: this name conflicts with existing python builtins
220-
return S3Object(self["object"])
218+
"""Get the `object` property as an S3Object
219+
220+
Note: IntelligentTiering events use 'get_object' as the actual key name,
221+
while other S3 events use 'object'. This method handles both cases.
222+
"""
223+
# IntelligentTiering events use 'get_object', others use 'object'
224+
object_data = self.get("get_object") or self["object"]
225+
return S3Object(object_data)
221226

222227

223228
class S3EventRecordGlacierRestoreEventData(DictWrapper):
@@ -242,6 +247,16 @@ def restore_event_data(self) -> S3EventRecordGlacierRestoreEventData:
242247
return S3EventRecordGlacierRestoreEventData(self["restoreEventData"])
243248

244249

250+
class S3EventRecordIntelligentTieringEventData(DictWrapper):
251+
@property
252+
def destination_access_tier(self) -> str:
253+
"""The new access tier for the object.
254+
255+
The intelligentTieringEventData key is only visible for IntelligentTiering events.
256+
"""
257+
return self["destinationAccessTier"]
258+
259+
245260
class S3EventRecord(DictWrapper):
246261
@property
247262
def event_version(self) -> str:
@@ -297,6 +312,12 @@ def glacier_event_data(self) -> S3EventRecordGlacierEventData | None:
297312
item = self.get("glacierEventData")
298313
return None if item is None else S3EventRecordGlacierEventData(item)
299314

315+
@property
316+
def intelligent_tiering_event_data(self) -> S3EventRecordIntelligentTieringEventData | None:
317+
"""The intelligentTieringEventData key is only visible for IntelligentTiering events."""
318+
item = self.get("intelligentTieringEventData")
319+
return None if item is None else S3EventRecordIntelligentTieringEventData(item)
320+
300321

301322
class S3Event(DictWrapper):
302323
"""S3 event notification
@@ -325,5 +346,12 @@ def bucket_name(self) -> str:
325346

326347
@property
327348
def object_key(self) -> str:
328-
"""Get the object key for the first s3 event record and unquote plus"""
329-
return unquote_plus(self["Records"][0]["s3"]["object"]["key"])
349+
"""Get the object key for the first s3 event record and unquote plus
350+
351+
Note: IntelligentTiering events use 'get_object' as the key name,
352+
while other S3 events use 'object'. This method handles both cases.
353+
"""
354+
s3_data = self["Records"][0]["s3"]
355+
# IntelligentTiering events use 'get_object', others use 'object'
356+
object_data = s3_data.get("get_object") or s3_data["object"]
357+
return unquote_plus(object_data["key"])

aws_lambda_powertools/utilities/parser/models/s3.py

Lines changed: 39 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -40,6 +40,16 @@ class S3EventRecordGlacierEventData(BaseModel):
4040
)
4141

4242

43+
class S3EventRecordIntelligentTieringEventData(BaseModel):
44+
destinationAccessTier: str = Field(
45+
description="The new access tier for the object. For IntelligentTiering events.",
46+
examples=[
47+
"ARCHIVE_ACCESS",
48+
"DEEP_ARCHIVE_ACCESS",
49+
],
50+
)
51+
52+
4353
class S3Identity(BaseModel):
4454
principalId: str = Field(
4555
description="Amazon identifier of the user, role, account or services who caused the event.",
@@ -178,8 +188,9 @@ class S3Message(BaseModel):
178188
},
179189
],
180190
)
181-
object: S3Object = Field(
182-
description="The S3 object object.",
191+
object: Optional[S3Object] = Field(
192+
default=None,
193+
description="The S3 object object. Used by most S3 event types.",
183194
examples=[
184195
{
185196
"key": "b21b84d653bb07b05b1e6b33684dc11b",
@@ -189,6 +200,20 @@ class S3Message(BaseModel):
189200
},
190201
],
191202
) # noqa: A003
203+
get_object: Optional[S3Object] = Field(
204+
default=None,
205+
alias="get_object",
206+
description="The S3 object object. Used by IntelligentTiering events instead of 'object'.",
207+
examples=[
208+
{
209+
"key": "myobject",
210+
"size": 252294,
211+
"eTag": "4e9270240d7d62d5ee8dbfcb7a7a3279",
212+
"versionId": "tiogA9Ga7Xi49yfJ6lkeTxPYx7ZK75yn",
213+
"sequencer": "0066A8D0E77DE42BC5",
214+
},
215+
],
216+
)
192217

193218

194219
class S3EventNotificationObjectModel(BaseModel):
@@ -449,11 +474,22 @@ class S3RecordModel(BaseModel):
449474
},
450475
],
451476
)
477+
intelligentTieringEventData: Optional[S3EventRecordIntelligentTieringEventData] = Field(
478+
default=None,
479+
description="The Intelligent-Tiering event data object.",
480+
examples=[
481+
{
482+
"destinationAccessTier": "ARCHIVE_ACCESS",
483+
},
484+
],
485+
)
452486

453487
@model_validator(mode="before")
454488
def validate_s3_object(cls, values):
455489
event_name = values.get("eventName")
456-
s3_object = values.get("s3").get("object")
490+
s3_data = values.get("s3")
491+
# IntelligentTiering events use 'get_object' instead of 'object'
492+
s3_object = s3_data.get("object") or s3_data.get("get_object")
457493
if ":Delete" not in event_name and (s3_object.get("size") is None or s3_object.get("eTag") is None):
458494
raise ValueError(
459495
"Size and eTag fields are required for all events except ObjectRemoved:* and LifecycleExpiration:*.",

poetry.lock

Lines changed: 6 additions & 6 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.
Lines changed: 42 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -0,0 +1,42 @@
1+
{
2+
"Records": [
3+
{
4+
"eventVersion": "2.3",
5+
"eventSource": "aws:s3",
6+
"awsRegion": "ap-southeast-2",
7+
"eventTime": "2025-09-29T00:47:23.967Z",
8+
"eventName": "IntelligentTiering",
9+
"userIdentity": {
10+
"principalId": "s3.amazonaws.com"
11+
},
12+
"requestParameters": {
13+
"sourceIPAddress": "s3.amazonaws.com"
14+
},
15+
"responseElements": {
16+
"x-amz-request-id": "4C05BE601E268FCD",
17+
"x-amz-id-2": "Wt0dgzBP/wQ47PWP5BqoD4nAZC08NOjfdZj2prx5gVPBCI+YQtrx/UKVlnT63Dj+vse4x5koWp61HaoVyz6fUFe1yLkhNpK3"
18+
},
19+
"s3": {
20+
"s3SchemaVersion": "1.0",
21+
"configurationId": "ZDM3MGVjY2MtZTlmNC00ZTk0LThiNjItMWJiNTMwOTYwY2Rl",
22+
"bucket": {
23+
"name": "mybucket",
24+
"ownerIdentity": {
25+
"principalId": "ANY597T3BWFY2"
26+
},
27+
"arn": "arn:aws:s3:::mybucket"
28+
},
29+
"get_object": {
30+
"key": "myobject",
31+
"size": 252294,
32+
"eTag": "4e9270240d7d62d5ee8dbfcb7a7a3279",
33+
"versionId": "tiogA9Ga7Xi49yfJ6lkeTxPYx7ZK75yn",
34+
"sequencer": "0066A8D0E77DE42BC5"
35+
}
36+
},
37+
"intelligentTieringEventData": {
38+
"destinationAccessTier": "ARCHIVE_ACCESS"
39+
}
40+
}
41+
]
42+
}
Lines changed: 64 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -0,0 +1,64 @@
1+
from aws_lambda_powertools.utilities.data_classes import S3Event
2+
from tests.functional.utils import load_event
3+
4+
5+
def test_s3_intelligent_tiering_event():
6+
"""Test S3 IntelligentTiering event with get_object field"""
7+
event = S3Event(load_event("s3EventIntelligentTiering.json"))
8+
9+
# Test first record
10+
record = event.record
11+
assert record.event_name == "IntelligentTiering"
12+
assert record.event_version == "2.3"
13+
assert record.event_source == "aws:s3"
14+
assert record.aws_region == "ap-southeast-2"
15+
16+
# Test user identity
17+
assert record.user_identity.principal_id == "s3.amazonaws.com"
18+
19+
# Test S3 object via get_object property (handles both 'object' and 'get_object' keys)
20+
s3_object = record.s3.get_object
21+
assert s3_object.key == "myobject"
22+
assert s3_object.size == 252294
23+
assert s3_object.etag == "4e9270240d7d62d5ee8dbfcb7a7a3279"
24+
assert s3_object.version_id == "tiogA9Ga7Xi49yfJ6lkeTxPYx7ZK75yn"
25+
assert s3_object.sequencer == "0066A8D0E77DE42BC5"
26+
27+
# Test bucket
28+
assert record.s3.bucket.name == "mybucket"
29+
assert record.s3.bucket.arn == "arn:aws:s3:::mybucket"
30+
31+
# Test intelligentTieringEventData
32+
assert record.intelligent_tiering_event_data is not None
33+
assert record.intelligent_tiering_event_data.destination_access_tier == "ARCHIVE_ACCESS"
34+
35+
# Verify glacierEventData is None
36+
assert record.glacier_event_data is None
37+
38+
# Test convenience properties
39+
assert event.bucket_name == "mybucket"
40+
assert event.object_key == "myobject"
41+
42+
43+
def test_s3_intelligent_tiering_event_iteration():
44+
"""Test iterating through multiple IntelligentTiering records"""
45+
event = S3Event(load_event("s3EventIntelligentTiering.json"))
46+
47+
records = list(event.records)
48+
assert len(records) == 1
49+
50+
for record in event.records:
51+
assert record.event_name == "IntelligentTiering"
52+
assert record.s3.get_object.key == "myobject"
53+
assert record.intelligent_tiering_event_data.destination_access_tier == "ARCHIVE_ACCESS"
54+
55+
56+
def test_s3_intelligent_tiering_deep_archive_access():
57+
"""Test IntelligentTiering event with DEEP_ARCHIVE_ACCESS tier"""
58+
raw_event = load_event("s3EventIntelligentTiering.json")
59+
raw_event["Records"][0]["intelligentTieringEventData"]["destinationAccessTier"] = "DEEP_ARCHIVE_ACCESS"
60+
61+
event = S3Event(raw_event)
62+
record = event.record
63+
64+
assert record.intelligent_tiering_event_data.destination_access_tier == "DEEP_ARCHIVE_ACCESS"
Lines changed: 76 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -0,0 +1,76 @@
1+
from aws_lambda_powertools.utilities.parser.models import S3Model, S3RecordModel
2+
from tests.functional.utils import load_event
3+
4+
5+
def test_s3_intelligent_tiering_event():
6+
"""Test parsing of S3 IntelligentTiering events with get_object field"""
7+
raw_event = load_event("s3EventIntelligentTiering.json")
8+
parsed_event: S3Model = S3Model(**raw_event)
9+
10+
records = list(parsed_event.Records)
11+
assert len(records) == 1
12+
13+
record: S3RecordModel = records[0]
14+
raw_record = raw_event["Records"][0]
15+
16+
# Verify basic event properties
17+
assert record.eventVersion == "2.3"
18+
assert record.eventSource == "aws:s3"
19+
assert record.awsRegion == "ap-southeast-2"
20+
assert record.eventName == "IntelligentTiering"
21+
22+
# Verify user identity
23+
user_identity = record.userIdentity
24+
assert user_identity.principalId == "s3.amazonaws.com"
25+
26+
# Verify request parameters
27+
request_parameters = record.requestParameters
28+
# Note: sourceIPAddress is "s3.amazonaws.com" for IntelligentTiering events, not an IP
29+
assert str(request_parameters.sourceIPAddress) == "s3.amazonaws.com"
30+
31+
# Verify response elements
32+
assert record.responseElements.x_amz_request_id == raw_record["responseElements"]["x-amz-request-id"]
33+
assert record.responseElements.x_amz_id_2 == raw_record["responseElements"]["x-amz-id-2"]
34+
35+
# Verify S3 message
36+
s3 = record.s3
37+
assert s3.s3SchemaVersion == raw_record["s3"]["s3SchemaVersion"]
38+
assert s3.configurationId == raw_record["s3"]["configurationId"]
39+
40+
# Verify bucket
41+
bucket = s3.bucket
42+
raw_bucket = raw_record["s3"]["bucket"]
43+
assert bucket.name == "mybucket"
44+
assert bucket.ownerIdentity.principalId == raw_bucket["ownerIdentity"]["principalId"]
45+
assert bucket.arn == "arn:aws:s3:::mybucket"
46+
47+
# Verify get_object field (IntelligentTiering uses 'get_object' instead of 'object')
48+
assert s3.get_object is not None
49+
assert s3.get_object.key == "myobject"
50+
assert s3.get_object.size == 252294
51+
assert s3.get_object.eTag == "4e9270240d7d62d5ee8dbfcb7a7a3279"
52+
assert s3.get_object.versionId == "tiogA9Ga7Xi49yfJ6lkeTxPYx7ZK75yn"
53+
assert s3.get_object.sequencer == "0066A8D0E77DE42BC5"
54+
55+
# Verify intelligentTieringEventData
56+
assert record.intelligentTieringEventData is not None
57+
assert record.intelligentTieringEventData.destinationAccessTier == "ARCHIVE_ACCESS"
58+
59+
# Verify glacierEventData is None for IntelligentTiering events
60+
assert record.glacierEventData is None
61+
62+
63+
def test_s3_intelligent_tiering_event_access_tiers():
64+
"""Test different access tier values for IntelligentTiering events"""
65+
raw_event = load_event("s3EventIntelligentTiering.json")
66+
67+
# Test ARCHIVE_ACCESS tier (from the test event)
68+
parsed_event: S3Model = S3Model(**raw_event)
69+
record = list(parsed_event.Records)[0]
70+
assert record.intelligentTieringEventData.destinationAccessTier == "ARCHIVE_ACCESS"
71+
72+
# Test DEEP_ARCHIVE_ACCESS tier
73+
raw_event["Records"][0]["intelligentTieringEventData"]["destinationAccessTier"] = "DEEP_ARCHIVE_ACCESS"
74+
parsed_event: S3Model = S3Model(**raw_event)
75+
record = list(parsed_event.Records)[0]
76+
assert record.intelligentTieringEventData.destinationAccessTier == "DEEP_ARCHIVE_ACCESS"

0 commit comments

Comments
 (0)