Skip to content

Commit

Permalink
s3hash lambda: Fix invalid checksum for some non-canonical objects with existing checksum (#4062)
Browse files Browse the repository at this point in the history
  • Loading branch information
sir-sigurd authored Jul 31, 2024
1 parent c66c225 commit 27b1989
Show file tree
Hide file tree
Showing 3 changed files with 26 additions and 1 deletion.
1 change: 1 addition & 0 deletions lambdas/s3hash/CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ where verb is one of

## Changes

- [Fixed] Fix invalid checksum for some non-canonical objects with existing checksum ([#4062](https://github.com/quiltdata/quilt/pull/4062))
- [Changed] Use per-region scratch buckets ([#3923](https://github.com/quiltdata/quilt/pull/3923))
- [Changed] Always stream bytes in legacy mode ([#3903](https://github.com/quiltdata/quilt/pull/3903))
- [Changed] Compute chunked checksums, adhere to the spec ([#3889](https://github.com/quiltdata/quilt/pull/3889))
Expand Down
4 changes: 3 additions & 1 deletion lambdas/s3hash/src/t4_lambda_s3hash/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -176,7 +176,9 @@ def get_compliant_checksum(attrs: GetObjectAttributesOutputTypeDef) -> T.Optiona
assert "Parts" in object_parts
# Make sure we have _all_ parts.
assert len(object_parts["Parts"]) == num_parts
if all(part.get("Size") == part_size for part in object_parts["Parts"][:-1]):
expected_num_parts, remainder = divmod(attrs["ObjectSize"], part_size)
expected_part_sizes = [part_size] * expected_num_parts + ([remainder] if remainder else [])
if [part.get("Size") for part in object_parts["Parts"]] == expected_part_sizes:
return Checksum.sha256_chunked(base64.b64decode(checksum_value))

return None
Expand Down
22 changes: 22 additions & 0 deletions lambdas/s3hash/tests/test_get_compliant_checksum.py
Original file line number Diff line number Diff line change
Expand Up @@ -120,6 +120,28 @@ def test_no_sha256(obj_attrs):
None,
Checksum.sha256_chunked(base64.b64decode("bGeobZC1xyakKeDkOLWP9khl+vuOditELvPQhrT/R9M=")),
),
(
{
"Checksum": {"ChecksumSHA256": "il9Wb7Il94TeE+T+/QGErPTFP08ua1CpYEIG5p2pxz0="},
"ObjectParts": {
"TotalPartsCount": 1,
"PartNumberMarker": 0,
"NextPartNumberMarker": 1,
"MaxParts": 1000,
"IsTruncated": False,
"Parts": [
{
"PartNumber": 1,
"Size": 8388609,
"ChecksumSHA256": "RFn5V9AxqLeC3+4J0scHCktebDMTCo8grDU5P9l/xXo=",
}
],
},
"ObjectSize": 8388609,
},
Checksum.sha256(base64.b64decode("RFn5V9AxqLeC3+4J0scHCktebDMTCo8grDU5P9l/xXo=")),
None,
)
],
)
def test_single_part(obj_attrs, plain, chunked):
Expand Down

0 comments on commit 27b1989

Please sign in to comment.