From bb7e4623b2e8a446e9889e7e6d00738eda287a80 Mon Sep 17 00:00:00 2001
From: weisd
Date: Fri, 24 Oct 2025 16:42:21 +0800
Subject: [PATCH 1/4] feat(e2e): add multipart upload checksum

---
 crates/s3s-e2e/src/advanced.rs | 15 ++++++++++++++-
 1 file changed, 14 insertions(+), 1 deletion(-)

diff --git a/crates/s3s-e2e/src/advanced.rs b/crates/s3s-e2e/src/advanced.rs
index 18c85e7e..64a472f6 100644
--- a/crates/s3s-e2e/src/advanced.rs
+++ b/crates/s3s-e2e/src/advanced.rs
@@ -1,5 +1,6 @@
 use crate::case;
 
+use aws_sdk_s3::types::ChecksumAlgorithm;
 use s3s_test::Result;
 use s3s_test::TestFixture;
 use s3s_test::TestSuite;
@@ -114,7 +115,13 @@ impl Multipart {
         let key = self.key.as_str();
 
         // Create multipart upload
-        let create_resp = s3.create_multipart_upload().bucket(bucket).key(key).send().await?;
+        let create_resp = s3
+            .create_multipart_upload()
+            .bucket(bucket)
+            .key(key)
+            .checksum_algorithm(ChecksumAlgorithm::Crc32)
+            .send()
+            .await?;
 
         let upload_id = create_resp.upload_id().unwrap();
 
@@ -132,6 +139,8 @@ impl Multipart {
             .send()
             .await?;
 
+        let part1_checksum_crc32 = part1_resp.checksum_crc32().expect("checksum_crc32 should be present");
+
         let part2_resp = s3
             .upload_part()
             .bucket(bucket)
@@ -142,15 +151,19 @@ impl Multipart {
             .send()
             .await?;
 
+        let part2_checksum_crc32 = part2_resp.checksum_crc32().expect("checksum_crc32 should be present");
+
         // Complete multipart upload
         let completed_parts = vec![
             aws_sdk_s3::types::CompletedPart::builder()
                 .part_number(1)
                 .e_tag(part1_resp.e_tag().unwrap())
+                .checksum_crc32(part1_checksum_crc32)
                 .build(),
             aws_sdk_s3::types::CompletedPart::builder()
                 .part_number(2)
                 .e_tag(part2_resp.e_tag().unwrap())
+                .checksum_crc32(part2_checksum_crc32)
                 .build(),
         ];
 

From 4a020eeeaa0b5e8f5b3de0f336b36317d168efd4 Mon Sep 17 00:00:00 2001
From: weisd
Date: Fri, 24 Oct 2025 22:25:57 +0800
Subject: [PATCH 2/4] Update crates/s3s-e2e/src/advanced.rs

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
---
 crates/s3s-e2e/src/advanced.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/crates/s3s-e2e/src/advanced.rs b/crates/s3s-e2e/src/advanced.rs
index 64a472f6..884bc089 100644
--- a/crates/s3s-e2e/src/advanced.rs
+++ b/crates/s3s-e2e/src/advanced.rs
@@ -151,7 +151,7 @@ impl Multipart {
             .send()
             .await?;
 
-        let part2_checksum_crc32 = part2_resp.checksum_crc32().expect("checksum_crc32 should be present");
+        let part2_checksum_crc32 = part2_resp.checksum_crc32().expect("part 2 checksum_crc32 should be present in upload response");
 
         // Complete multipart upload
         let completed_parts = vec![

From 6b7f6310940696eeb9e01be2b3cb72e8a4d79709 Mon Sep 17 00:00:00 2001
From: weisd
Date: Fri, 24 Oct 2025 22:33:56 +0800
Subject: [PATCH 3/4] cargo fmt

---
 crates/s3s-e2e/src/advanced.rs | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/crates/s3s-e2e/src/advanced.rs b/crates/s3s-e2e/src/advanced.rs
index 884bc089..ca169af0 100644
--- a/crates/s3s-e2e/src/advanced.rs
+++ b/crates/s3s-e2e/src/advanced.rs
@@ -151,7 +151,9 @@ impl Multipart {
             .send()
             .await?;
 
-        let part2_checksum_crc32 = part2_resp.checksum_crc32().expect("part 2 checksum_crc32 should be present in upload response");
+        let part2_checksum_crc32 = part2_resp
+            .checksum_crc32()
+            .expect("part 2 checksum_crc32 should be present in upload response");
 
         // Complete multipart upload
         let completed_parts = vec![

From 29f0208c6fc577892c52479f5903557b9b06ab4f Mon Sep 17 00:00:00 2001
From: weisd
Date: Tue, 28 Oct 2025 16:14:38 +0800
Subject: [PATCH 4/4] feat(e2e): add test_list_objects_v2_pagination

---
 crates/s3s-e2e/src/basic.rs | 75 +++++++++++++++++++++++++++++++++++++
 1 file changed, 75 insertions(+)

diff --git a/crates/s3s-e2e/src/basic.rs b/crates/s3s-e2e/src/basic.rs
index 0eb24fe8..368334d7 100644
--- a/crates/s3s-e2e/src/basic.rs
+++ b/crates/s3s-e2e/src/basic.rs
@@ -19,6 +19,7 @@ use md5::Digest as _;
 pub fn register(tcx: &mut TestContext) {
     case!(tcx, Basic, Essential, test_list_buckets);
     case!(tcx, Basic, Essential, test_list_objects);
+    case!(tcx, Basic, Essential, test_list_objects_v2_pagination);
     case!(tcx, Basic, Essential, test_get_object);
     case!(tcx, Basic, Essential, test_delete_object);
     case!(tcx, Basic, Essential, test_head_operations);
@@ -256,6 +257,80 @@ impl Essential {
 
         Ok(())
     }
+
+    async fn test_list_objects_v2_pagination(self: Arc<Self>) -> Result {
+        let s3 = &self.s3;
+
+        let bucket = "test-max-keys";
+        let content = "test";
+        let keys_count = 54;
+        let page_size: usize = 10;
+        let keys: Vec<_> = (0..keys_count).map(|i| format!("file-{i:04}.txt")).collect();
+
+        {
+            // Clean up any existing files and bucket
+            for key in &keys {
+                delete_object_loose(s3, bucket, key).await?;
+            }
+            delete_bucket_loose(s3, bucket).await?;
+        }
+
+        {
+            create_bucket(s3, bucket).await?;
+
+            // Create 54 test files
+            for key in &keys {
+                s3.put_object()
+                    .bucket(bucket)
+                    .key(key)
+                    .body(ByteStream::from_static(content.as_bytes()))
+                    .send()
+                    .await?;
+            }
+
+            let mut continuation_token = None;
+            let mut page: usize = 1;
+
+            loop {
+                let response = s3
+                    .list_objects_v2()
+                    .bucket(bucket)
+                    .max_keys(i32::try_from(page_size).unwrap())
+                    .set_continuation_token(continuation_token)
+                    .send()
+                    .await?;
+
+                let contents: Vec<_> = response.contents().iter().filter_map(|obj| obj.key()).collect();
+
+                let mut n: usize = (page - 1) * page_size + page_size - 1;
+
+                if n >= keys_count {
+                    n = keys_count - 1;
+                }
+
+                let last_key = &keys[n];
+
+                assert_eq!(last_key, contents.last().unwrap());
+
+                if response.is_truncated().unwrap_or(false) {
+                    continuation_token = response.next_continuation_token().map(ToString::to_string);
+                    page += 1;
+                } else {
+                    break;
+                }
+            }
+        }
+
+        {
+            // Clean up all test files
+            for key in &keys {
+                delete_object_strict(s3, bucket, key).await?;
+            }
+            delete_bucket_strict(s3, bucket).await?;
+        }
+
+        Ok(())
+    }
 }
 
 struct Put {
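
Note (illustration, not part of the patch series): the pagination assertion in the new test hinges on a small index calculation — the last key returned on page p should be keys[(p - 1) * page_size + page_size - 1], clamped to the final key on the last, short page (54 keys with max_keys = 10 gives five full pages plus a final page of four objects). A minimal standalone Rust sketch of that arithmetic, assuming the same key naming scheme as the test:

    fn main() {
        let keys_count: usize = 54;
        let page_size: usize = 10;
        let keys: Vec<String> = (0..keys_count).map(|i| format!("file-{i:04}.txt")).collect();

        // Walk the pages the same way the test's loop does and print the key
        // each page is expected to end with.
        let total_pages = keys_count.div_ceil(page_size);
        for page in 1..=total_pages {
            // Index of the last key on this page, clamped for the final short page.
            let mut n = (page - 1) * page_size + page_size - 1;
            if n >= keys_count {
                n = keys_count - 1;
            }
            println!("page {page}: last key = {}", keys[n]);
        }
        // Prints file-0009.txt, file-0019.txt, ..., file-0049.txt, file-0053.txt;
        // the last line exercises the clamp branch on the sixth page.
    }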