a change in the RGW error-response requires changes in s3-tests
result_status = {} + try: r = s3.select_object_content( Bucket=bucket, @@ -292,26 +294,34 @@ def run_s3select(bucket,key,query,column_delim=",",row_delim="\n",quot_char='"', return result if progress == False: - for event in r['Payload']: - if 'Records' in event: - records = event['Records']['Payload'].decode('utf-8') - result += records - else: - result = [] - max_progress_scanned = 0 - for event in r['Payload']: - if 'Records' in event: - records = event['Records'] - result.append(records.copy()) - if 'Progress' in event: - if(event['Progress']['Details']['BytesScanned'] > max_progress_scanned): - max_progress_scanned = event['Progress']['Details']['BytesScanned'] - result_status['Progress'] = event['Progress'] - if 'Stats' in event: - result_status['Stats'] = event['Stats'] - if 'End' in event: - result_status['End'] = event['End'] + try: + for event in r['Payload']: + if 'Records' in event: + records = event['Records']['Payload'].decode('utf-8') + result += records + + except EventStreamError as c: + result = str(c) + return result + + else: + result = [] + max_progress_scanned = 0 + for event in r['Payload']: + if 'Records' in event: + records = event['Records'] + result.append(records.copy()) + if 'Progress' in event: + if(event['Progress']['Details']['BytesScanned'] > max_progress_scanned): + max_progress_scanned = event['Progress']['Details']['BytesScanned'] + result_status['Progress'] = event['Progress'] + + if 'Stats' in event: + result_status['Stats'] = event['Stats'] + if 'End' in event: + result_status['End'] = event['End'] + if progress == False: return result @@ -1160,8 +1170,6 @@ def test_alias(): @pytest.mark.s3select def test_alias_cyclic_refernce(): - ## TEMP : RGW may return error-status that it is not handled by this test - return number_of_rows = 10000 # purpose of test is to validate the s3select-engine is able to detect a cyclic reference to alias. 
@@ -1316,8 +1324,6 @@ def test_csv_definition(): @pytest.mark.s3select def test_schema_definition(): - ## TEMP : RGW may return error-status that it is not handled by this test - return number_of_rows = 10000 # purpose of test is to validate functionality using csv header info @@ -1333,7 +1339,6 @@ def test_schema_definition(): # using the scheme on first line, query is using the attach schema res_use = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,"select c1,c3 from s3object;",csv_header_info="USE") ).replace("\n","") - # result of both queries should be the same s3select_assert_result( res_ignore, res_use) From 27f24ee4d71d18940629e274417d3747e4d61883 Mon Sep 17 00:00:00 2001 From: Casey Bodley Date: Mon, 24 Jun 2024 13:26:23 -0400 Subject: [PATCH 03/10] requirements: unpin pytz version Fixes: https://tracker.ceph.com/issues/66655 Signed-off-by: Casey Bodley --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 7742d8f..724e990 100644 --- a/requirements.txt +++ b/requirements.txt @@ -8,7 +8,7 @@ munch >=2.0.0 gevent >=1.0 isodate >=0.4.4 requests >=2.23.0 -pytz >=2011k +pytz httplib2 lxml pytest From bebdfd1ba7c33f5dd4f68e43a19f273362ba2a71 Mon Sep 17 00:00:00 2001 From: Casey Bodley Date: Wed, 26 Jun 2024 10:50:28 -0400 Subject: [PATCH 04/10] test Get/HeadObject with partNumber for single-multipart upload test_multipart_get_part() tests 'normal' multipart uploads. 
add a new test case for a multipart upload with a single part to test the fix for https://tracker.ceph.com/issues/66705
range + e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key=key, PartNumber=5) + status, error_code = _get_status_and_error_code(e.response) + assert status == 400 + assert error_code == 'InvalidPart' + @pytest.mark.fails_on_dbstore def test_non_multipart_get_part(): bucket_name = get_new_bucket() From 73ed9121f41944f15620031ca4428261510b6bc0 Mon Sep 17 00:00:00 2001 From: Casey Bodley Date: Wed, 1 May 2024 13:59:09 -0400 Subject: [PATCH 05/10] add "checksum" marker, since new checksum tests reference it this removes a Pytest warning during execution Signed-off-by: Matt Benjamin --- pytest.ini | 1 + 1 file changed, 1 insertion(+) diff --git a/pytest.ini b/pytest.ini index 73d1563..1a7d9a8 100644 --- a/pytest.ini +++ b/pytest.ini @@ -7,6 +7,7 @@ markers = auth_common bucket_policy bucket_encryption + checksum cloud_transition encryption fails_on_aws From a3dbac711542e2af3674f6b6d555397746624aee Mon Sep 17 00:00:00 2001 From: Matt Benjamin Date: Wed, 1 May 2024 14:05:52 -0400 Subject: [PATCH 06/10] test_multipart_upload_sha256: work around failures re-trying complete-multipart As described in https://tracker.ceph.com/issues/65746, retrying complete-multipart after having attempted to complete the same upload with a bad checksum argument fails with an internal error. The status code is 500, but I'm unsure if it can be retried again, or whether the upload can be aborted later. 
Signed-off-by: Matt Benjamin --- s3tests_boto3/functional/test_s3.py | 88 +++++++++++++++++++++++++++++ 1 file changed, 88 insertions(+) diff --git a/s3tests_boto3/functional/test_s3.py b/s3tests_boto3/functional/test_s3.py index 47cc525..a8fa059 100644 --- a/s3tests_boto3/functional/test_s3.py +++ b/s3tests_boto3/functional/test_s3.py @@ -13434,3 +13434,91 @@ def test_get_object_torrent(): status, error_code = _get_status_and_error_code(e.response) assert status == 404 assert error_code == 'NoSuchKey' + +@pytest.mark.checksum +def test_object_checksum_sha256(): + bucket = get_new_bucket() + client = get_client() + + key = "myobj" + size = 1024 + body = FakeWriteFile(size, 'A') + sha256sum = 'arcu6553sHVAiX4MjW0j7I7vD4w6R+Gz9Ok0Q9lTa+0=' + response = client.put_object(Bucket=bucket, Key=key, Body=body, ChecksumAlgorithm='SHA256', ChecksumSHA256=sha256sum) + assert sha256sum == response['ChecksumSHA256'] + + response = client.head_object(Bucket=bucket, Key=key) + assert 'ChecksumSHA256' not in response + response = client.head_object(Bucket=bucket, Key=key, ChecksumMode='ENABLED') + assert sha256sum == response['ChecksumSHA256'] + + e = assert_raises(ClientError, client.put_object, Bucket=bucket, Key=key, Body=body, ChecksumAlgorithm='SHA256', ChecksumSHA256='bad') + status, error_code = _get_status_and_error_code(e.response) + assert status == 400 + assert error_code == 'InvalidRequest' + +@pytest.mark.checksum +def test_multipart_checksum_sha256(): + bucket = get_new_bucket() + client = get_client() + + key = "mymultipart" + response = client.create_multipart_upload(Bucket=bucket, Key=key, ChecksumAlgorithm='SHA256') + assert 'SHA256' == response['ChecksumAlgorithm'] + upload_id = response['UploadId'] + + size = 1024 + body = FakeWriteFile(size, 'A') + part_sha256sum = 'arcu6553sHVAiX4MjW0j7I7vD4w6R+Gz9Ok0Q9lTa+0=' + response = client.upload_part(UploadId=upload_id, Bucket=bucket, Key=key, PartNumber=1, Body=body, ChecksumAlgorithm='SHA256', 
ChecksumSHA256=part_sha256sum) + + # should reject the bad request checksum + e = assert_raises(ClientError, client.complete_multipart_upload, Bucket=bucket, Key=key, UploadId=upload_id, ChecksumSHA256='bad', MultipartUpload={'Parts': [ + {'ETag': response['ETag'].strip('"'), 'ChecksumSHA256': response['ChecksumSHA256'], 'PartNumber': 1}]}) + status, error_code = _get_status_and_error_code(e.response) + assert status == 400 + assert error_code == 'InvalidRequest' + + # XXXX re-trying the complete is failing in RGW due to an internal error that appears not caused + # checksums; + # 2024-04-25T17:47:47.991-0400 7f78e3a006c0 0 req 4931907640780566174 0.011000143s s3:complete_multipart check_previously_completed() ERROR: get_obj_attrs() returned ret=-2 + # 2024-04-25T17:47:47.991-0400 7f78e3a006c0 2 req 4931907640780566174 0.011000143s s3:complete_multipart completing + # 2024-04-25T17:47:47.991-0400 7f78e3a006c0 1 req 4931907640780566174 0.011000143s s3:complete_multipart ERROR: either op_ret is negative (execute failed) or target_obj is null, op_ret: -2200 + # -2200 turns into 500, InternalError + + key = "mymultipart2" + response = client.create_multipart_upload(Bucket=bucket, Key=key, ChecksumAlgorithm='SHA256') + assert 'SHA256' == response['ChecksumAlgorithm'] + upload_id = response['UploadId'] + + size = 1024 + body = FakeWriteFile(size, 'A') + part_sha256sum = 'arcu6553sHVAiX4MjW0j7I7vD4w6R+Gz9Ok0Q9lTa+0=' + response = client.upload_part(UploadId=upload_id, Bucket=bucket, Key=key, PartNumber=1, Body=body, ChecksumAlgorithm='SHA256', ChecksumSHA256=part_sha256sum) + + # should reject the missing part checksum + e = assert_raises(ClientError, client.complete_multipart_upload, Bucket=bucket, Key=key, UploadId=upload_id, ChecksumSHA256='bad', MultipartUpload={'Parts': [ + {'ETag': response['ETag'].strip('"'), 'PartNumber': 1}]}) + status, error_code = _get_status_and_error_code(e.response) + assert status == 400 + assert error_code == 'InvalidRequest' + + key = 
verifies composite checksum computation and the logic to propagate parts_count to CompleteMultipart
== response['ChecksumSHA256'] + +@pytest.mark.checksum +def test_multipart_checksum_3parts(): + bucket = get_new_bucket() + client = get_client() + + key = "mymultipart3" + response = client.create_multipart_upload(Bucket=bucket, Key=key, ChecksumAlgorithm='SHA256') + assert 'SHA256' == response['ChecksumAlgorithm'] + upload_id = response['UploadId'] + + size = 5 * 1024 * 1024 # each part but the last must be at least 5M + body = FakeWriteFile(size, 'A') + part1_sha256sum = '275VF5loJr1YYawit0XSHREhkFXYkkPKGuoK0x9VKxI=' + response = client.upload_part(UploadId=upload_id, Bucket=bucket, Key=key, PartNumber=1, Body=body, ChecksumAlgorithm='SHA256', ChecksumSHA256=part1_sha256sum) + etag1 = response['ETag'].strip('"') + + body = FakeWriteFile(size, 'B') + part2_sha256sum = 'mrHwOfjTL5Zwfj74F05HOQGLdUb7E5szdCbxgUSq6NM=' + response = client.upload_part(UploadId=upload_id, Bucket=bucket, Key=key, PartNumber=2, Body=body, ChecksumAlgorithm='SHA256', ChecksumSHA256=part2_sha256sum) + etag2 = response['ETag'].strip('"') + + body = FakeWriteFile(size, 'C') + part3_sha256sum = 'Vw7oB/nKQ5xWb3hNgbyfkvDiivl+U+/Dft48nfJfDow=' + response = client.upload_part(UploadId=upload_id, Bucket=bucket, Key=key, PartNumber=3, Body=body, ChecksumAlgorithm='SHA256', ChecksumSHA256=part3_sha256sum) + etag3 = response['ETag'].strip('"') + + composite_sha256sum = 'uWBwpe1dxI4Vw8Gf0X9ynOdw/SS6VBzfWm9giiv1sf4=-3' + response = client.complete_multipart_upload(Bucket=bucket, Key=key, UploadId=upload_id, ChecksumSHA256=composite_sha256sum, MultipartUpload={'Parts': [ + {'ETag': etag1, 'ChecksumSHA256': response['ChecksumSHA256'], 'PartNumber': 1}, + {'ETag': etag2, 'ChecksumSHA256': response['ChecksumSHA256'], 'PartNumber': 2}, + {'ETag': etag3, 'ChecksumSHA256': response['ChecksumSHA256'], 'PartNumber': 3}]}) + assert composite_sha256sum == response['ChecksumSHA256'] + + response = client.head_object(Bucket=bucket, Key=key) + assert 'ChecksumSHA256' not in response + response = 
client.head_object(Bucket=bucket, Key=key, ChecksumMode='ENABLED') + assert composite_sha256sum == response['ChecksumSHA256'] From 95df503ced29ec0457d572f548b990713f7ae9c1 Mon Sep 17 00:00:00 2001 From: Matt Benjamin Date: Fri, 3 May 2024 16:25:19 -0400 Subject: [PATCH 08/10] add test_post_object_upload_checksum this tests a two-megabyte binary upload with validated (awscli-computed) SHA256 checksum, and also verifies failure when a bad checksum is provided Signed-off-by: Matt Benjamin --- s3tests_boto3/functional/test_s3.py | 53 +++++++++++++++++++++++++++++ 1 file changed, 53 insertions(+) diff --git a/s3tests_boto3/functional/test_s3.py b/s3tests_boto3/functional/test_s3.py index 1a085bd..b8a6142 100644 --- a/s3tests_boto3/functional/test_s3.py +++ b/s3tests_boto3/functional/test_s3.py @@ -13560,3 +13560,56 @@ def test_multipart_checksum_3parts(): assert 'ChecksumSHA256' not in response response = client.head_object(Bucket=bucket, Key=key, ChecksumMode='ENABLED') assert composite_sha256sum == response['ChecksumSHA256'] + +def test_post_object_upload_checksum(): + megabytes = 1024 * 1024 + min_size = 0 + max_size = 5 * megabytes + test_payload_size = 2 * megabytes + + bucket_name = get_new_bucket() + client = get_client() + + url = _get_post_url(bucket_name) + utc = pytz.utc + expires = datetime.datetime.now(utc) + datetime.timedelta(seconds=+6000) + + policy_document = {"expiration": expires.strftime("%Y-%m-%dT%H:%M:%SZ"),\ + "conditions": [\ + {"bucket": bucket_name},\ + ["starts-with", "$key", "foo_cksum_test"],\ + {"acl": "private"},\ + ["starts-with", "$Content-Type", "text/plain"],\ + ["content-length-range", min_size, max_size],\ + ]\ + } + + test_payload = b'x' * test_payload_size + + json_policy_document = json.JSONEncoder().encode(policy_document) + bytes_json_policy_document = bytes(json_policy_document, 'utf-8') + policy = base64.b64encode(bytes_json_policy_document) + aws_secret_access_key = get_main_aws_secret_key() + aws_access_key_id = 
remove duplicate size assignment [rkhudov review]
test_multipart_checksum_sha256(): assert 'SHA256' == response['ChecksumAlgorithm'] upload_id = response['UploadId'] - size = 1024 body = FakeWriteFile(size, 'A') part_sha256sum = 'arcu6553sHVAiX4MjW0j7I7vD4w6R+Gz9Ok0Q9lTa+0=' response = client.upload_part(UploadId=upload_id, Bucket=bucket, Key=key, PartNumber=1, Body=body, ChecksumAlgorithm='SHA256', ChecksumSHA256=part_sha256sum) From 8277a9fb9aa66ab8b24f603c4df545928a813f81 Mon Sep 17 00:00:00 2001 From: Matt Benjamin Date: Wed, 3 Jul 2024 09:42:37 -0400 Subject: [PATCH 10/10] mark two tests that fail on dbstore also add @pytest.mark.checksum for new checksum tests Signed-off-by: Matt Benjamin --- s3tests_boto3/functional/test_s3.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/s3tests_boto3/functional/test_s3.py b/s3tests_boto3/functional/test_s3.py index 24301ba..20cc7c3 100644 --- a/s3tests_boto3/functional/test_s3.py +++ b/s3tests_boto3/functional/test_s3.py @@ -13458,6 +13458,7 @@ def test_object_checksum_sha256(): assert error_code == 'InvalidRequest' @pytest.mark.checksum +@pytest.mark.fails_on_dbstore def test_multipart_checksum_sha256(): bucket = get_new_bucket() client = get_client() @@ -13522,6 +13523,7 @@ def test_multipart_checksum_sha256(): assert composite_sha256sum == response['ChecksumSHA256'] @pytest.mark.checksum +@pytest.mark.fails_on_dbstore def test_multipart_checksum_3parts(): bucket = get_new_bucket() client = get_client() @@ -13559,6 +13561,7 @@ def test_multipart_checksum_3parts(): response = client.head_object(Bucket=bucket, Key=key, ChecksumMode='ENABLED') assert composite_sha256sum == response['ChecksumSHA256'] +@pytest.mark.checksum def test_post_object_upload_checksum(): megabytes = 1024 * 1024 min_size = 0