Mirror of https://github.com/ceph/s3-tests.git
boto3/test_s3: range encoding helper function takes a size argument
This avoids a get_object call for every range check, since the object size will not change for the duration of the checks and we'd most likely already know the object sizes beforehand.

Signed-off-by: Abhishek Lekshmanan <abhishek@suse.com>
parent b05a394738
commit 741f2cbc9e
1 changed file with 3 additions and 5 deletions
@@ -9302,9 +9302,7 @@ def _multipart_upload_enc(client, bucket_name, key, size, part_size, init_header
     return (upload_id, s, parts)
 
-def _check_content_using_range_enc(client, bucket_name, key, data, step, enc_headers=None):
-    response = client.get_object(Bucket=bucket_name, Key=key)
-    size = response['ContentLength']
+def _check_content_using_range_enc(client, bucket_name, key, data, size, step, enc_headers=None):
     for ofs in range(0, size, step):
         toread = size - ofs
         if toread > step:
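For orientation, the hunk shows only the top of the helper. The following is a minimal sketch of what the rest of the loop plausibly does, assuming a boto3 client and assuming the suite compares each ranged GetObject against the matching slice of data; everything past the visible context lines is an assumption for illustration, not part of this diff.

def _check_content_using_range_enc(client, bucket_name, key, data, size, step, enc_headers=None):
    # size is now passed in by the caller, so the helper no longer issues
    # its own GetObject just to learn the object length.
    for ofs in range(0, size, step):
        toread = size - ofs
        if toread > step:
            toread = step
        # --- everything below is an assumption about the unshown loop body ---
        end = ofs + toread - 1
        # Fetch only this byte range; enc_headers is assumed to hold boto3
        # SSE-C parameters (SSECustomerAlgorithm/SSECustomerKey/SSECustomerKeyMD5),
        # though the real suite may inject its encryption headers differently.
        response = client.get_object(Bucket=bucket_name, Key=key,
                                     Range='bytes={}-{}'.format(ofs, end),
                                     **(enc_headers or {}))
        body = response['Body'].read()
        assert response['ContentLength'] == toread
        # data is assumed to be the bytes that were originally uploaded
        assert body == data[ofs:ofs + toread]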
@@ -9362,8 +9360,8 @@ def test_encryption_sse_c_multipart_upload():
     size = response['ContentLength']
     assert len(body) == size
 
-    _check_content_using_range_enc(client, bucket_name, key, data, 1000000, enc_headers=enc_headers)
-    _check_content_using_range_enc(client, bucket_name, key, data, 10000000, enc_headers=enc_headers)
+    _check_content_using_range_enc(client, bucket_name, key, data, size, 1000000, enc_headers=enc_headers)
+    _check_content_using_range_enc(client, bucket_name, key, data, size, 10000000, enc_headers=enc_headers)
 
 @pytest.mark.encryption
 def test_encryption_sse_c_unaligned_multipart_upload():
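Caller-side, the second hunk settles on the pattern sketched below: fetch the object once, record its ContentLength, and reuse that size for both range-check passes instead of having the helper re-query it. Names mirror the diff context above; the client setup, the uploaded data, and any SSE-C parameters in enc_headers are assumed to come from the surrounding test.

# Hypothetical caller-side sketch; client, bucket_name, key, data and
# enc_headers are assumed to exist in the surrounding test setup.
response = client.get_object(Bucket=bucket_name, Key=key, **(enc_headers or {}))
body = response['Body'].read()
size = response['ContentLength']   # object size learned once, up front
assert len(body) == size

# The helper now reuses that size rather than issuing another GetObject.
_check_content_using_range_enc(client, bucket_name, key, data, size,
                               1000000, enc_headers=enc_headers)
_check_content_using_range_enc(client, bucket_name, key, data, size,
                               10000000, enc_headers=enc_headers)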