radosgw-admin: test resend of a multipart upload part

Signed-off-by: Yehuda Sadeh <yehuda@redhat.com>

Conflicts:
	s3tests/functional/test_s3.py
Yehuda Sadeh 2015-05-12 17:06:11 -07:00
parent f041a73153
commit 2b3ba675d6

--- a/s3tests/functional/test_s3.py
+++ b/s3tests/functional/test_s3.py
@@ -4352,25 +4352,56 @@ def generate_random(size, part_size=5*1024*1024):
         this_part_size = min(left, part_size)
         for y in range(this_part_size / chunk):
             s = s + strpart
+        remaining = this_part_size - len(s)
+        if remaining > 0:
+            s += strpart[0:remaining]
         yield s
         if (x == size):
             return
 
-def _multipart_upload(bucket, s3_key_name, size, part_size=5*1024*1024, do_list=None, headers=None, metadata=None):
+def _multipart_upload(bucket, s3_key_name, size, part_size=5*1024*1024, do_list=None, headers=None, metadata=None, resend_part=-1):
     """
     generate a multi-part upload for a random file of specifed size,
     if requested, generate a list of the parts
     return the upload descriptor
     """
     upload = bucket.initiate_multipart_upload(s3_key_name, headers=headers, metadata=metadata)
+    s = ''
     for i, part in enumerate(generate_random(size, part_size)):
+        s += part
         transfer_part(bucket, upload.id, upload.key_name, i, part)
+        if resend_part == i:
+            transfer_part(bucket, upload.id, upload.key_name, i, part)
 
     if do_list is not None:
         l = bucket.list_multipart_uploads()
         l = list(l)
 
-    return upload
+    return (upload, s)
+
+@attr(resource='object')
+@attr(method='put')
+@attr(operation='check multipart upload without parts')
+def test_multipart_upload_empty():
+    bucket = get_new_bucket()
+    key = "mymultipart"
+    (upload, data) = _multipart_upload(bucket, key, 0)
+    e = assert_raises(boto.exception.S3ResponseError, upload.complete_upload)
+    eq(e.status, 400)
+    eq(e.error_code, u'MalformedXML')
+
+@attr(resource='object')
+@attr(method='put')
+@attr(operation='check multipart uploads with single small part')
+def test_multipart_upload_small():
+    bucket = get_new_bucket()
+    key = "mymultipart"
+    size = 1
+    (upload, data) = _multipart_upload(bucket, key, size)
+    upload.complete_upload()
+    key2 = bucket.get_key(key)
+    eq(key2.size, size)
+
 @attr(resource='object')
 @attr(method='put')
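The padding added to generate_random() above is what lets the new test_multipart_upload_small pass: without it, any part size that is not a multiple of the 1 KiB chunk (including the 1-byte case) would come up short. A quick standalone check of that logic; the padded_part helper below is illustrative and not part of the patch, it just mirrors the patched inner loop.

# Illustrative check of the new padding logic in generate_random().
def padded_part(this_part_size, chunk=1024, strpart='a' * 1024):
    s = ''
    for _ in range(this_part_size // chunk):
        s = s + strpart
    remaining = this_part_size - len(s)
    if remaining > 0:
        s += strpart[0:remaining]
    return s

assert len(padded_part(1)) == 1    # the 1-byte part used by test_multipart_upload_small
assert len(padded_part(5 * 1024 * 1024 + 100)) == 5 * 1024 * 1024 + 100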
@@ -4380,7 +4411,7 @@ def test_multipart_upload():
     bucket = get_new_bucket()
     key="mymultipart"
     content_type='text/bla'
-    upload = _multipart_upload(bucket, key, 30 * 1024 * 1024, headers={'Content-Type': content_type}, metadata={'foo': 'bar'})
+    (upload, data) = _multipart_upload(bucket, key, 30 * 1024 * 1024, headers={'Content-Type': content_type}, metadata={'foo': 'bar'})
     upload.complete_upload()
 
     (obj_count, bytes_used) = _head_bucket(bucket)
@@ -4395,26 +4426,51 @@ def test_multipart_upload():
 @attr(resource='object')
 @attr(method='put')
 @attr(operation='complete multiple multi-part upload with different sizes')
+@attr(resource='object')
+@attr(method='put')
+@attr(operation='complete multi-part upload')
+@attr(assertion='successful')
+def test_multipart_upload_resend_part():
+    bucket = get_new_bucket()
+    key="mymultipart"
+    content_type='text/bla'
+    objlen = 30 * 1024 * 1024
+    (upload, data) = _multipart_upload(bucket, key, objlen, headers={'Content-Type': content_type}, metadata={'foo': 'bar'}, resend_part=1)
+    upload.complete_upload()
+
+    (obj_count, bytes_used) = _head_bucket(bucket)
+    # eq(obj_count, 1)
+    # eq(bytes_used, 30 * 1024 * 1024)
+
+    k=bucket.get_key(key)
+    eq(k.metadata['foo'], 'bar')
+    eq(k.content_type, content_type)
+    test_string=k.get_contents_as_string()
+    eq(k.size, len(test_string))
+    eq(k.size, objlen)
+    eq(test_string, data)
+
 @attr(assertion='successful')
 def test_multipart_upload_multiple_sizes():
     bucket = get_new_bucket()
     key="mymultipart"
-    upload = _multipart_upload(bucket, key, 5 * 1024 * 1024)
+    (upload, data) = _multipart_upload(bucket, key, 5 * 1024 * 1024)
     upload.complete_upload()
 
-    upload = _multipart_upload(bucket, key, 5 * 1024 * 1024 + 100 * 1024)
+    (upload, data) = _multipart_upload(bucket, key, 5 * 1024 * 1024 + 100 * 1024)
     upload.complete_upload()
 
-    upload = _multipart_upload(bucket, key, 5 * 1024 * 1024 + 600 * 1024)
+    (upload, data) = _multipart_upload(bucket, key, 5 * 1024 * 1024 + 600 * 1024)
     upload.complete_upload()
 
-    upload = _multipart_upload(bucket, key, 10 * 1024 * 1024 + 100 * 1024)
+    (upload, data) = _multipart_upload(bucket, key, 10 * 1024 * 1024 + 100 * 1024)
     upload.complete_upload()
 
-    upload = _multipart_upload(bucket, key, 10 * 1024 * 1024 + 600 * 1024)
+    (upload, data) = _multipart_upload(bucket, key, 10 * 1024 * 1024 + 600 * 1024)
     upload.complete_upload()
 
-    upload = _multipart_upload(bucket, key, 10 * 1024 * 1024)
+    (upload, data) = _multipart_upload(bucket, key, 10 * 1024 * 1024)
     upload.complete_upload()
 
 @attr(resource='object')
@@ -4424,7 +4480,7 @@ def test_multipart_upload_multiple_sizes():
 def test_multipart_upload_size_too_small():
     bucket = get_new_bucket()
     key="mymultipart"
-    upload = _multipart_upload(bucket, key, 100 * 1024, part_size=10*1024)
+    (upload, data) = _multipart_upload(bucket, key, 100 * 1024, part_size=10*1024)
     e = assert_raises(boto.exception.S3ResponseError, upload.complete_upload)
     eq(e.status, 400)
     eq(e.error_code, u'EntityTooSmall')
@@ -4489,7 +4545,7 @@ def test_multipart_upload_overwrite_existing_object():
 def test_abort_multipart_upload():
     bucket = get_new_bucket()
     key="mymultipart"
-    upload = _multipart_upload(bucket, key, 10 * 1024 * 1024)
+    (upload, data) = _multipart_upload(bucket, key, 10 * 1024 * 1024)
     upload.cancel_upload()
 
     (obj_count, bytes_used) = _head_bucket(bucket)
@@ -4505,11 +4561,11 @@ def test_list_multipart_upload():
     bucket = get_new_bucket()
     key="mymultipart"
     mb = 1024 * 1024
-    upload1 = _multipart_upload(bucket, key, 5 * mb, do_list = True)
-    upload2 = _multipart_upload(bucket, key, 6 * mb, do_list = True)
+    (upload1, data) = _multipart_upload(bucket, key, 5 * mb, do_list = True)
+    (upload2, data) = _multipart_upload(bucket, key, 6 * mb, do_list = True)
 
     key2="mymultipart2"
-    upload3 = _multipart_upload(bucket, key2, 5 * mb, do_list = True)
+    (upload3, data) = _multipart_upload(bucket, key2, 5 * mb, do_list = True)
 
     l = bucket.list_multipart_uploads()
     l = list(l)
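For reference, the S3 behavior the new resend test relies on is that uploading the same part number twice before completing the upload simply replaces the earlier copy, so the assembled object contains each part number exactly once. A minimal standalone sketch of that sequence with boto2; the connection setup and bucket name are illustrative assumptions, not taken from this patch.

# Sketch only: resend part 1 of a multipart upload and check the result.
from cStringIO import StringIO
import boto.s3.connection

conn = boto.s3.connection.S3Connection()      # assumed: credentials/endpoint configured elsewhere
bucket = conn.get_bucket('my-test-bucket')    # hypothetical, pre-existing bucket

part = 'x' * (5 * 1024 * 1024)                # 5 MiB, the minimum size for a non-final part
mp = bucket.initiate_multipart_upload('resend-demo')
mp.upload_part_from_file(StringIO(part), part_num=1)
mp.upload_part_from_file(StringIO(part), part_num=1)   # resend the same part number
mp.upload_part_from_file(StringIO(part), part_num=2)
mp.complete_upload()

key = bucket.get_key('resend-demo')
assert key.size == 2 * len(part)              # the resent part is stored only once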