Compare commits

...

4 commits

Author SHA1 Message Date

Matthew Wodrich
2b28737d60 generate_objects: use rewind=True in upload_objects
Use the rewind=True argument when uploading objects to make
realistic.py's use of upload_objects compatible with boto>=2.4.1, which
will try to seek to the end of the fp if rewind is not True.
2013-03-01 16:02:19 -08:00

Yehuda Sadeh
d9a8eaac4d test_s3: check stats correct after multipart upload
Signed-off-by: Yehuda Sadeh <yehuda@inktank.com>
2013-02-06 10:31:19 -08:00

Yehuda Sadeh
9ce2f75648 rgw: remove fails_on_dho tags
Signed-off-by: Yehuda Sadeh <yehuda@inktank.com>
2013-01-18 10:23:09 -08:00

Yehuda Sadeh
f8e101f6ac test_s3: add test_object_copy_canned_acl
test copy object with canned acl modification

Signed-off-by: Yehuda Sadeh <yehuda@inktank.com>
2013-01-17 13:40:23 -08:00
3 changed files with 31 additions and 20 deletions

s3tests/functional/test_headers.py

@@ -278,7 +278,6 @@ def test_object_create_bad_expect_unreadable():
 @attr(operation='create w/empty content length')
 @attr(assertion='fails 400')
 @nose.with_setup(teardown=_clear_custom_headers)
-@attr('fails_on_dho')
 @attr('fails_on_rgw')
 def test_object_create_bad_contentlength_empty():
     key = _setup_bad_object({'Content-Length': ''})
@@ -294,7 +293,6 @@ def test_object_create_bad_contentlength_empty():
 @attr(operation='create w/negative content length')
 @attr(assertion='fails 400')
 @nose.with_setup(teardown=_clear_custom_headers)
-@attr('fails_on_dho')
 def test_object_create_bad_contentlength_negative():
     key = _setup_bad_object({'Content-Length': -1})
 
@@ -322,7 +320,6 @@ def test_object_create_bad_contentlength_none():
 @attr(operation='create w/non-graphic content length')
 @attr(assertion='fails 400')
 @nose.with_setup(teardown=_clear_custom_headers)
-@attr('fails_on_dho')
 def test_object_create_bad_contentlength_unreadable():
     key = _setup_bad_object({'Content-Length': '\x07'})
 
@@ -403,7 +400,6 @@ def test_object_create_bad_contenttype_none():
 @attr(assertion='fails 403')
 @nose.with_setup(teardown=_clear_custom_headers)
 @attr('fails_on_rgw')
-@attr('fails_on_dho')
 def test_object_create_bad_contenttype_unreadable():
     key = _setup_bad_object({'Content-Type': '\x08'})
 
@@ -464,7 +460,6 @@ def test_object_create_bad_authorization_invalid():
 @attr(assertion='fails 403')
 @nose.with_setup(teardown=_clear_custom_headers)
 @attr('fails_on_rgw')
-@attr('fails_on_dho')
 def test_object_create_bad_authorization_unreadable():
     key = _setup_bad_object({'Authorization': '\x07'})
 
@@ -720,7 +715,6 @@ def _create_new_connection():
 @attr(operation='create w/empty content length')
 @attr(assertion='fails 400')
 @nose.with_setup(teardown=_clear_custom_headers)
-@attr('fails_on_dho')
 @attr('fails_on_rgw')
 def test_bucket_create_bad_contentlength_empty():
     conn = _create_new_connection()
@@ -737,7 +731,6 @@ def test_bucket_create_bad_contentlength_empty():
 @attr(operation='create w/negative content length')
 @attr(assertion='fails 400')
 @nose.with_setup(teardown=_clear_custom_headers)
-@attr('fails_on_dho')
 def test_bucket_create_bad_contentlength_negative():
     _add_custom_headers({'Content-Length': -1})
     e = assert_raises(boto.exception.S3ResponseError, get_new_bucket)
@@ -761,7 +754,6 @@ def test_bucket_create_bad_contentlength_none():
 @attr(operation='create w/non-graphic content length')
 @attr(assertion='fails 400')
 @nose.with_setup(teardown=_clear_custom_headers)
-@attr('fails_on_dho')
 def test_bucket_create_bad_contentlength_unreadable():
     _add_custom_headers({'Content-Length': '\x07'})
     e = assert_raises(boto.exception.S3ResponseError, get_new_bucket)
@@ -822,7 +814,6 @@ def test_bucket_create_bad_authorization_invalid():
 @attr(assertion='fails 403')
 @nose.with_setup(teardown=_clear_custom_headers)
 @attr('fails_on_rgw')
-@attr('fails_on_dho')
 def test_bucket_create_bad_authorization_unreadable():
     _add_custom_headers({'Authorization': '\x07'})
     e = assert_raises(boto.exception.S3ResponseError, get_new_bucket)
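
Aside: the fails_on_dho lines deleted throughout this file are nose attribute tags. s3-tests labels tests known to fail on a given backend so a run can deselect them, and removing a tag returns the test to the default run. A minimal sketch of the mechanism, with an illustrative test name:

    from nose.plugins.attrib import attr

    @attr('fails_on_rgw')          # label the test with an attribute
    def test_something_unsupported():
        pass

A run can then exclude the labelled tests from the command line, e.g. nosetests -a '!fails_on_rgw'.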

s3tests/functional/test_s3.py

@@ -496,7 +496,6 @@ def test_bucket_list_maxkeys_invalid():
 @attr('fails_on_rgw')
-@attr('fails_on_dho')
 @attr(resource='bucket')
 @attr(method='get')
 @attr(operation='list all keys')
@@ -1144,7 +1143,6 @@ def _head_bucket(bucket, authenticated=True):
 @attr(method='head')
 @attr(operation='head bucket')
 @attr(assertion='succeeds')
-@attr('fails_on_dho')
 def test_bucket_head():
     bucket = _setup_bucket_request('private')
 
@@ -1155,7 +1153,6 @@ def test_bucket_head():
 @attr(method='head')
 @attr(operation='read bucket extended information')
 @attr(assertion='extended information is getting updated')
-@attr('fails_on_dho')
 def test_bucket_head_extended():
     bucket = _setup_bucket_request('private')
 
@@ -1213,7 +1210,6 @@ def test_object_raw_authenticated():
 @attr(method='get')
 @attr(operation='authenticated on private bucket/private object with modified response headers')
 @attr(assertion='succeeds')
-@attr('fails_on_dho')
 @attr('fails_on_rgw')
 def test_object_raw_response_headers():
     (bucket, key) = _setup_request('private', 'private')
@@ -2322,7 +2318,6 @@ def test_bucket_acl_no_grants():
 @attr(method='ACLs')
 @attr(operation='add second FULL_CONTROL user')
 @attr(assertion='works for S3, fails for DHO')
-@attr('fails_on_dho')
 def test_bucket_acl_grant_email():
     bucket = get_new_bucket()
     # add alt user
@@ -2394,7 +2389,6 @@ def test_bucket_acl_revoke_all():
 @attr(operation='set/enable/disable logging target')
 @attr(assertion='operations succeed')
 @attr('fails_on_rgw')
-@attr('fails_on_dho')
 def test_logging_toggle():
     bucket = get_new_bucket()
     log_bucket = s3.main.create_bucket(bucket.name + '-log')
@@ -2720,7 +2714,6 @@ def test_bucket_recreate_not_overriding():
 @attr(method='put')
 @attr(operation='create and list objects with special names')
 @attr(assertion='special names work')
-@attr('fails_on_dho')
 def test_bucket_create_special_key_names():
     key_names = [' ', '%', '_', '_ ', '_ _', '__']
     bucket = _create_keys(keys=key_names)
@@ -2759,7 +2752,6 @@ def test_object_copy_same_bucket():
 @attr(method='put')
 @attr(operation='copy object to itself')
 @attr(assertion='fails')
-@attr('fails_on_dho')
 def test_object_copy_to_itself():
     bucket = get_new_bucket()
     key = bucket.new_key('foo123bar')
@@ -2773,7 +2765,6 @@ def test_object_copy_to_itself():
 @attr(method='put')
 @attr(operation='modify object metadata by copying')
 @attr(assertion='fails')
-@attr('fails_on_dho')
 def test_object_copy_to_itself_with_metadata():
     bucket = get_new_bucket()
     key = bucket.new_key('foo123bar')
@@ -2815,6 +2806,27 @@ def test_object_copy_not_owned_bucket():
     except AttributeError:
         pass
 
+@attr(resource='object')
+@attr(method='put')
+@attr(operation='copy object and change acl')
+@attr(assertion='works')
+def test_object_copy_canned_acl():
+    bucket = get_new_bucket()
+    key = bucket.new_key('foo123bar')
+    key.set_contents_from_string('foo')
+
+    # use COPY directive
+    key2 = bucket.copy_key('bar321foo', bucket.name, 'foo123bar', headers={'x-amz-acl': 'public-read'})
+    res = _make_request('GET', bucket, key2)
+    eq(res.status, 200)
+    eq(res.reason, 'OK')
+
+    # use REPLACE directive
+    key3 = bucket.copy_key('bar321foo2', bucket.name, 'foo123bar', headers={'x-amz-acl': 'public-read'}, metadata={'abc': 'def'})
+    res = _make_request('GET', bucket, key3)
+    eq(res.status, 200)
+    eq(res.reason, 'OK')
+
 def transfer_part(bucket, mp_id, mp_keyname, i, part):
     """Transfer a part of a multipart upload. Designed to be run in parallel.
     """
@@ -2870,6 +2882,11 @@ def test_multipart_upload():
     upload = _multipart_upload(bucket, key, 30)
     upload.complete_upload()
 
+    (obj_count, bytes_used) = _head_bucket(bucket)
+
+    eq(obj_count, 1)
+    eq(bytes_used, 30 * 1024 * 1024)
+
 @attr(resource='object')
 @attr(method='put')
 @attr(operation='abort multi-part upload')
@@ -2880,6 +2897,10 @@ def test_abort_multipart_upload():
     upload = _multipart_upload(bucket, key, 10)
     upload.cancel_upload()
 
+    (obj_count, bytes_used) = _head_bucket(bucket)
+    eq(obj_count, 0)
+    eq(bytes_used, 0)
+
 @attr(resource='object')
 @attr(method='put')
@@ -3217,7 +3238,6 @@ def test_atomic_dual_write_8mb():
 @attr(operation='write file in deleted bucket')
 @attr(assertion='fail 404')
 @attr('fails_on_aws')
-@attr('fails_on_dho')
 def test_atomic_write_bucket_gone():
     bucket = get_new_bucket()
 
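
A note on the new test_object_copy_canned_acl: both copy_key calls send the same x-amz-acl header; the COPY/REPLACE difference in the comments comes from boto itself, which sets the x-amz-metadata-directive header to REPLACE when a metadata dict is passed and to COPY otherwise. A minimal sketch of the two call shapes, with illustrative key names:

    # COPY (no metadata argument): source metadata is carried over
    bucket.copy_key('dst-copy', bucket.name, 'src',
                    headers={'x-amz-acl': 'public-read'})

    # REPLACE (metadata= supplied): source metadata is replaced
    bucket.copy_key('dst-replace', bucket.name, 'src',
                    headers={'x-amz-acl': 'public-read'},
                    metadata={'abc': 'def'})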
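
The stats assertions added to the two multipart tests rely on the _head_bucket helper whose signature is visible in the hunk context above (old line 1144). For reference, a minimal sketch of what such a helper does, assuming radosgw's extended HEAD Bucket response; the x-rgw-* headers are RGW extensions rather than standard S3, and _make_bucket_request is assumed to behave like the _make_request used elsewhere in the file:

    def _head_bucket(bucket, authenticated=True):
        # HEAD the bucket and read radosgw's usage accounting headers
        res = _make_bucket_request('HEAD', bucket, authenticated=authenticated)
        eq(res.status, 200)
        eq(res.reason, 'OK')
        obj_count = res.getheader('x-rgw-object-count')
        bytes_used = res.getheader('x-rgw-bytes-used')
        return (int(obj_count), int(bytes_used))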

s3tests/realistic.py

@@ -46,7 +46,7 @@ def upload_objects(bucket, files, seed):
         print >> sys.stderr, 'sending file with size %dB' % fp.size
         key = Key(bucket)
         key.key = name_generator.next()
-        key.set_contents_from_file(fp)
+        key.set_contents_from_file(fp, rewind=True)
         key.set_acl('public-read')
         keys.append(key)
 
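
Why rewind=True matters, per the first commit message above: from boto 2.4.1 on, set_contents_from_file determines the upload size by seeking to the end of the fp unless rewind=True is given, in which case it seeks back to position 0 and sends the whole stream; realistic.py's generated file objects were tripped up by the end-seek. A minimal sketch of the safe pattern, with an illustrative payload (Python 2, matching the print >> syntax above):

    from StringIO import StringIO
    from boto.s3.key import Key

    def upload_payload(bucket, name, payload):
        fp = StringIO(payload)
        key = Key(bucket)
        key.key = name
        # rewind=True: boto seeks fp back to 0 and uploads from the start,
        # instead of seeking to the end to size it (boto >= 2.4.1 behavior)
        key.set_contents_from_file(fp, rewind=True)
        return key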