diff --git a/s3tests/functional/test_s3.py b/s3tests/functional/test_s3.py
index 6b35369..eb925bc 100644
--- a/s3tests/functional/test_s3.py
+++ b/s3tests/functional/test_s3.py
@@ -2399,17 +2399,13 @@ def _head_bucket(bucket, authenticated=True):
     eq(res.status, 200)
     eq(res.reason, 'OK')
 
-    result = {}
-
     obj_count = res.getheader('x-rgw-object-count')
-    if obj_count != None:
-        result['x-rgw-object-count'] = int(obj_count)
+    assert obj_count is not None, "x-rgw-object-count wasn't returned"
 
     bytes_used = res.getheader('x-rgw-bytes-used')
-    if bytes_used is not None:
-        result['x-rgw-bytes-used'] = int(bytes_used)
+    assert bytes_used is not None, "x-rgw-bytes-used wasn't returned"
 
-    return result
+    return (int(obj_count), int(bytes_used))
 
 
 @attr(resource='bucket')
@@ -2417,33 +2413,30 @@ def _head_bucket(bucket, authenticated=True):
 @attr(operation='head bucket')
 @attr(assertion='succeeds')
 def test_bucket_head():
-    bucket = get_new_bucket()
+    bucket = _setup_bucket_request('private')
 
     _head_bucket(bucket)
 
 
-# This test relies on Ceph extensions.
-# http://tracker.ceph.com/issues/2313
-@attr('fails_on_aws')
 @attr(resource='bucket')
 @attr(method='head')
 @attr(operation='read bucket extended information')
 @attr(assertion='extended information is getting updated')
 def test_bucket_head_extended():
-    bucket = get_new_bucket()
+    bucket = _setup_bucket_request('private')
 
-    result = _head_bucket(bucket)
+    (obj_count, bytes_used) = _head_bucket(bucket)
 
-    eq(result.get('x-rgw-object-count', 0), 0)
-    eq(result.get('x-rgw-bytes-used', 0), 0)
+    eq(obj_count, 0)
+    eq(bytes_used, 0)
 
     _create_keys(bucket, keys=['foo', 'bar', 'baz'])
 
-    result = _head_bucket(bucket)
+    (obj_count, bytes_used) = _head_bucket(bucket)
 
-    eq(result.get('x-rgw-object-count', 3), 3)
+    eq(obj_count, 3)
 
-    assert result.get('x-rgw-bytes-used', 9) > 0
+    assert bytes_used > 0
 
 
 @attr(resource='bucket.acl')
@@ -4493,10 +4486,10 @@ def test_multipart_upload():
     upload = _multipart_upload(bucket, key, 30 * 1024 * 1024, headers={'Content-Type': content_type}, metadata={'foo': 'bar'})
     upload.complete_upload()
 
-    result = _head_bucket(bucket)
+    (obj_count, bytes_used) = _head_bucket(bucket)
 
-    eq(result.get('x-rgw-object-count', 1), 1)
-    eq(result.get('x-rgw-bytes-used', 30 * 1024 * 1024), 30 * 1024 * 1024)
+    eq(obj_count, 1)
+    eq(bytes_used, 30 * 1024 * 1024)
 
     k=bucket.get_key(key)
     eq(k.metadata['foo'], 'bar')
@@ -4602,10 +4595,10 @@ def test_abort_multipart_upload():
     upload = _multipart_upload(bucket, key, 10 * 1024 * 1024)
     upload.cancel_upload()
 
-    result = _head_bucket(bucket)
+    (obj_count, bytes_used) = _head_bucket(bucket)
 
-    eq(result.get('x-rgw-object-count', 0), 0)
-    eq(result.get('x-rgw-bytes-used', 0), 0)
+    eq(obj_count, 0)
+    eq(bytes_used, 0)
 
 def test_abort_multipart_upload_not_found():
     bucket = get_new_bucket()