Compare commits

...

4 commits

Author          SHA1         Message                                        Date
Yehuda Sadeh    7504073866   Merge pull request #39 from fzylogic/congress  2015-01-20 18:05:58 -08:00
    disable 2 tests that currently fail in DHO

Jeremy Hanmer   6b4c00559d   disable 2 tests that currently fail in DHO     2015-01-20 17:29:27 -08:00
    Disable these 2 tests that are currently failing in DHO:
    s3tests.functional.test_headers.test_object_create_bad_contentlength_mismatch_above
    s3tests.functional.test_s3.test_cors_origin_response

Yehuda Sadeh    2ba0de0021   Revert "rgw: remove fails_on_dho tags"         2015-01-20 15:41:58 -08:00
    This reverts commit 9ce2f75648.

Yehuda Sadeh    53e6f52d86   test_s3: add is_slow_backend                   2014-10-27 09:10:22 -07:00
    for slow ec region copy
    Signed-off-by: Yehuda Sadeh <yehuda@inktank.com>
    (cherry picked from commit 97beb9c816)
3 changed files with 39 additions and 2 deletions

View file

@@ -26,6 +26,9 @@ def get_prefix():
     assert prefix is not None
     return prefix
 
+def is_slow_backend():
+    return slow_backend
+
 def choose_bucket_prefix(template, max_len=30):
     """
     Choose a prefix for our test buckets, so they're easy to identify.
@@ -237,6 +240,7 @@ def setup():
 
     global prefix
     global targets
+    global slow_backend
 
     try:
         template = cfg.get('fixtures', 'bucket prefix')
@@ -244,6 +248,11 @@ def setup():
         template = 'test-{random}-'
     prefix = choose_bucket_prefix(template=template)
 
+    try:
+        slow_backend = cfg.getboolean('fixtures', 'slow backend')
+    except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
+        slow_backend = False
+
     # pull the default_region out, if it exists
     try:
         default_region = cfg.get('fixtures', 'default_region')
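
The hunks above add an optional boolean 'slow backend' to the [fixtures] section of the test configuration, falling back to False when the option (or the whole section) is missing, and expose it through is_slow_backend(). A minimal Python 2 sketch of that lookup using an in-memory config; only the section name 'fixtures' and the option name 'slow backend' come from the diff, the sample values and file handling are illustrative assumptions:

import ConfigParser
import StringIO

# Illustrative config text; a real run reads the suite's configuration
# file instead (assumed here, not shown in this diff).
sample = """
[fixtures]
bucket prefix = test-{random}-
slow backend = yes
"""

cfg = ConfigParser.RawConfigParser()
cfg.readfp(StringIO.StringIO(sample))

# Same fallback pattern as setup() above: absent option means a fast backend.
try:
    slow_backend = cfg.getboolean('fixtures', 'slow backend')
except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
    slow_backend = False

print slow_backend   # True; remove the option from [fixtures] and this prints False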

View file

@@ -278,6 +278,7 @@ def test_object_create_bad_expect_unreadable():
 @attr(operation='create w/empty content length')
 @attr(assertion='fails 400')
 @nose.with_setup(teardown=_clear_custom_headers)
+@attr('fails_on_dho')
 @attr('fails_on_rgw')
 def test_object_create_bad_contentlength_empty():
     key = _setup_bad_object({'Content-Length': ''})
@@ -293,6 +294,7 @@ def test_object_create_bad_contentlength_empty():
 @attr(operation='create w/negative content length')
 @attr(assertion='fails 400')
 @nose.with_setup(teardown=_clear_custom_headers)
+@attr('fails_on_dho')
 def test_object_create_bad_contentlength_negative():
     key = _setup_bad_object({'Content-Length': -1})
@@ -319,6 +321,7 @@ def test_object_create_bad_contentlength_none():
 @attr(operation='create w/non-graphic content length')
 @attr(assertion='fails 400')
 @nose.with_setup(teardown=_clear_custom_headers)
+@attr('fails_on_dho')
 def test_object_create_bad_contentlength_unreadable():
     key = _setup_bad_object({'Content-Length': '\x07'})
@@ -334,6 +337,7 @@ def test_object_create_bad_contentlength_unreadable():
 @attr(assertion='fails 400')
 @nose.with_setup(teardown=_clear_custom_headers)
 @attr('fails_on_rgw')
+@attr('fails_on_dho')
 def test_object_create_bad_contentlength_mismatch_above():
     content = 'bar'
     length = len(content) + 1
@@ -399,6 +403,7 @@ def test_object_create_bad_contenttype_none():
 @attr(assertion='fails 403')
 @nose.with_setup(teardown=_clear_custom_headers)
 @attr('fails_on_rgw')
+@attr('fails_on_dho')
 def test_object_create_bad_contenttype_unreadable():
     key = _setup_bad_object({'Content-Type': '\x08'})
@@ -459,6 +464,7 @@ def test_object_create_bad_authorization_invalid():
 @attr(assertion='fails 403')
 @nose.with_setup(teardown=_clear_custom_headers)
 @attr('fails_on_rgw')
+@attr('fails_on_dho')
 def test_object_create_bad_authorization_unreadable():
     key = _setup_bad_object({'Authorization': '\x07'})
@@ -714,6 +720,7 @@ def _create_new_connection():
 @attr(operation='create w/empty content length')
 @attr(assertion='fails 400')
 @nose.with_setup(teardown=_clear_custom_headers)
+@attr('fails_on_dho')
 @attr('fails_on_rgw')
 def test_bucket_create_bad_contentlength_empty():
     conn = _create_new_connection()
@@ -730,6 +737,7 @@ def test_bucket_create_bad_contentlength_empty():
 @attr(operation='create w/negative content length')
 @attr(assertion='fails 400')
 @nose.with_setup(teardown=_clear_custom_headers)
+@attr('fails_on_dho')
 def test_bucket_create_bad_contentlength_negative():
     _add_custom_headers({'Content-Length': -1})
     e = assert_raises(boto.exception.S3ResponseError, get_new_bucket)
@@ -752,6 +760,7 @@ def test_bucket_create_bad_contentlength_none():
 @attr(operation='create w/non-graphic content length')
 @attr(assertion='fails 400')
 @nose.with_setup(teardown=_clear_custom_headers)
+@attr('fails_on_dho')
 def test_bucket_create_bad_contentlength_unreadable():
     _add_custom_headers({'Content-Length': '\x07'})
     e = assert_raises(boto.exception.S3ResponseError, get_new_bucket)
@@ -812,6 +821,7 @@ def test_bucket_create_bad_authorization_invalid():
 @attr(assertion='fails 403')
 @nose.with_setup(teardown=_clear_custom_headers)
 @attr('fails_on_rgw')
+@attr('fails_on_dho')
 def test_bucket_create_bad_authorization_unreadable():
     _add_custom_headers({'Authorization': '\x07'})
     e = assert_raises(boto.exception.S3ResponseError, get_new_bucket)
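
Every line added in this file is a nose attribute tag. The attr decorator from nose.plugins.attrib simply sets a named attribute on the test function; nose's attribute selector can then include or exclude tests on that basis, for example -a '!fails_on_dho' to skip the tests tagged above when running against DHO. A small self-contained sketch, using a hypothetical test function rather than one from this suite:

from nose.plugins.attrib import attr

@attr('fails_on_dho')
@attr(resource='object')
@attr(method='put')
def test_example():
    # Hypothetical stand-in for the tagged tests in this file.
    pass

# attr() just sets attributes on the function object, which is what
# nosetests -a / --attr filters on.
assert test_example.fails_on_dho is True
assert test_example.resource == 'object'
assert test_example.method == 'put'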

View file

@@ -47,6 +47,7 @@ from . import (
     targets,
     config,
     get_prefix,
+    is_slow_backend,
     )
@@ -559,6 +560,7 @@ def test_bucket_list_maxkeys_invalid():
 @attr('fails_on_rgw')
+@attr('fails_on_dho')
 @attr(resource='bucket')
 @attr(method='get')
 @attr(operation='list all keys')
@@ -2182,6 +2184,7 @@ def _head_bucket(bucket, authenticated=True):
 @attr(method='head')
 @attr(operation='head bucket')
 @attr(assertion='succeeds')
+@attr('fails_on_dho')
 def test_bucket_head():
     bucket = _setup_bucket_request('private')
@@ -2192,6 +2195,7 @@ def test_bucket_head():
 @attr(method='head')
 @attr(operation='read bucket extended information')
 @attr(assertion='extended information is getting updated')
+@attr('fails_on_dho')
 def test_bucket_head_extended():
     bucket = _setup_bucket_request('private')
@@ -2249,6 +2253,7 @@ def test_object_raw_authenticated():
 @attr(method='get')
 @attr(operation='authenticated on private bucket/private object with modified response headers')
 @attr(assertion='succeeds')
+@attr('fails_on_dho')
 @attr('fails_on_rgw')
 def test_object_raw_response_headers():
     (bucket, key) = _setup_request('private', 'private')
@@ -3581,6 +3586,7 @@ def test_bucket_header_acl_grants():
 @attr(method='ACLs')
 @attr(operation='add second FULL_CONTROL user')
 @attr(assertion='works for S3, fails for DHO')
+@attr('fails_on_dho')
 def test_bucket_acl_grant_email():
     bucket = get_new_bucket()
     # add alt user
@@ -3652,6 +3658,7 @@ def test_bucket_acl_revoke_all():
 @attr(operation='set/enable/disable logging target')
 @attr(assertion='operations succeed')
 @attr('fails_on_rgw')
+@attr('fails_on_dho')
 def test_logging_toggle():
     bucket = get_new_bucket()
     log_bucket = get_new_bucket(targets.main.default, bucket.name + '-log')
@@ -3977,6 +3984,7 @@ def test_bucket_recreate_not_overriding():
 @attr(method='put')
 @attr(operation='create and list objects with special names')
 @attr(assertion='special names work')
+@attr('fails_on_dho')
 def test_bucket_create_special_key_names():
     key_names = [' ', '%', '_', '_ ', '_ _', '__']
     bucket = _create_keys(keys=key_names)
@@ -4029,6 +4037,7 @@ def test_object_copy_same_bucket():
 @attr(method='put')
 @attr(operation='copy object to itself')
 @attr(assertion='fails')
+@attr('fails_on_dho')
 def test_object_copy_to_itself():
     bucket = get_new_bucket()
     key = bucket.new_key('foo123bar')
@@ -4042,6 +4051,7 @@ def test_object_copy_to_itself():
 @attr(method='put')
 @attr(operation='modify object metadata by copying')
 @attr(assertion='fails')
+@attr('fails_on_dho')
 def test_object_copy_to_itself_with_metadata():
     bucket = get_new_bucket()
     key = bucket.new_key('foo123bar')
@@ -4087,6 +4097,7 @@ def test_object_copy_not_owned_bucket():
 @attr(method='put')
 @attr(operation='copy object and change acl')
 @attr(assertion='works')
+@attr('fails_on_dho')
 def test_object_copy_canned_acl():
     bucket = get_new_bucket()
     key = bucket.new_key('foo123bar')
@@ -4401,6 +4412,7 @@ def _cors_request_and_check(func, url, headers, expect_status, expect_allow_orig
 @attr(method='get')
 @attr(operation='check cors response when origin header set')
 @attr(assertion='returning cors header')
+@attr('fails_on_dho')
 def test_cors_origin_response():
     cfg = CORSConfiguration()
     bucket = get_new_bucket()
@@ -4711,6 +4723,7 @@ def test_atomic_dual_write_8mb():
 @attr(operation='write file in deleted bucket')
 @attr(assertion='fail 404')
 @attr('fails_on_aws')
+@attr('fails_on_dho')
 def test_atomic_write_bucket_gone():
     bucket = get_new_bucket()
@@ -4806,6 +4819,7 @@ def test_region_bucket_create_master_access_remove_secondary():
         e = assert_raises(boto.exception.S3ResponseError, master_conn.get_bucket, bucket.name)
         eq(e.status, 404)
 
+
 @attr(resource='object')
 @attr(method='copy')
 @attr(operation='copy object between regions, verify')
@@ -4821,8 +4835,12 @@ def test_region_copy_object():
         print 'created new dest bucket ', dest_bucket.name
         region_sync_meta(targets.main, dest)
 
-        for file_size in (1024, 1024 * 1024, 10 * 1024 * 1024,
-                          100 * 1024 * 1024):
+        if is_slow_backend():
+            sizes = (1024, 10 * 1024 * 1024)
+        else:
+            sizes = (1024, 10 * 1024 * 1024, 100 * 1024 * 1024)
+
+        for file_size in sizes:
             for (k2, r) in targets.main.iteritems():
                 if r == dest_conn:
                     continue