forked from TrueCloudLab/s3-tests
Compare commits
2 commits
master...wip-rgw-re
Author | SHA1 | Date
---|---|---
 | 6315c1bb68 |
 | 1f19abf43d |
2 changed files with 20 additions and 5 deletions
@@ -26,6 +26,9 @@ def get_prefix():
     assert prefix is not None
     return prefix
 
+def is_slow_backend():
+    return slow_backend
+
 def choose_bucket_prefix(template, max_len=30):
     """
     Choose a prefix for our test buckets, so they're easy to identify.
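A note on the accessor: the tests import and call is_slow_backend() rather than pulling the slow_backend global in directly, presumably because a from-import would bind the value once at import time, before setup() has assigned it, while the function reads the module global on every call. A minimal, self-contained sketch of that distinction (illustrative names only, not code from the repo):

# Python 2, to match the suite.  The "snapshot" below is what a from-import
# of the bare global would effectively hand a test module.
slow_backend = False            # module-level default

def is_slow_backend():
    return slow_backend         # reads the current value at call time

def setup():
    global slow_backend
    slow_backend = True         # stands in for cfg.getboolean('fixtures', 'slow backend')

snapshot = slow_backend         # value copied before setup() runs
setup()
print snapshot                  # False -- stale copy
print is_slow_backend()         # True  -- sees the value set by setup()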
@@ -237,6 +240,7 @@ def setup():
 
     global prefix
     global targets
+    global slow_backend
 
     try:
         template = cfg.get('fixtures', 'bucket prefix')
@@ -244,6 +248,11 @@ def setup():
         template = 'test-{random}-'
     prefix = choose_bucket_prefix(template=template)
 
+    try:
+        slow_backend = cfg.getboolean('fixtures', 'slow backend')
+    except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
+        slow_backend = False
+
     # pull the default_region out, if it exists
     try:
         default_region = cfg.get('fixtures', 'default_region')
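For reference, the new option sits in the existing [fixtures] section of the test configuration and can simply be omitted, since the except clause above falls back to False; ConfigParser's getboolean() accepts the usual yes/no/true/false/on/off/1/0 spellings. A small runnable sketch (the config text is a made-up example, not a file from the repo):

# Python 2, matching the suite's ConfigParser usage.
import ConfigParser
import StringIO

cfg = ConfigParser.RawConfigParser()
cfg.readfp(StringIO.StringIO(
    "[fixtures]\n"
    "bucket prefix = test-{random}-\n"
    "slow backend = yes\n"
))
print cfg.getboolean('fixtures', 'slow backend')   # True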
@@ -47,6 +47,7 @@ from . import (
     targets,
     config,
     get_prefix,
+    is_slow_backend,
     )
@@ -4260,11 +4261,11 @@ def test_list_multipart_upload():
     bucket = get_new_bucket()
     key="mymultipart"
     mb = 1024 * 1024
-    upload1 = _multipart_upload(bucket, key, 5 * mb, 1)
-    upload2 = _multipart_upload(bucket, key, 6 * mb, 1)
+    upload1 = _multipart_upload(bucket, key, 5 * mb, do_list = True)
+    upload2 = _multipart_upload(bucket, key, 6 * mb, do_list = True)
 
     key2="mymultipart2"
-    upload3 = _multipart_upload(bucket, key2, 5 * mb, 1)
+    upload3 = _multipart_upload(bucket, key2, 5 * mb, do_list = True)
 
     l = bucket.list_multipart_uploads()
     l = list(l)
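One note on the three calls above: they now pass the helper's listing flag by keyword, do_list = True, instead of a bare positional 1. The _multipart_upload helper itself is defined elsewhere in the test module and is not part of this diff; the sketch below is a hypothetical stand-in (signature and internals assumed, not taken from the repo) showing what such a flag typically does with boto's multipart API:

import StringIO   # Python 2 / boto era, matching the suite

def _example_multipart_upload(bucket, key_name, size, do_list=None,
                              part_size=5 * 1024 * 1024):
    # Hypothetical stand-in for the suite's _multipart_upload helper.
    upload = bucket.initiate_multipart_upload(key_name)
    sent = 0
    part_num = 0
    while sent < size:
        part_num += 1
        chunk = min(part_size, size - sent)
        upload.upload_part_from_file(StringIO.StringIO('x' * chunk), part_num)
        sent += chunk
    if do_list:
        upload.get_all_parts()   # exercise the parts listing mid-upload
    # deliberately left uncompleted so the upload still shows up in
    # bucket.list_multipart_uploads(), which is what the test inspects
    return upload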
@@ -4806,6 +4807,7 @@ def test_region_bucket_create_master_access_remove_secondary():
     e = assert_raises(boto.exception.S3ResponseError, master_conn.get_bucket, bucket.name)
     eq(e.status, 404)
 
 
 @attr(resource='object')
 @attr(method='copy')
 @attr(operation='copy object between regions, verify')
@@ -4821,8 +4823,12 @@ def test_region_copy_object():
     print 'created new dest bucket ', dest_bucket.name
     region_sync_meta(targets.main, dest)
 
-    for file_size in (1024, 1024 * 1024, 10 * 1024 * 1024,
-                      100 * 1024 * 1024):
+    if is_slow_backend():
+        sizes = (1024, 10 * 1024 * 1024)
+    else:
+        sizes = (1024, 10 * 1024 * 1024, 100 * 1024 * 1024)
+
+    for file_size in sizes:
         for (k2, r) in targets.main.iteritems():
             if r == dest_conn:
                 continue