mirror of https://github.com/ceph/s3-tests.git
s3tests: modify cross region copy, sync triggering
Signed-off-by: Yehuda Sadeh <yehuda@inktank.com>
parent: b4441e3057
commit: 3b733245c5

2 changed files with 46 additions and 25 deletions
@@ -179,6 +179,9 @@ class RegionsConn:
         self.master = None
         self.secondaries = []

+    def iteritems(self):
+        return self.m.iteritems()
+
     def add(self, name, conn):
         self.m[name] = conn
         if not self.default:
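The added iteritems() is a plain pass-through to the name-to-connection map, so callers can walk every configured region target rather than just the master/secondaries split. A minimal, self-contained sketch of the pattern it enables (the RegionsConn body here is a simplified stand-in, not the full class from the repository):

# Simplified stand-in for s3tests' RegionsConn, kept only to show what
# the new iteritems() pass-through enables: iterating every registered
# region connection by name (Python 2, matching the test suite).
class RegionsConn(object):
    def __init__(self):
        self.m = {}
        self.default = None
        self.master = None
        self.secondaries = []

    def iteritems(self):
        return self.m.iteritems()

    def add(self, name, conn):
        self.m[name] = conn
        if not self.default:
            self.default = conn  # simplified; the real add() tracks more state

main = RegionsConn()
main.add('region-one', 'conn-to-region-one')  # placeholder "connections"
main.add('region-two', 'conn-to-region-two')

for (name, conn) in main.iteritems():
    print 'would run the cross-region tests against', name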
@@ -4560,12 +4560,18 @@ def test_region_bucket_create_secondary_access_remove_master():

         conn.delete_bucket(bucket)

-def region_sync_meta(conf):
-    if conf.sync_agent_addr:
-        ret = requests.post('http://{addr}:{port}/metadata/partial'.format(addr = conf.sync_agent_addr, port = conf.sync_agent_port))
-        eq(ret.status_code, 200)
-    if conf.sync_meta_wait:
-        time.sleep(conf.sync_meta_wait)
+# syncs all the regions except for the one passed in
+def region_sync_meta(targets, region):
+
+    for (k, r) in targets.iteritems():
+        if r == region:
+            continue
+        conf = r.conf
+        if conf.sync_agent_addr:
+            ret = requests.post('http://{addr}:{port}/metadata/partial'.format(addr = conf.sync_agent_addr, port = conf.sync_agent_port))
+            eq(ret.status_code, 200)
+        if conf.sync_meta_wait:
+            time.sleep(conf.sync_meta_wait)

 @attr(resource='bucket')
 @attr(method='get')
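region_sync_meta() now takes the whole region set plus the region where the change originated; for every other region it POSTs a partial metadata sync to that region's sync agent (if one is configured) and optionally sleeps sync_meta_wait seconds. A sketch of the calling convention, mirroring the call sites updated below (targets.main and master come from the test harness, so this is context rather than standalone code):

# After mutating metadata in one region (here: creating a bucket on the
# master), trigger a sync so every other region observes the change
# before the test makes assertions against it.
bucket = get_new_bucket(master)          # harness helper, master region target
region_sync_meta(targets.main, master)   # push metadata out to the other regions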
@@ -4582,7 +4588,7 @@ def test_region_bucket_create_master_access_remove_secondary():
         conn = r.connection
         bucket = get_new_bucket(master)

-        region_sync_meta(r.conf)
+        region_sync_meta(targets.main, master)

         e = assert_raises(boto.exception.S3ResponseError, conn.get_bucket, bucket.name)
         eq(e.status, 301)
@@ -4590,7 +4596,6 @@ def test_region_bucket_create_master_access_remove_secondary():
         e = assert_raises(boto.exception.S3ResponseError, conn.delete_bucket, bucket.name)
         eq(e.status, 301)

-
         master_conn.delete_bucket(bucket)

 @attr(resource='object')
@@ -4601,26 +4606,39 @@ def test_region_bucket_create_master_access_remove_secondary():
 def test_region_copy_object():
     assert_can_test_multiregion()

-    master = targets.main.master
-    master_conn = master.connection
+    for (k, dest) in targets.main.iteritems():
+        dest_conn = dest.connection

-    master_bucket = get_new_bucket(master)
-    for file_size in (1024, 1024 * 1024, 10 * 1024 * 1024,
-                      100 * 1024 * 1024):
-        for r in targets.main.secondaries:
-            conn = r.connection
-            bucket = get_new_bucket(r)
+        dest_bucket = get_new_bucket(dest)
+        print 'created new dest bucket ', dest_bucket.name
+        region_sync_meta(targets.main, dest)
+
+        for file_size in (1024, 1024 * 1024, 10 * 1024 * 1024,
+                          100 * 1024 * 1024):
+            for (k2, r) in targets.main.iteritems():
+                if r == dest_conn:
+                    continue
+                conn = r.connection
+
+                bucket = get_new_bucket(r)
+                print 'created bucket', bucket.name
+                region_sync_meta(targets.main, r)

-            content = 'testcontent'
+                content = 'testcontent'

-            key = bucket.new_key('testobj')
-            fp_a = FakeWriteFile(file_size, 'A')
-            key.set_contents_from_file(fp_a)
+                key = bucket.new_key('testobj')
+                fp_a = FakeWriteFile(file_size, 'A')
+                key.set_contents_from_file(fp_a)

-            dest_key = master_bucket.copy_key('testobj-dest', bucket.name, key.name)
+                dest_key = dest_bucket.copy_key('testobj-dest', bucket.name, key.name)

-            # verify dest
-            _verify_atomic_key_data(dest_key, file_size, 'A')
+                # verify dest
+                _verify_atomic_key_data(dest_key, file_size, 'A')

-            bucket.delete_key(key.name)
-            conn.delete_bucket(bucket)
+                bucket.delete_key(key.name)
+                print 'removing bucket', bucket.name
+                conn.delete_bucket(bucket)
+
+                dest_bucket.delete_key(dest_key.name)
+
+        dest_conn.delete_bucket(dest_bucket)
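The copy itself is boto's server-side Bucket.copy_key(new_key_name, src_bucket_name, src_key_name), now issued against each destination region's bucket instead of only the master's. A minimal sketch of that call outside the test harness (endpoint, credentials, and bucket/key names are placeholders, not values from this commit):

import boto.s3.connection

# Placeholder endpoint and credentials; the real tests build these from
# the per-region config that region_sync_meta() also reads.
conn = boto.s3.connection.S3Connection(
    aws_access_key_id='ACCESSKEY',
    aws_secret_access_key='SECRETKEY',
    host='s3.dest-region.example.com',
    calling_format=boto.s3.connection.OrdinaryCallingFormat(),
)

dest_bucket = conn.get_bucket('dest-bucket')

# Server-side copy: (new key name, source bucket name, source key name),
# the same signature the test uses for its cross-region copy.
dest_key = dest_bucket.copy_key('testobj-dest', 'source-bucket', 'testobj')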