forked from TrueCloudLab/s3-tests
Compare commits
4 commits
- b518fd2022
- f26be5eff7
- d9da7172c1
- 53e6f52d86
5 changed files with 85 additions and 15 deletions
@@ -41,5 +41,4 @@ virtualenv --no-site-packages --distribute virtualenv
 # easy_install, and we really wanted pip; next line will fail if pip
 # requirements.txt does not match setup.py requirements -- sucky but
 # good enough for now
-./virtualenv/bin/python setup.py develop \
-    --allow-hosts None
+./virtualenv/bin/python setup.py develop
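Dropping `--allow-hosts None` loosens this last bootstrap step in the README: with the flag, `setup.py develop` was forbidden from downloading anything, so it failed whenever `requirements.txt` and `setup.py` disagreed (the caveat the surrounding comments describe); without it, setuptools can fetch missing dependencies itself.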
@@ -26,6 +26,9 @@ def get_prefix():
     assert prefix is not None
     return prefix
 
+def is_slow_backend():
+    return slow_backend
+
 def choose_bucket_prefix(template, max_len=30):
     """
     Choose a prefix for our test buckets, so they're easy to identify.

@@ -237,6 +240,7 @@ def setup():
 
     global prefix
     global targets
+    global slow_backend
 
     try:
         template = cfg.get('fixtures', 'bucket prefix')

@@ -244,6 +248,11 @@ def setup():
         template = 'test-{random}-'
     prefix = choose_bucket_prefix(template=template)
 
+    try:
+        slow_backend = cfg.getboolean('fixtures', 'slow backend')
+    except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
+        slow_backend = False
+
     # pull the default_region out, if it exists
     try:
         default_region = cfg.get('fixtures', 'default_region')
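These hunks (apparently the functional test package's setup module, given `get_prefix()` and `setup()`) read a new boolean `slow backend` key from the `[fixtures]` section of the test configuration, defaulting to `False` when the section or option is missing. A minimal sketch of the matching config entry, assuming the usual s3-tests INI layout:

```ini
; hypothetical excerpt from an s3-tests configuration file
[fixtures]
; existing key, read just above the new code
bucket prefix = test-{random}-
; new key introduced by this change; getboolean() accepts yes/no, true/false, on/off, 1/0
slow backend = yes
```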
@@ -47,6 +47,7 @@ from . import (
     targets,
     config,
     get_prefix,
+    is_slow_backend,
     )
 
 

@@ -4806,6 +4807,7 @@ def test_region_bucket_create_master_access_remove_secondary():
     e = assert_raises(boto.exception.S3ResponseError, master_conn.get_bucket, bucket.name)
     eq(e.status, 404)
 
+
 @attr(resource='object')
 @attr(method='copy')
 @attr(operation='copy object between regions, verify')

@@ -4821,8 +4823,12 @@ def test_region_copy_object():
         print 'created new dest bucket ', dest_bucket.name
         region_sync_meta(targets.main, dest)
 
-        for file_size in (1024, 1024 * 1024, 10 * 1024 * 1024,
-                          100 * 1024 * 1024):
+        if is_slow_backend():
+            sizes = (1024, 10 * 1024 * 1024)
+        else:
+            sizes = (1024, 10 * 1024 * 1024, 100 * 1024 * 1024)
+
+        for file_size in sizes:
             for (k2, r) in targets.main.iteritems():
                 if r == dest_conn:
                     continue
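Note that the fast path is not identical to the old loop either: previously copies were tested at 1 KiB, 1 MiB, 10 MiB, and 100 MiB; now every backend tests 1 KiB and 10 MiB, the 100 MiB case runs only when the backend is not flagged slow, and the 1 MiB case is dropped entirely.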
@@ -21,7 +21,7 @@ def reader(bucket, worker_id, file_names, queue, rand):
         objname = rand.choice(file_names)
         key = bucket.new_key(objname)
 
-        fp = realistic.FileVerifier()
+        fp = realistic.FileValidator()
         result = dict(
                 type='r',
                 bucket=bucket.name,

@@ -31,7 +31,7 @@ def reader(bucket, worker_id, file_names, queue, rand):
 
         start = time.time()
         try:
-            key.get_contents_to_file(fp)
+            key.get_contents_to_file(fp._file)
         except gevent.GreenletExit:
             raise
         except Exception as e:

@@ -50,7 +50,7 @@ def reader(bucket, worker_id, file_names, queue, rand):
         end = time.time()
 
         if not fp.valid():
-            m='md5sum check failed start={s} ({se}) end={e} size={sz} obj={o}'.format(s=time.ctime(start), se=start, e=end, sz=fp.size, o=objname)
+            m='md5sum check failed start={s} ({se}) end={e} size={sz} obj={o}'.format(s=time.ctime(start), se=start, e=end, sz=fp._file.tell(), o=objname)
             result.update(
                 error=dict(
                     msg=m,

@@ -63,13 +63,13 @@ def reader(bucket, worker_id, file_names, queue, rand):
         result.update(
             start=start,
             duration=int(round(elapsed * NANOSECOND)),
-            chunks=fp.chunks,
             )
         queue.put(result)
 
 def writer(bucket, worker_id, file_names, files, queue, rand):
     while True:
         fp = next(files)
+        fp.seek(0)
         objname = rand.choice(file_names)
         key = bucket.new_key(objname)
 

@@ -104,7 +104,6 @@ def writer(bucket, worker_id, file_names, files, queue, rand):
         result.update(
             start=start,
             duration=int(round(elapsed * NANOSECOND)),
-            chunks=fp.last_chunks,
             )
 
         queue.put(result)
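Taken together, these hunks (apparently the readwrite stress tool) change the reader's verification strategy: instead of streaming the download through a `FileVerifier`, the reader downloads into the `FileValidator`'s underlying spooled temp file and checks the hash trailer afterwards, which is also why the per-chunk timing fields (`chunks=fp.chunks`, `chunks=fp.last_chunks`) drop out of the result dicts. A minimal sketch of the new read-and-verify cycle, assuming `key` is a boto `Key` and `realistic` is the module patched below:

```python
fp = realistic.FileValidator()
key.get_contents_to_file(fp._file)  # download straight into the validator's temp file
if not fp.valid():                  # valid() re-hashes the body and compares the trailer
    print 'md5sum check failed for', key.name
```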
@@ -12,6 +12,62 @@ import os
 NANOSECOND = int(1e9)
 
 
+def generate_file_contents(size):
+    """
+    A helper function to generate binary contents for a given size, and
+    calculates the md5 hash of the contents appending itself at the end of the
+    blob.
+    It uses sha1's hexdigest which is 40 chars long. So any binary generated
+    should remove the last 40 chars from the blob to retrieve the original hash
+    and binary so that validity can be proved.
+    """
+    size = int(size)
+    contents = os.urandom(size)
+    content_hash = hashlib.sha1(contents).hexdigest()
+    return contents + content_hash
+
+
+class FileValidator(object):
+
+    def __init__(self, f=None):
+        self._file = tempfile.SpooledTemporaryFile()
+        self.original_hash = None
+        self.new_hash = None
+        if f:
+            f.seek(0)
+            shutil.copyfileobj(f, self._file)
+
+    def valid(self):
+        """
+        Returns True if this file looks valid. The file is valid if the end
+        of the file has the md5 digest for the first part of the file.
+        """
+        self._file.seek(0)
+        contents = self._file.read()
+        self.original_hash, binary = contents[-40:], contents[:-40]
+        self.new_hash = hashlib.sha1(binary).hexdigest()
+        if not self.new_hash == self.original_hash:
+            print 'original hash: ', self.original_hash
+            print 'new hash: ', self.new_hash
+            print 'size: ', self._file.tell()
+            return False
+        return True
+
+    # XXX not sure if we need all of these
+    def seek(self, offset, whence=os.SEEK_SET):
+        self._file.seek(offset, whence)
+
+    def tell(self):
+        return self._file.tell()
+
+    def read(self, size=-1):
+        return self._file.read(size)
+
+    def write(self, data):
+        self._file.write(data)
+        self._file.seek(0)
+
+
 class RandomContentFile(object):
     def __init__(self, size, seed):
         self.size = size
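One nit in the new code: both docstrings say md5, but the hashes are computed with `hashlib.sha1`, which is where the 40-character hexdigest trailer comes from. A small round-trip sketch of how the two helpers fit together (illustrative, not part of the diff):

```python
blob = generate_file_contents(1024)  # 1024 random bytes + 40-char sha1 hexdigest
v = FileValidator()
v.write(blob)                        # write() rewinds the spooled temp file afterwards
assert v.valid()                     # splits off the trailer, re-hashes, compares
```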
@@ -182,6 +238,7 @@ def files(mean, stddev, seed=None):
             break
         yield RandomContentFile(size=size, seed=rand.getrandbits(32))
 
+
 def files2(mean, stddev, seed=None, numfiles=10):
     """
     Yields file objects with effectively random contents, where the
@@ -192,17 +249,17 @@ def files2(mean, stddev, seed=None, numfiles=10):
     stores `numfiles` files and yields them in a loop.
     """
     # pre-compute all the files (and save with TemporaryFiles)
-    rand_files = files(mean, stddev, seed)
     fs = []
     for _ in xrange(numfiles):
-        f = next(rand_files)
         t = tempfile.SpooledTemporaryFile()
-        shutil.copyfileobj(f, t)
+        t.write(generate_file_contents(random.normalvariate(mean, stddev)))
+        t.seek(0)
         fs.append(t)
 
     while True:
         for f in fs:
-            yield PrecomputedContentFile(f)
+            yield f
 
 
 def names(mean, stddev, charset=None, seed=None):
     """
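Since `files2` now yields the raw `SpooledTemporaryFile` objects themselves, cycled in a loop, rather than fresh `PrecomputedContentFile` wrappers, each consumer gets a file whose position is wherever the previous user left it; that is why `writer()` above gained the `fp.seek(0)` call. A sketch of the contract, with illustrative numbers:

```python
gen = files2(mean=1024, stddev=128, numfiles=4)
fp = next(gen)   # a plain tempfile.SpooledTemporaryFile, reused on later iterations
fp.seek(0)       # callers must rewind before each use, as writer() now does
data = fp.read()
```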