forked from TrueCloudLab/s3-tests
Whitespace cleanup.
This commit is contained in:
parent e91a3aea66
commit 3fe595abd4
6 changed files with 58 additions and 52 deletions
@@ -1,5 +1,5 @@
(whitespace-only change: trailing whitespace stripped; the text of these lines is otherwise unchanged)
 from boto.auth_handler import AuthHandler

 class AnonymousAuthHandler(AuthHandler):
     def add_auth(self, http_request, **kwargs):
         return # Nothing to do for anonymous access!
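For context, this hunk touches the whole handler: boto delegates request signing to an AuthHandler, so a no-op add_auth() produces unsigned (anonymous) requests. The class below is the code from the hunk; the comments are added here for explanation and are not part of the repository.

    from boto.auth_handler import AuthHandler

    class AnonymousAuthHandler(AuthHandler):
        def add_auth(self, http_request, **kwargs):
            # boto calls add_auth() to sign each outgoing request; doing
            # nothing here sends the request without an Authorization
            # header, which is what an anonymous S3 request looks like.
            return # Nothing to do for anonymous access!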
common.py (13 changed lines)
@@ -108,11 +108,11 @@ def setup():

     kwargs = bunch.Bunch()
     conn_args = bunch.Bunch(
-        port = 'port',
-        host = 'host',
-        is_secure = 'is_secure',
-        access_key = 'aws_access_key_id',
-        secret_key = 'aws_secret_access_key',
+        port='port',
+        host='host',
+        is_secure='is_secure',
+        access_key='aws_access_key_id',
+        secret_key='aws_secret_access_key',
         )
     for cfg_key in conn_args.keys():
         conn_key = conn_args[cfg_key]
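The loop that consumes the cfg_key to conn_key mapping is cut off by the hunk above. A minimal sketch of the usual pattern, assuming a dict-like config section per connection; the connect() helper and the cfg name are illustrative, not the repository's code:

    import boto.s3.connection

    def connect(cfg, conn_args):
        # Translate config keys (e.g. 'access_key') into the keyword names
        # boto.s3.connection.S3Connection expects (e.g. 'aws_access_key_id').
        kwargs = {}
        for cfg_key, conn_key in conn_args.items():
            if cfg_key in cfg:
                kwargs[conn_key] = cfg[cfg_key]
        return boto.s3.connection.S3Connection(**kwargs)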
@@ -158,5 +158,4 @@ def get_new_bucket(connection=None):
     return bucket

 def teardown():
     nuke_prefixed_buckets()
-
@@ -11,8 +11,8 @@ import sys

 def parse_opts():
     parser = OptionParser();
-    parser.add_option('-O' , '--outfile', help='write output to FILE. Defaults to STDOUT', metavar='FILE')
-    parser.add_option('-b' , '--bucket', dest='bucket', help='push objects to BUCKET', metavar='BUCKET')
+    parser.add_option('-O', '--outfile', help='write output to FILE. Defaults to STDOUT', metavar='FILE')
+    parser.add_option('-b', '--bucket', dest='bucket', help='push objects to BUCKET', metavar='BUCKET')
     parser.add_option('--seed', dest='seed', help='optional seed for the random number generator')

     return parser.parse_args()
@@ -42,7 +42,7 @@ def upload_objects(bucket, files, seed):
     list of boto S3 key objects
     """
     keys = []
-    name_generator = realistic.names(15, 4,seed=seed)
+    name_generator = realistic.names(15, 4, seed=seed)

     for fp in files:
         print >> sys.stderr, 'sending file with size %dB' % fp.size
@@ -113,4 +113,3 @@ if __name__ == '__main__':
     except Exception as e:
         traceback.print_exc()
         common.teardown()
-
@@ -12,9 +12,10 @@ import realistic
(whitespace-only change: the only difference in this hunk is one added blank line)
 import common

 class Result:
     TYPE_NONE = 0
     TYPE_READER = 1
     TYPE_WRITER = 2

     def __init__(self, name, type=TYPE_NONE, time=0, success=True, size=0, details=''):
         self.name = name
         self.type = type
@@ -24,7 +25,7 @@ class Result:
         self.details = details

     def __repr__(self):
-        type_dict = {Result.TYPE_NONE : 'None', Result.TYPE_READER : 'Reader', Result.TYPE_WRITER : 'Writer'}
+        type_dict = {Result.TYPE_NONE: 'None', Result.TYPE_READER: 'Reader', Result.TYPE_WRITER: 'Writer'}
         type_s = type_dict[self.type]
         if self.success:
             status = 'Success'
@@ -37,7 +38,7 @@ class Result:
             name=self.name,
             size=self.size,
             time=self.time,
-            mbps=(self.size/self.time/1024.0),
+            mbps=self.size / self.time / 1024.0,
             details=self.details
             )

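Note that the reader records size in kilobytes (size=fp.size / 1024 in the reader hunk below), so the expression above yields megabytes per second. A quick worked example:

    size_kb, elapsed = 2048, 4.0          # 2 MB read in 4 seconds
    mbps = size_kb / elapsed / 1024.0     # -> 0.5 MB/s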
@@ -52,13 +53,15 @@ def reader(seconds, bucket, name=None, queue=None):
         end = time.clock()
         elapsed = end - start
         if queue:
-            queue.put(Result(name,
-                type=Result.TYPE_READER,
-                time=elapsed,
-                success=fp.valid(),
-                size=(fp.size/1024)
-                )
-            )
+            queue.put(
+                Result(
+                    name,
+                    type=Result.TYPE_READER,
+                    time=elapsed,
+                    success=fp.valid(),
+                    size=fp.size / 1024,
+                    ),
+                )
         count += 1
     if count == 0:
         gevent.sleep(1)
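The reader pushes one Result per downloaded object onto the queue. A small sketch of how such results can be drained and summarised afterwards; the drain() helper is illustrative (it assumes a gevent.queue.Queue) and the repository's own reporting code is not part of this diff:

    def drain(queue):
        # Pull everything the reader/writer greenlets have queued so far.
        results = []
        while not queue.empty():
            results.append(queue.get())
        ok = [r for r in results if r.success]
        total_kb = sum(r.size for r in ok)   # Result.size is stored in KB
        return len(ok), total_kb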
@@ -71,7 +74,12 @@ def writer(seconds, bucket, name=None, queue=None, quantity=1, file_size=1, file
     if file_name_seed != None:
         r2 = file_name_seed

-    files = generate_objects.get_random_files(quantity, 1024*file_size, 1024*file_stddev, r)
+    files = generate_objects.get_random_files(
+        quantity=quantity,
+        mean=1024 * file_size,
+        stddev=1024 * file_stddev,
+        seed=r,
+        )

     start = time.clock()
     generate_objects.upload_objects(bucket, files, r2)
@@ -189,4 +197,3 @@ def main():

 if __name__ == "__main__":
     main()
-
@@ -95,7 +95,7 @@ def files(mean, stddev, seed=None):
(whitespace-only change: trailing whitespace stripped; the docstring text is unchanged)
     re-download the object and place the contents into a file called
     ``foo``, the following should print two identical lines:

       python -c 'import sys, hashlib; data=sys.stdin.read(); print hashlib.md5(data[:-16]).hexdigest(); print "".join("%02x" % ord(c) for c in data[-16:])' <foo

     Except for objects shorter than 16 bytes, where the second line
     will be proportionally shorter.
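The docstring's shell one-liner checks that an object's last 16 bytes are the MD5 digest of everything before them. The same check in plain Python, as a sketch; it reads the whole object into memory and skips the truncated-digest case for objects shorter than 16 bytes:

    import hashlib

    def is_valid(data):
        if len(data) < 16:
            raise ValueError('object shorter than 16 bytes: digest is truncated')
        # The trailer holds the raw digest bytes of the payload before it.
        return hashlib.md5(data[:-16]).digest() == data[-16:]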
test_s3.py (13 changed lines)
@@ -200,7 +200,7 @@ def check_grants(got, want):
     eq(len(got), len(want))
     got = sorted(got, key=operator.attrgetter('id'))
     want = sorted(want, key=operator.itemgetter('id'))
-    for g,w in zip(got, want):
+    for g, w in zip(got, want):
         w = dict(w)
         eq(g.permission, w.pop('permission'))
         eq(g.id, w.pop('id'))
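For reference, the want entries compared above are plain dicts mirroring the attributes of boto Grant objects; only the permission and id keys are certain from this hunk, and the values below are placeholders:

    want = [
        dict(permission='FULL_CONTROL', id='<owner-canonical-user-id>'),
        dict(permission='READ', id='<grantee-canonical-user-id>'),
        ]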
@@ -944,7 +944,7 @@ def generate_random(mb_size):
(whitespace-only change: trailing whitespace stripped; the code is unchanged)
         s = ''
         left = mb_size - x
         this_part_size = min(left, part_size_mb)
         for y in range(this_part_size * mb / chunk):
             s = s + strpart
         yield s
         if (x == mb_size):
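generate_random() yields its payload in multi-megabyte parts built from a repeated fixed-size chunk. A small usage sketch, assuming (as the variable names suggest) that mb = 1024 * 1024 and that parts are capped at part_size_mb megabytes; those definitions fall outside this hunk:

    total = 0
    for part in generate_random(10):    # 10 MB of pseudo-random data
        total += len(part)              # each part is at most part_size_mb MB
    assert total == 10 * 1024 * 1024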
@@ -989,9 +989,10 @@ def test_list_multipart_upload():

 def _simple_http_req_100_cont(host, port, method, resource):
     req = '{method} {resource} HTTP/1.1\r\nHost: {host}\r\nAccept-Encoding: identity\r\nContent-Length: 123\r\nExpect: 100-continue\r\n\r\n'.format(
-            method = method,
-            resource = resource,
-            host = host)
+            method=method,
+            resource=resource,
+            host=host,
+            )

     s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
     s.settimeout(5)
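The rest of _simple_http_req_100_cont() falls outside this hunk. A hedged sketch of the usual continuation for this kind of raw Expect: 100-continue probe, not the repository's exact code: connect, send the headers, and read back only the status code of whichever response arrives first:

    s.connect((host, port))
    s.sendall(req)
    data = s.recv(1024)
    # The server answers either an interim '100' or a final status such as
    # '403' before any body is sent; the code is the second token of the
    # status line.
    status = data.split()[1]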
@@ -1014,7 +1015,7 @@ def _simple_http_req_100_cont(host, port, method, resource):
 def test_100_continue():
     bucket = get_new_bucket()
     objname = 'testobj'
-    resource = '/{bucket}/{obj}'.format(bucket = bucket.name, obj = objname)
+    resource = '/{bucket}/{obj}'.format(bucket=bucket.name, obj=objname)

     status = _simple_http_req_100_cont(s3.main.host, s3.main.port, 'PUT', resource)
     eq(status, '403')