Whitespace cleanup.

Tommi Virtanen 2011-07-11 13:19:54 -07:00
parent e91a3aea66
commit 3fe595abd4
6 changed files with 58 additions and 52 deletions

View file

@@ -108,11 +108,11 @@ def setup():
     kwargs = bunch.Bunch()
     conn_args = bunch.Bunch(
-        port = 'port',
-        host = 'host',
-        is_secure = 'is_secure',
-        access_key = 'aws_access_key_id',
-        secret_key = 'aws_secret_access_key',
+        port='port',
+        host='host',
+        is_secure='is_secure',
+        access_key='aws_access_key_id',
+        secret_key='aws_secret_access_key',
         )
     for cfg_key in conn_args.keys():
         conn_key = conn_args[cfg_key]
@@ -159,4 +159,3 @@ def get_new_bucket(connection=None):

 def teardown():
     nuke_prefixed_buckets()
-

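Most of the edits in this commit apply PEP 8's rule that there should be no spaces around '=' when it introduces a keyword argument or a default parameter value. A minimal before/after sketch of that style, using a hypothetical connect() helper rather than the boto connection code touched above:

    # Hypothetical helper, for illustration only -- not part of this commit.
    def connect(host, port, is_secure=True):
        return (host, port, is_secure)

    # Discouraged by PEP 8: spaces around '=' in keyword arguments.
    conn = connect(host = 'localhost', port = 7280, is_secure = False)

    # Preferred form, as applied throughout this cleanup.
    conn = connect(host='localhost', port=7280, is_secure=False)
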
View file

@@ -11,8 +11,8 @@ import sys

 def parse_opts():
     parser = OptionParser();
-    parser.add_option('-O' , '--outfile', help='write output to FILE. Defaults to STDOUT', metavar='FILE')
-    parser.add_option('-b' , '--bucket', dest='bucket', help='push objects to BUCKET', metavar='BUCKET')
+    parser.add_option('-O', '--outfile', help='write output to FILE. Defaults to STDOUT', metavar='FILE')
+    parser.add_option('-b', '--bucket', dest='bucket', help='push objects to BUCKET', metavar='BUCKET')
     parser.add_option('--seed', dest='seed', help='optional seed for the random number generator')
     return parser.parse_args()
@@ -42,7 +42,7 @@ def upload_objects(bucket, files, seed):
     list of boto S3 key objects
     """
     keys = []
-    name_generator = realistic.names(15, 4,seed=seed)
+    name_generator = realistic.names(15, 4, seed=seed)
     for fp in files:
         print >> sys.stderr, 'sending file with size %dB' % fp.size
@@ -113,4 +113,3 @@ if __name__ == '__main__':
     except Exception as e:
         traceback.print_exc()
         common.teardown()
-

View file

@@ -15,6 +15,7 @@ class Result:
     TYPE_NONE = 0
     TYPE_READER = 1
     TYPE_WRITER = 2
+
     def __init__(self, name, type=TYPE_NONE, time=0, success=True, size=0, details=''):
         self.name = name
         self.type = type
@@ -24,7 +25,7 @@ class Result:
         self.details = details

     def __repr__(self):
-        type_dict = {Result.TYPE_NONE : 'None', Result.TYPE_READER : 'Reader', Result.TYPE_WRITER : 'Writer'}
+        type_dict = {Result.TYPE_NONE: 'None', Result.TYPE_READER: 'Reader', Result.TYPE_WRITER: 'Writer'}
         type_s = type_dict[self.type]
         if self.success:
             status = 'Success'
@@ -37,7 +38,7 @@ class Result:
            name=self.name,
            size=self.size,
            time=self.time,
-           mbps=(self.size/self.time/1024.0),
+           mbps=self.size / self.time / 1024.0,
            details=self.details
            )
@@ -52,12 +53,14 @@ def reader(seconds, bucket, name=None, queue=None):
         end = time.clock()
         elapsed = end - start
         if queue:
-            queue.put(Result(name,
+            queue.put(
+                Result(
+                name,
                 type=Result.TYPE_READER,
                 time=elapsed,
                 success=fp.valid(),
-                size=(fp.size/1024)
-                )
+                size=fp.size / 1024,
+                ),
                 )
         count += 1
     if count == 0:
@@ -71,7 +74,12 @@ def writer(seconds, bucket, name=None, queue=None, quantity=1, file_size=1, file
     if file_name_seed != None:
         r2 = file_name_seed
-    files = generate_objects.get_random_files(quantity, 1024*file_size, 1024*file_stddev, r)
+    files = generate_objects.get_random_files(
+        quantity=quantity,
+        mean=1024 * file_size,
+        stddev=1024 * file_stddev,
+        seed=r,
+        )
     start = time.clock()
     generate_objects.upload_objects(bucket, files, r2)
@@ -189,4 +197,3 @@ def main():

 if __name__ == "__main__":
     main()
-

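The writer() change above also switches generate_objects.get_random_files() from positional to keyword arguments, so the call site states which value is the mean and which the standard deviation. A rough stand-in for that helper, assuming the (quantity, mean, stddev, seed) parameters implied by the new call; the real function presumably returns file-like objects rather than bare sizes:

    import random

    def get_random_files(quantity, mean, stddev, seed):
        # Illustrative stand-in: draw `quantity` object sizes from a normal
        # distribution instead of building actual file-like objects.
        rng = random.Random(seed)
        return [max(1, int(rng.normalvariate(mean, stddev))) for _ in range(quantity)]

    # Positional call (old style): easy to swap the mean and stddev by accident.
    files = get_random_files(5, 1024 * 4, 1024, 42)

    # Keyword call (new style): the intent is visible at the call site.
    files = get_random_files(quantity=5, mean=1024 * 4, stddev=1024, seed=42)
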
View file

@@ -200,7 +200,7 @@ def check_grants(got, want):
     eq(len(got), len(want))
     got = sorted(got, key=operator.attrgetter('id'))
     want = sorted(want, key=operator.itemgetter('id'))
-    for g,w in zip(got, want):
+    for g, w in zip(got, want):
         w = dict(w)
         eq(g.permission, w.pop('permission'))
         eq(g.id, w.pop('id'))
@@ -989,9 +989,10 @@ def test_list_multipart_upload():
 def _simple_http_req_100_cont(host, port, method, resource):
     req = '{method} {resource} HTTP/1.1\r\nHost: {host}\r\nAccept-Encoding: identity\r\nContent-Length: 123\r\nExpect: 100-continue\r\n\r\n'.format(
-        method = method,
-        resource = resource,
-        host = host)
+        method=method,
+        resource=resource,
+        host=host,
+        )
     s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
     s.settimeout(5)
@@ -1014,7 +1015,7 @@ def _simple_http_req_100_cont(host, port, method, resource):
 def test_100_continue():
     bucket = get_new_bucket()
     objname = 'testobj'
-    resource = '/{bucket}/{obj}'.format(bucket = bucket.name, obj = objname)
+    resource = '/{bucket}/{obj}'.format(bucket=bucket.name, obj=objname)
     status = _simple_http_req_100_cont(s3.main.host, s3.main.port, 'PUT', resource)
     eq(status, '403')
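
For reference, the format() call in _simple_http_req_100_cont() expands to a raw HTTP/1.1 request such as the one sketched below; the bucket name and host are made up here, while 'testobj' comes from test_100_continue. The test then reads back the first status line and asserts '403', presumably because the raw request carries no authentication:

    req = '{method} {resource} HTTP/1.1\r\nHost: {host}\r\nAccept-Encoding: identity\r\nContent-Length: 123\r\nExpect: 100-continue\r\n\r\n'.format(
        method='PUT',
        resource='/mybucket/testobj',  # hypothetical bucket name, real object name
        host='localhost',              # hypothetical host
        )
    # req now reads, line by line (CRLF-terminated):
    #   PUT /mybucket/testobj HTTP/1.1
    #   Host: localhost
    #   Accept-Encoding: identity
    #   Content-Length: 123
    #   Expect: 100-continue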