print foo -> print(foo)

Signed-off-by: Sage Weil <sage@redhat.com>
Sage Weil 2019-12-14 09:19:20 -06:00
parent e3761a30b3
commit 2c134616d2
10 changed files with 61 additions and 61 deletions
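The change is mechanical: every Python 2 print statement becomes a call to the Python 3 print() function, and the `print >> stream, ...` redirection form is rewritten as an explicit stream.write(). A minimal sketch of the pattern follows (illustrative only, not taken from the changed files; the size value is hypothetical, and print(..., file=stream) would be an equivalent alternative to stream.write()):

import sys

size = 4096  # hypothetical value, for illustration only

# Python 2 (before):
#   print 'sending file with size %dB' % size
#   print >> sys.stderr, 'sending file with size %dB' % size

# Python 3 (after), matching this commit's rewrites:
print('sending file with size %dB' % size)
sys.stderr.write('sending file with size %dB\n' % size)

# Equivalent alternative using the file= keyword:
print('sending file with size %dB' % size, file=sys.stderr)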

View file

@@ -57,7 +57,7 @@ def main():
 def calculate_stats(options, total, durations, min_time, max_time, errors,
                     success):
-    print 'Calculating statistics...'
+    print('Calculating statistics...')
 
     f = sys.stdin
     if options.input:
@@ -81,13 +81,13 @@ def calculate_stats(options, total, durations, min_time, max_time, errors,
             end = start + duration / float(NANOSECONDS)
 
             if options.verbose:
-                print "[{type}] POSIX time: {start:>18.2f} - {end:<18.2f} " \
+                print("[{type}] POSIX time: {start:>18.2f} - {end:<18.2f} " \
                     "{data:>11.2f} KB".format(
                         type=type_,
                         start=start,
                         end=end,
                         data=data_size / 1024.0, # convert to KB
-                    )
+                    ))
 
             # update time boundaries
             prev = min_time.setdefault(type_, start)
@@ -121,7 +121,7 @@ def print_results(total, durations, min_time, max_time, errors, success):
         trans_long = max(durations[type_]) / float(NANOSECONDS)
         trans_short = min(durations[type_]) / float(NANOSECONDS)
 
-        print OUTPUT_FORMAT.format(
+        print(OUTPUT_FORMAT.format(
             type=type_,
             trans_success=trans_success,
             trans_fail=trans_fail,
@@ -135,7 +135,7 @@ def print_results(total, durations, min_time, max_time, errors, success):
             conc=conc,
             trans_long=trans_long,
            trans_short=trans_short,
-        )
+        ))
 
 if __name__ == '__main__':
     main()

View file

@@ -51,10 +51,10 @@ def nuke_bucket(bucket):
         while deleted_cnt:
             deleted_cnt = 0
             for key in bucket.list():
-                print 'Cleaning bucket {bucket} key {key}'.format(
+                print('Cleaning bucket {bucket} key {key}'.format(
                     bucket=bucket,
                     key=key,
-                    )
+                    ))
                 key.set_canned_acl('private')
                 key.delete()
                 deleted_cnt += 1
@@ -67,20 +67,20 @@ def nuke_bucket(bucket):
                 and e.body == ''):
             e.error_code = 'AccessDenied'
         if e.error_code != 'AccessDenied':
-            print 'GOT UNWANTED ERROR', e.error_code
+            print('GOT UNWANTED ERROR', e.error_code)
             raise
         # seems like we're not the owner of the bucket; ignore
         pass
 
 def nuke_prefixed_buckets():
     for name, conn in s3.items():
-        print 'Cleaning buckets from connection {name}'.format(name=name)
+        print('Cleaning buckets from connection {name}'.format(name=name))
         for bucket in conn.get_all_buckets():
             if bucket.name.startswith(prefix):
-                print 'Cleaning bucket {bucket}'.format(bucket=bucket)
+                print('Cleaning bucket {bucket}'.format(bucket=bucket))
                 nuke_bucket(bucket)
 
-    print 'Done with cleanup of test buckets.'
+    print('Done with cleanup of test buckets.')
 
 def read_config(fp):
     config = bunch.Bunch()

View file

@@ -43,7 +43,7 @@ def upload_objects(bucket, files, seed):
     name_generator = realistic.names(15, 4, seed=seed)
 
     for fp in files:
-        print >> sys.stderr, 'sending file with size %dB' % fp.size
+        sys.stderr.write('sending file with size %dB\n' % fp.size)
         key = Key(bucket)
         key.key = name_generator.next()
         key.set_contents_from_file(fp, rewind=True)
@@ -94,18 +94,18 @@ def _main():
         bucket.set_acl('public-read')
     keys = []
 
-    print >> OUTFILE, 'bucket: %s' % bucket.name
-    print >> sys.stderr, 'setup complete, generating files'
+    OUTFILE.write('bucket: %s\n' % bucket.name)
+    sys.stderr.write('setup complete, generating files\n')
     for profile in common.config.file_generation.groups:
         seed = random.random()
         files = get_random_files(profile[0], profile[1], profile[2], seed)
         keys += upload_objects(bucket, files, seed)
 
-    print >> sys.stderr, 'finished sending files. generating urls'
+    sys.stderr.write('finished sending files. generating urls\n')
     for key in keys:
-        print >> OUTFILE, key.generate_url(0, query_auth=False)
+        OUTFILE.write(key.generate_url(0, query_auth=False) + '\n')
 
-    print >> sys.stderr, 'done'
+    sys.stderr.write('done\n')
 
 def main():

View file

@@ -57,7 +57,7 @@ def reader(bucket, worker_id, file_names, queue, rand):
                     traceback=traceback.format_exc(),
                     ),
                 )
-            print "ERROR:", m
+            print("ERROR:", m)
         else:
             elapsed = end - start
             result.update(
@@ -158,16 +158,16 @@ def main():
     for name in ['names', 'contents', 'writer', 'reader']:
         seeds.setdefault(name, rand.randrange(2**32))
 
-    print 'Using random seeds: {seeds}'.format(seeds=seeds)
+    print('Using random seeds: {seeds}'.format(seeds=seeds))
 
     # setup bucket and other objects
     bucket_name = common.choose_bucket_prefix(config.readwrite.bucket, max_len=30)
     bucket = conn.create_bucket(bucket_name)
-    print "Created bucket: {name}".format(name=bucket.name)
+    print("Created bucket: {name}".format(name=bucket.name))
 
     # check flag for deterministic file name creation
     if not config.readwrite.get('deterministic_file_names'):
-        print 'Creating random file names'
+        print('Creating random file names')
         file_names = realistic.names(
             mean=15,
             stddev=4,
@@ -176,7 +176,7 @@ def main():
         file_names = itertools.islice(file_names, config.readwrite.files.num)
         file_names = list(file_names)
     else:
-        print 'Creating file names that are deterministic'
+        print('Creating file names that are deterministic')
         file_names = []
         for x in xrange(config.readwrite.files.num):
             file_names.append('test_file_{num}'.format(num=x))
@@ -191,7 +191,7 @@ def main():
     # warmup - get initial set of files uploaded if there are any writers specified
     if config.readwrite.writers > 0:
-        print "Uploading initial set of {num} files".format(num=config.readwrite.files.num)
+        print("Uploading initial set of {num} files".format(num=config.readwrite.files.num))
         warmup_pool = gevent.pool.Pool(size=100)
         for file_name in file_names:
             fp = next(files)
@@ -204,9 +204,9 @@ def main():
         warmup_pool.join()
 
     # main work
-    print "Starting main worker loop."
-    print "Using file size: {size} +- {stddev}".format(size=config.readwrite.files.size, stddev=config.readwrite.files.stddev)
-    print "Spawning {w} writers and {r} readers...".format(w=config.readwrite.writers, r=config.readwrite.readers)
+    print("Starting main worker loop.")
+    print("Using file size: {size} +- {stddev}".format(size=config.readwrite.files.size, stddev=config.readwrite.files.stddev))
+    print("Spawning {w} writers and {r} readers...".format(w=config.readwrite.writers, r=config.readwrite.readers))
     group = gevent.pool.Group()
     rand_writer = random.Random(seeds['writer'])
@@ -246,7 +246,7 @@ def main():
     # wait for all the tests to finish
     group.join()
-    print 'post-join, queue size {size}'.format(size=q.qsize())
+    print('post-join, queue size {size}'.format(size=q.qsize()))
 
     if q.qsize() > 0:
         for temp_dict in q:

View file

@@ -47,9 +47,9 @@ class FileValidator(object):
         self.original_hash, binary = contents[-40:], contents[:-40]
         self.new_hash = hashlib.sha1(binary).hexdigest()
         if not self.new_hash == self.original_hash:
-            print 'original hash: ', self.original_hash
-            print 'new hash: ', self.new_hash
-            print 'size: ', self._file.tell()
+            print('original hash: ', self.original_hash)
+            print('new hash: ', self.new_hash)
+            print('size: ', self._file.tell())
             return False
         return True

View file

@@ -141,12 +141,12 @@ def main():
     for name in ['names', 'contents', 'writer', 'reader']:
         seeds.setdefault(name, rand.randrange(2**32))
 
-    print 'Using random seeds: {seeds}'.format(seeds=seeds)
+    print('Using random seeds: {seeds}'.format(seeds=seeds))
 
     # setup bucket and other objects
     bucket_name = common.choose_bucket_prefix(config.roundtrip.bucket, max_len=30)
     bucket = conn.create_bucket(bucket_name)
-    print "Created bucket: {name}".format(name=bucket.name)
+    print("Created bucket: {name}".format(name=bucket.name))
 
     objnames = realistic.names(
         mean=15,
         stddev=4,
@@ -163,10 +163,10 @@ def main():
     logger_g = gevent.spawn(yaml.safe_dump_all, q, stream=real_stdout)
 
-    print "Writing {num} objects with {w} workers...".format(
+    print("Writing {num} objects with {w} workers...".format(
         num=config.roundtrip.files.num,
         w=config.roundtrip.writers,
-        )
+        ))
     pool = gevent.pool.Pool(size=config.roundtrip.writers)
     start = time.time()
     for objname in objnames:
@@ -186,10 +186,10 @@ def main():
         duration=int(round(elapsed * NANOSECOND)),
         ))
 
-    print "Reading {num} objects with {w} workers...".format(
+    print("Reading {num} objects with {w} workers...".format(
         num=config.roundtrip.files.num,
         w=config.roundtrip.readers,
-        )
+        ))
     # avoid accessing them in the same order as the writing
     rand.shuffle(objnames)
     pool = gevent.pool.Pool(size=config.roundtrip.readers)

View file

@@ -43,7 +43,7 @@ def upload_objects(bucket, files, seed):
     name_generator = realistic.names(15, 4, seed=seed)
 
     for fp in files:
-        print >> sys.stderr, 'sending file with size %dB' % fp.size
+        sys.stderr.write('sending file with size %dB\n' % fp.size)
         key = Key(bucket)
         key.key = name_generator.next()
         key.set_contents_from_file(fp, rewind=True)
@@ -94,18 +94,18 @@ def _main():
         bucket.set_acl('public-read')
     keys = []
 
-    print >> OUTFILE, 'bucket: %s' % bucket.name
-    print >> sys.stderr, 'setup complete, generating files'
+    OUTFILE.write('bucket: %s\n' % bucket.name)
+    sys.stderr.write('setup complete, generating files\n')
     for profile in common.config.file_generation.groups:
         seed = random.random()
         files = get_random_files(profile[0], profile[1], profile[2], seed)
         keys += upload_objects(bucket, files, seed)
 
-    print >> sys.stderr, 'finished sending files. generating urls'
+    sys.stderr.write('finished sending files. generating urls\n')
     for key in keys:
-        print >> OUTFILE, key.generate_url(0, query_auth=False)
+        OUTFILE.write(key.generate_url(0, query_auth=False) + '\n')
 
-    print >> sys.stderr, 'done'
+    sys.stderr.write('done\n')
 
 def main():

View file

@@ -57,7 +57,7 @@ def reader(bucket, worker_id, file_names, queue, rand):
                     traceback=traceback.format_exc(),
                     ),
                 )
-            print "ERROR:", m
+            print("ERROR:", m)
         else:
             elapsed = end - start
             result.update(
@@ -158,16 +158,16 @@ def main():
     for name in ['names', 'contents', 'writer', 'reader']:
         seeds.setdefault(name, rand.randrange(2**32))
 
-    print 'Using random seeds: {seeds}'.format(seeds=seeds)
+    print('Using random seeds: {seeds}'.format(seeds=seeds))
 
     # setup bucket and other objects
     bucket_name = common.choose_bucket_prefix(config.readwrite.bucket, max_len=30)
     bucket = conn.create_bucket(bucket_name)
-    print "Created bucket: {name}".format(name=bucket.name)
+    print("Created bucket: {name}".format(name=bucket.name))
 
     # check flag for deterministic file name creation
     if not config.readwrite.get('deterministic_file_names'):
-        print 'Creating random file names'
+        print('Creating random file names')
         file_names = realistic.names(
             mean=15,
             stddev=4,
@@ -176,7 +176,7 @@ def main():
         file_names = itertools.islice(file_names, config.readwrite.files.num)
         file_names = list(file_names)
     else:
-        print 'Creating file names that are deterministic'
+        print('Creating file names that are deterministic')
         file_names = []
         for x in xrange(config.readwrite.files.num):
             file_names.append('test_file_{num}'.format(num=x))
@@ -191,7 +191,7 @@ def main():
     # warmup - get initial set of files uploaded if there are any writers specified
     if config.readwrite.writers > 0:
-        print "Uploading initial set of {num} files".format(num=config.readwrite.files.num)
+        print("Uploading initial set of {num} files".format(num=config.readwrite.files.num))
         warmup_pool = gevent.pool.Pool(size=100)
         for file_name in file_names:
             fp = next(files)
@@ -204,9 +204,9 @@ def main():
         warmup_pool.join()
 
     # main work
-    print "Starting main worker loop."
-    print "Using file size: {size} +- {stddev}".format(size=config.readwrite.files.size, stddev=config.readwrite.files.stddev)
-    print "Spawning {w} writers and {r} readers...".format(w=config.readwrite.writers, r=config.readwrite.readers)
+    print("Starting main worker loop.")
+    print("Using file size: {size} +- {stddev}".format(size=config.readwrite.files.size, stddev=config.readwrite.files.stddev))
+    print("Spawning {w} writers and {r} readers...".format(w=config.readwrite.writers, r=config.readwrite.readers))
     group = gevent.pool.Group()
     rand_writer = random.Random(seeds['writer'])
@@ -246,7 +246,7 @@ def main():
     # wait for all the tests to finish
     group.join()
-    print 'post-join, queue size {size}'.format(size=q.qsize())
+    print('post-join, queue size {size}'.format(size=q.qsize()))
 
     if q.qsize() > 0:
         for temp_dict in q:

View file

@@ -47,9 +47,9 @@ class FileValidator(object):
         self.original_hash, binary = contents[-40:], contents[:-40]
         self.new_hash = hashlib.sha1(binary).hexdigest()
         if not self.new_hash == self.original_hash:
-            print 'original hash: ', self.original_hash
-            print 'new hash: ', self.new_hash
-            print 'size: ', self._file.tell()
+            print('original hash: ', self.original_hash)
+            print('new hash: ', self.new_hash)
+            print('size: ', self._file.tell())
             return False
         return True

View file

@@ -141,12 +141,12 @@ def main():
     for name in ['names', 'contents', 'writer', 'reader']:
         seeds.setdefault(name, rand.randrange(2**32))
 
-    print 'Using random seeds: {seeds}'.format(seeds=seeds)
+    print('Using random seeds: {seeds}'.format(seeds=seeds))
 
     # setup bucket and other objects
     bucket_name = common.choose_bucket_prefix(config.roundtrip.bucket, max_len=30)
     bucket = conn.create_bucket(bucket_name)
-    print "Created bucket: {name}".format(name=bucket.name)
+    print("Created bucket: {name}".format(name=bucket.name))
 
     objnames = realistic.names(
         mean=15,
         stddev=4,
@@ -163,10 +163,10 @@ def main():
     logger_g = gevent.spawn(yaml.safe_dump_all, q, stream=real_stdout)
 
-    print "Writing {num} objects with {w} workers...".format(
+    print("Writing {num} objects with {w} workers...".format(
         num=config.roundtrip.files.num,
         w=config.roundtrip.writers,
-        )
+        ))
     pool = gevent.pool.Pool(size=config.roundtrip.writers)
     start = time.time()
     for objname in objnames:
@@ -186,10 +186,10 @@ def main():
         duration=int(round(elapsed * NANOSECOND)),
         ))
 
-    print "Reading {num} objects with {w} workers...".format(
+    print("Reading {num} objects with {w} workers...".format(
         num=config.roundtrip.files.num,
         w=config.roundtrip.readers,
-        )
+        ))
     # avoid accessing them in the same order as the writing
     rand.shuffle(objnames)
     pool = gevent.pool.Pool(size=config.roundtrip.readers)