From 2c134616d22ea88eb19a8428bc8d68f4eab6e49e Mon Sep 17 00:00:00 2001
From: Sage Weil
Date: Sat, 14 Dec 2019 09:19:20 -0600
Subject: [PATCH] print foo -> print(foo)

Signed-off-by: Sage Weil
---
 s3tests/analysis/rwstats.py       | 10 +++++-----
 s3tests/common.py                 | 12 ++++++------
 s3tests/generate_objects.py       | 12 ++++++------
 s3tests/readwrite.py              | 20 ++++++++++----------
 s3tests/realistic.py              |  6 +++---
 s3tests/roundtrip.py              | 12 ++++++------
 s3tests_boto3/generate_objects.py | 12 ++++++------
 s3tests_boto3/readwrite.py        | 20 ++++++++++----------
 s3tests_boto3/realistic.py        |  6 +++---
 s3tests_boto3/roundtrip.py        | 12 ++++++------
 10 files changed, 61 insertions(+), 61 deletions(-)

diff --git a/s3tests/analysis/rwstats.py b/s3tests/analysis/rwstats.py
index 80b135e..26d27af 100644
--- a/s3tests/analysis/rwstats.py
+++ b/s3tests/analysis/rwstats.py
@@ -57,7 +57,7 @@ def main():
 
 def calculate_stats(options, total, durations, min_time, max_time, errors,
                     success):
-    print 'Calculating statistics...'
+    print('Calculating statistics...')
 
     f = sys.stdin
     if options.input:
@@ -81,13 +81,13 @@ def calculate_stats(options, total, durations, min_time, max_time, errors,
         end = start + duration / float(NANOSECONDS)
 
         if options.verbose:
-            print "[{type}] POSIX time: {start:>18.2f} - {end:<18.2f} " \
+            print("[{type}] POSIX time: {start:>18.2f} - {end:<18.2f} " \
                   "{data:>11.2f} KB".format(
                       type=type_,
                       start=start,
                       end=end,
                       data=data_size / 1024.0, # convert to KB
-                      )
+                      ))
 
         # update time boundaries
         prev = min_time.setdefault(type_, start)
@@ -121,7 +121,7 @@ def print_results(total, durations, min_time, max_time, errors, success):
         trans_long = max(durations[type_]) / float(NANOSECONDS)
         trans_short = min(durations[type_]) / float(NANOSECONDS)
 
-        print OUTPUT_FORMAT.format(
+        print(OUTPUT_FORMAT.format(
             type=type_,
             trans_success=trans_success,
             trans_fail=trans_fail,
@@ -135,7 +135,7 @@ def print_results(total, durations, min_time, max_time, errors, success):
             conc=conc,
             trans_long=trans_long,
             trans_short=trans_short,
-            )
+            ))
 
 if __name__ == '__main__':
     main()
diff --git a/s3tests/common.py b/s3tests/common.py
index 9a325c0..4a94f0d 100644
--- a/s3tests/common.py
+++ b/s3tests/common.py
@@ -51,10 +51,10 @@ def nuke_bucket(bucket):
         while deleted_cnt:
             deleted_cnt = 0
             for key in bucket.list():
-                print 'Cleaning bucket {bucket} key {key}'.format(
+                print('Cleaning bucket {bucket} key {key}'.format(
                     bucket=bucket,
                     key=key,
-                    )
+                    ))
                 key.set_canned_acl('private')
                 key.delete()
                 deleted_cnt += 1
@@ -67,20 +67,20 @@ def nuke_bucket(bucket):
                 and e.body == ''):
             e.error_code = 'AccessDenied'
         if e.error_code != 'AccessDenied':
-            print 'GOT UNWANTED ERROR', e.error_code
+            print('GOT UNWANTED ERROR', e.error_code)
            raise
         # seems like we're not the owner of the bucket; ignore
         pass
 
 def nuke_prefixed_buckets():
     for name, conn in s3.items():
-        print 'Cleaning buckets from connection {name}'.format(name=name)
+        print('Cleaning buckets from connection {name}'.format(name=name))
         for bucket in conn.get_all_buckets():
             if bucket.name.startswith(prefix):
-                print 'Cleaning bucket {bucket}'.format(bucket=bucket)
+                print('Cleaning bucket {bucket}'.format(bucket=bucket))
                 nuke_bucket(bucket)
 
-    print 'Done with cleanup of test buckets.'
+    print('Done with cleanup of test buckets.')
 
 def read_config(fp):
     config = bunch.Bunch()
diff --git a/s3tests/generate_objects.py b/s3tests/generate_objects.py
index 420235a..c25665b 100644
--- a/s3tests/generate_objects.py
+++ b/s3tests/generate_objects.py
@@ -43,7 +43,7 @@ def upload_objects(bucket, files, seed):
     name_generator = realistic.names(15, 4, seed=seed)
 
     for fp in files:
-        print >> sys.stderr, 'sending file with size %dB' % fp.size
+        sys.stderr.write('sending file with size %dB\n' % fp.size)
         key = Key(bucket)
         key.key = name_generator.next()
         key.set_contents_from_file(fp, rewind=True)
@@ -94,18 +94,18 @@ def _main():
     bucket.set_acl('public-read')
     keys = []
 
-    print >> OUTFILE, 'bucket: %s' % bucket.name
-    print >> sys.stderr, 'setup complete, generating files'
+    OUTFILE.write('bucket: %s\n' % bucket.name)
+    sys.stderr.write('setup complete, generating files\n')
     for profile in common.config.file_generation.groups:
         seed = random.random()
         files = get_random_files(profile[0], profile[1], profile[2], seed)
         keys += upload_objects(bucket, files, seed)
 
-    print >> sys.stderr, 'finished sending files. generating urls'
+    sys.stderr.write('finished sending files. generating urls\n')
     for key in keys:
-        print >> OUTFILE, key.generate_url(0, query_auth=False)
+        OUTFILE.write(key.generate_url(0, query_auth=False) + '\n')
 
-    print >> sys.stderr, 'done'
+    sys.stderr.write('done\n')
 
 
 def main():
diff --git a/s3tests/readwrite.py b/s3tests/readwrite.py
index 64f490e..2064b47 100644
--- a/s3tests/readwrite.py
+++ b/s3tests/readwrite.py
@@ -57,7 +57,7 @@ def reader(bucket, worker_id, file_names, queue, rand):
                     traceback=traceback.format_exc(),
                     ),
                 )
-            print "ERROR:", m
+            print("ERROR:", m)
         else:
             elapsed = end - start
             result.update(
@@ -158,16 +158,16 @@ def main():
     for name in ['names', 'contents', 'writer', 'reader']:
         seeds.setdefault(name, rand.randrange(2**32))
 
-    print 'Using random seeds: {seeds}'.format(seeds=seeds)
+    print('Using random seeds: {seeds}'.format(seeds=seeds))
 
     # setup bucket and other objects
     bucket_name = common.choose_bucket_prefix(config.readwrite.bucket, max_len=30)
     bucket = conn.create_bucket(bucket_name)
-    print "Created bucket: {name}".format(name=bucket.name)
+    print("Created bucket: {name}".format(name=bucket.name))
 
     # check flag for deterministic file name creation
     if not config.readwrite.get('deterministic_file_names'):
-        print 'Creating random file names'
+        print('Creating random file names')
         file_names = realistic.names(
             mean=15,
             stddev=4,
@@ -176,7 +176,7 @@ def main():
         file_names = itertools.islice(file_names, config.readwrite.files.num)
         file_names = list(file_names)
     else:
-        print 'Creating file names that are deterministic'
+        print('Creating file names that are deterministic')
         file_names = []
         for x in xrange(config.readwrite.files.num):
             file_names.append('test_file_{num}'.format(num=x))
@@ -191,7 +191,7 @@ def main():
 
     # warmup - get initial set of files uploaded if there are any writers specified
     if config.readwrite.writers > 0:
-        print "Uploading initial set of {num} files".format(num=config.readwrite.files.num)
+        print("Uploading initial set of {num} files".format(num=config.readwrite.files.num))
         warmup_pool = gevent.pool.Pool(size=100)
         for file_name in file_names:
             fp = next(files)
@@ -204,9 +204,9 @@ def main():
         warmup_pool.join()
 
     # main work
-    print "Starting main worker loop."
- print "Using file size: {size} +- {stddev}".format(size=config.readwrite.files.size, stddev=config.readwrite.files.stddev) - print "Spawning {w} writers and {r} readers...".format(w=config.readwrite.writers, r=config.readwrite.readers) + print("Starting main worker loop.") + print("Using file size: {size} +- {stddev}".format(size=config.readwrite.files.size, stddev=config.readwrite.files.stddev)) + print("Spawning {w} writers and {r} readers...".format(w=config.readwrite.writers, r=config.readwrite.readers)) group = gevent.pool.Group() rand_writer = random.Random(seeds['writer']) @@ -246,7 +246,7 @@ def main(): # wait for all the tests to finish group.join() - print 'post-join, queue size {size}'.format(size=q.qsize()) + print('post-join, queue size {size}'.format(size=q.qsize())) if q.qsize() > 0: for temp_dict in q: diff --git a/s3tests/realistic.py b/s3tests/realistic.py index 783a754..e4e1aa6 100644 --- a/s3tests/realistic.py +++ b/s3tests/realistic.py @@ -47,9 +47,9 @@ class FileValidator(object): self.original_hash, binary = contents[-40:], contents[:-40] self.new_hash = hashlib.sha1(binary).hexdigest() if not self.new_hash == self.original_hash: - print 'original hash: ', self.original_hash - print 'new hash: ', self.new_hash - print 'size: ', self._file.tell() + print('original hash: ', self.original_hash) + print('new hash: ', self.new_hash) + print('size: ', self._file.tell()) return False return True diff --git a/s3tests/roundtrip.py b/s3tests/roundtrip.py index 6486f9c..841439d 100644 --- a/s3tests/roundtrip.py +++ b/s3tests/roundtrip.py @@ -141,12 +141,12 @@ def main(): for name in ['names', 'contents', 'writer', 'reader']: seeds.setdefault(name, rand.randrange(2**32)) - print 'Using random seeds: {seeds}'.format(seeds=seeds) + print('Using random seeds: {seeds}'.format(seeds=seeds)) # setup bucket and other objects bucket_name = common.choose_bucket_prefix(config.roundtrip.bucket, max_len=30) bucket = conn.create_bucket(bucket_name) - print "Created bucket: {name}".format(name=bucket.name) + print("Created bucket: {name}".format(name=bucket.name)) objnames = realistic.names( mean=15, stddev=4, @@ -163,10 +163,10 @@ def main(): logger_g = gevent.spawn(yaml.safe_dump_all, q, stream=real_stdout) - print "Writing {num} objects with {w} workers...".format( + print("Writing {num} objects with {w} workers...".format( num=config.roundtrip.files.num, w=config.roundtrip.writers, - ) + )) pool = gevent.pool.Pool(size=config.roundtrip.writers) start = time.time() for objname in objnames: @@ -186,10 +186,10 @@ def main(): duration=int(round(elapsed * NANOSECOND)), )) - print "Reading {num} objects with {w} workers...".format( + print("Reading {num} objects with {w} workers...".format( num=config.roundtrip.files.num, w=config.roundtrip.readers, - ) + )) # avoid accessing them in the same order as the writing rand.shuffle(objnames) pool = gevent.pool.Pool(size=config.roundtrip.readers) diff --git a/s3tests_boto3/generate_objects.py b/s3tests_boto3/generate_objects.py index 420235a..c25665b 100644 --- a/s3tests_boto3/generate_objects.py +++ b/s3tests_boto3/generate_objects.py @@ -43,7 +43,7 @@ def upload_objects(bucket, files, seed): name_generator = realistic.names(15, 4, seed=seed) for fp in files: - print >> sys.stderr, 'sending file with size %dB' % fp.size + sys.stderr.write('sending file with size %dB\n' % fp.size) key = Key(bucket) key.key = name_generator.next() key.set_contents_from_file(fp, rewind=True) @@ -94,18 +94,18 @@ def _main(): bucket.set_acl('public-read') keys = [] - 
print >> OUTFILE, 'bucket: %s' % bucket.name - print >> sys.stderr, 'setup complete, generating files' + OUTFILE.write('bucket: %s\n' % bucket.name) + sys.stderr.write('setup complete, generating files\n') for profile in common.config.file_generation.groups: seed = random.random() files = get_random_files(profile[0], profile[1], profile[2], seed) keys += upload_objects(bucket, files, seed) - print >> sys.stderr, 'finished sending files. generating urls' + sys.stderr.write('finished sending files. generating urls\n') for key in keys: - print >> OUTFILE, key.generate_url(0, query_auth=False) + OUTFILE.write(key.generate_url(0, query_auth=False) + '\n') - print >> sys.stderr, 'done' + sys.stderr.write('done\n') def main(): diff --git a/s3tests_boto3/readwrite.py b/s3tests_boto3/readwrite.py index 64f490e..2064b47 100644 --- a/s3tests_boto3/readwrite.py +++ b/s3tests_boto3/readwrite.py @@ -57,7 +57,7 @@ def reader(bucket, worker_id, file_names, queue, rand): traceback=traceback.format_exc(), ), ) - print "ERROR:", m + print("ERROR:", m) else: elapsed = end - start result.update( @@ -158,16 +158,16 @@ def main(): for name in ['names', 'contents', 'writer', 'reader']: seeds.setdefault(name, rand.randrange(2**32)) - print 'Using random seeds: {seeds}'.format(seeds=seeds) + print('Using random seeds: {seeds}'.format(seeds=seeds)) # setup bucket and other objects bucket_name = common.choose_bucket_prefix(config.readwrite.bucket, max_len=30) bucket = conn.create_bucket(bucket_name) - print "Created bucket: {name}".format(name=bucket.name) + print("Created bucket: {name}".format(name=bucket.name)) # check flag for deterministic file name creation if not config.readwrite.get('deterministic_file_names'): - print 'Creating random file names' + print('Creating random file names') file_names = realistic.names( mean=15, stddev=4, @@ -176,7 +176,7 @@ def main(): file_names = itertools.islice(file_names, config.readwrite.files.num) file_names = list(file_names) else: - print 'Creating file names that are deterministic' + print('Creating file names that are deterministic') file_names = [] for x in xrange(config.readwrite.files.num): file_names.append('test_file_{num}'.format(num=x)) @@ -191,7 +191,7 @@ def main(): # warmup - get initial set of files uploaded if there are any writers specified if config.readwrite.writers > 0: - print "Uploading initial set of {num} files".format(num=config.readwrite.files.num) + print("Uploading initial set of {num} files".format(num=config.readwrite.files.num)) warmup_pool = gevent.pool.Pool(size=100) for file_name in file_names: fp = next(files) @@ -204,9 +204,9 @@ def main(): warmup_pool.join() # main work - print "Starting main worker loop." 
- print "Using file size: {size} +- {stddev}".format(size=config.readwrite.files.size, stddev=config.readwrite.files.stddev) - print "Spawning {w} writers and {r} readers...".format(w=config.readwrite.writers, r=config.readwrite.readers) + print("Starting main worker loop.") + print("Using file size: {size} +- {stddev}".format(size=config.readwrite.files.size, stddev=config.readwrite.files.stddev)) + print("Spawning {w} writers and {r} readers...".format(w=config.readwrite.writers, r=config.readwrite.readers)) group = gevent.pool.Group() rand_writer = random.Random(seeds['writer']) @@ -246,7 +246,7 @@ def main(): # wait for all the tests to finish group.join() - print 'post-join, queue size {size}'.format(size=q.qsize()) + print('post-join, queue size {size}'.format(size=q.qsize())) if q.qsize() > 0: for temp_dict in q: diff --git a/s3tests_boto3/realistic.py b/s3tests_boto3/realistic.py index 783a754..e4e1aa6 100644 --- a/s3tests_boto3/realistic.py +++ b/s3tests_boto3/realistic.py @@ -47,9 +47,9 @@ class FileValidator(object): self.original_hash, binary = contents[-40:], contents[:-40] self.new_hash = hashlib.sha1(binary).hexdigest() if not self.new_hash == self.original_hash: - print 'original hash: ', self.original_hash - print 'new hash: ', self.new_hash - print 'size: ', self._file.tell() + print('original hash: ', self.original_hash) + print('new hash: ', self.new_hash) + print('size: ', self._file.tell()) return False return True diff --git a/s3tests_boto3/roundtrip.py b/s3tests_boto3/roundtrip.py index 6486f9c..841439d 100644 --- a/s3tests_boto3/roundtrip.py +++ b/s3tests_boto3/roundtrip.py @@ -141,12 +141,12 @@ def main(): for name in ['names', 'contents', 'writer', 'reader']: seeds.setdefault(name, rand.randrange(2**32)) - print 'Using random seeds: {seeds}'.format(seeds=seeds) + print('Using random seeds: {seeds}'.format(seeds=seeds)) # setup bucket and other objects bucket_name = common.choose_bucket_prefix(config.roundtrip.bucket, max_len=30) bucket = conn.create_bucket(bucket_name) - print "Created bucket: {name}".format(name=bucket.name) + print("Created bucket: {name}".format(name=bucket.name)) objnames = realistic.names( mean=15, stddev=4, @@ -163,10 +163,10 @@ def main(): logger_g = gevent.spawn(yaml.safe_dump_all, q, stream=real_stdout) - print "Writing {num} objects with {w} workers...".format( + print("Writing {num} objects with {w} workers...".format( num=config.roundtrip.files.num, w=config.roundtrip.writers, - ) + )) pool = gevent.pool.Pool(size=config.roundtrip.writers) start = time.time() for objname in objnames: @@ -186,10 +186,10 @@ def main(): duration=int(round(elapsed * NANOSECOND)), )) - print "Reading {num} objects with {w} workers...".format( + print("Reading {num} objects with {w} workers...".format( num=config.roundtrip.files.num, w=config.roundtrip.readers, - ) + )) # avoid accessing them in the same order as the writing rand.shuffle(objnames) pool = gevent.pool.Pool(size=config.roundtrip.readers)