From 0088fbd7d61206d9861f47ed7924f02ace7c8b19 Mon Sep 17 00:00:00 2001
From: Pavel Karpy
Date: Tue, 25 Oct 2022 17:43:20 +0300
Subject: [PATCH] [#30] preset: PEP 8 refactor

Signed-off-by: Pavel Karpy
---
 scenarios/preset/preset_grpc.py | 22 ++++++++-------
 scenarios/preset/preset_s3.py   | 47 +++++++++++++++------------------
 2 files changed, 33 insertions(+), 36 deletions(-)

diff --git a/scenarios/preset/preset_grpc.py b/scenarios/preset/preset_grpc.py
index b4e0c5d..09ff41c 100755
--- a/scenarios/preset/preset_grpc.py
+++ b/scenarios/preset/preset_grpc.py
@@ -27,7 +27,7 @@ print(args)
 def main():
     container_list = []
     objects_struct = []
-    payload_filepath='/tmp/data_file'
+    payload_filepath = '/tmp/data_file'
 
     if args.update:
         # Open file
@@ -41,7 +41,7 @@ def main():
 
         for run in containers_runs:
             if run.result() is not None:
-                container_list.append(run.result()) 
+                container_list.append(run.result())
 
     print("Create containers: Completed")
 
@@ -54,20 +54,20 @@ def main():
     for container in container_list:
         print(f" > Upload objects for container {container}")
         with ProcessPoolExecutor(max_workers=50) as executor:
-            objects_runs = {executor.submit(upload_object, container, payload_filepath): _ for _ in range(int(args.preload_obj))}
+            objects_runs = {executor.submit(upload_object, container, payload_filepath): _ for _ in
+                            range(int(args.preload_obj))}
 
         for run in objects_runs:
             if run.result() is not None:
-                objects_struct.append({'container': container, 'object': run.result()}) 
+                objects_struct.append({'container': container, 'object': run.result()})
 
         print(f" > Upload objects for container {container}: Completed")
-
     print("Upload objects to each container: Completed")
 
-    data = { 'containers': container_list, 'objects': objects_struct, 'obj_size': args.size + " Kb" }
+    data = {'containers': container_list, 'objects': objects_struct, 'obj_size': args.size + " Kb"}
 
     with open(args.out, 'w') as f:
-        json.dump(data, f, ensure_ascii=False) 
+        json.dump(data, f, ensure_ascii=False)
 
     print(f"Result:")
     print(f" > Total Containers has been created: {len(container_list)}.")
@@ -75,8 +75,8 @@ def main():
 
 
 def random_payload(payload_filepath):
-    with open('%s'%payload_filepath, 'wb') as fout:
-        fout.write(os.urandom(1024*int(args.size)))
+    with open('%s' % payload_filepath, 'wb') as fout:
+        fout.write(os.urandom(1024 * int(args.size)))
 
 
 def execute_cmd(cmd_line):
@@ -84,7 +84,7 @@
     output = ""
     try:
         output = check_output(args, stderr=STDOUT).decode()
-        success = True 
+        success = True
     except CalledProcessError as e:
         output = e.output.decode()
 
@@ -104,6 +104,7 @@ def create_container():
         fst_str = output.split('\n')[0]
     except Exception:
         print(f"Got empty output: {output}")
+        return
     splitted = fst_str.split(": ")
     if len(splitted) != 2:
         raise ValueError(f"no CID was parsed from command output: \t{fst_str}")
@@ -123,6 +124,7 @@ def upload_object(container, payload_filepath):
         snd_str = out.split('\n')[1]
     except:
         print(f"Got empty input: {out}")
+        return
     splitted = snd_str.split(": ")
     if len(splitted) != 2:
         raise Exception(f"no OID was parsed from command output: \t{snd_str}")
diff --git a/scenarios/preset/preset_s3.py b/scenarios/preset/preset_s3.py
index c74eb19..2685be6 100755
--- a/scenarios/preset/preset_s3.py
+++ b/scenarios/preset/preset_s3.py
@@ -1,16 +1,14 @@
 #!/usr/bin/python3
 
-from multiprocessing import Process
-import uuid
-import shlex
-from subprocess import check_output, CalledProcessError, STDOUT
+import argparse
 import json
-import os
-import argparse, sys
-
+import os
+import shlex
+import uuid
 from concurrent.futures import ProcessPoolExecutor
+from subprocess import check_output, CalledProcessError, STDOUT
 
-parser=argparse.ArgumentParser()
+parser = argparse.ArgumentParser()
 
 parser.add_argument('--size', help='Upload objects size in kb.')
 parser.add_argument('--buckets', help='Number of buckets to create.')
@@ -21,15 +19,14 @@ parser.add_argument('--update', help='True/False, False by default. Save existed
 parser.add_argument('--location', help='AWS location. Will be empty, if has not be declared.')
 parser.add_argument('--versioning', help='True/False, False by default.')
 
-args=parser.parse_args()
+args = parser.parse_args()
 print(args)
 
 
 def main():
     bucket_list = []
     objects_struct = []
-    payload_filepath='/tmp/data_file'
-
+    payload_filepath = '/tmp/data_file'
 
     if args.update:
         # Open file
@@ -39,13 +36,13 @@ def main():
     # Get CID list
     else:
         print(f"Create buckets: {args.buckets}")
-        
+
         with ProcessPoolExecutor(max_workers=10) as executor:
             buckets_runs = {executor.submit(create_bucket): _ for _ in range(int(args.buckets))}
 
         for run in buckets_runs:
             if run.result() is not None:
-                bucket_list.append(run.result()) 
+                bucket_list.append(run.result())
 
     print("Create buckets: Completed")
 
@@ -62,16 +59,15 @@
 
         for run in objects_runs:
             if run.result() is not None:
-                objects_struct.append({'bucket': bucket, 'object': run.result()}) 
+                objects_struct.append({'bucket': bucket, 'object': run.result()})
 
         print(f" > Upload objects for bucket {bucket}: Completed")
-
     print("Upload objects to each bucket: Completed")
 
-    data = { 'buckets': bucket_list, 'objects': objects_struct, 'obj_size': args.size + " Kb" }
+    data = {'buckets': bucket_list, 'objects': objects_struct, 'obj_size': args.size + " Kb"}
 
     with open(args.out, 'w') as f:
-        json.dump(data, f, ensure_ascii=False) 
+        json.dump(data, f, ensure_ascii=False)
 
     print(f"Result:")
     print(f" > Total Buckets has been created: {len(bucket_list)}.")
@@ -79,15 +75,16 @@ def main():
 
 
 def random_payload(payload_filepath):
-    with open('%s'%payload_filepath, 'wb') as fout:
-        fout.write(os.urandom(1024*int(args.size)))
+    with open('%s' % payload_filepath, 'wb') as fout:
+        fout.write(os.urandom(1024 * int(args.size)))
+
 
 def execute_cmd(cmd_line):
     args = shlex.split(cmd_line)
     output = ""
     try:
         output = check_output(args, stderr=STDOUT).decode()
-        success = True 
+        success = True
     except CalledProcessError as e:
         output = e.output.decode()
 
@@ -98,7 +95,7 @@ def execute_cmd(cmd_line):
 
 def create_bucket():
     bucket_create_marker = False
-    
+
     location = ""
     if args.location:
         location = f"--create-bucket-configuration 'LocationConstraint={args.location}'"
@@ -108,7 +105,7 @@
     cmd_line_ver = f"aws --no-verify-ssl s3api put-bucket-versioning --bucket {bucket_name} --versioning-configuration Status=Enabled --endpoint http://{args.endpoint} "
 
     out, success = execute_cmd(cmd_line)
-    
+
     if not success:
         if "succeeded and you already own it" in out:
             bucket_create_marker = True
@@ -118,7 +115,7 @@
             bucket_create_marker = True
             print(f"cmd: {cmd_line}")
 
-    if (bucket_create_marker == True and args.versioning == "True"):
+    if bucket_create_marker and args.versioning == "True":
         out, success = execute_cmd(cmd_line_ver)
         if not success:
             print(f" > Bucket versioning has not been applied for bucket {bucket_name}.")
@@ -131,7 +128,7 @@ def create_bucket():
 
 def upload_object(bucket, payload_filepath):
     object_name = str(uuid.uuid4())
-    cmd_line = f"aws s3api put-object --bucket {bucket} --key {object_name} --body {payload_filepath} --endpoint http://{args.endpoint} "
+    cmd_line = f"aws s3api put-object --bucket {bucket} --key {object_name} --body {payload_filepath} --endpoint http://{args.endpoint}"
 
     out, success = execute_cmd(cmd_line)
     if not success:
@@ -142,5 +139,3 @@ def upload_object(bucket, payload_filepath):
 
 if __name__ == "__main__":
     main()
-
-
\ No newline at end of file
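Illustration (not part of the patch, appended after the diff so it stays applicable): the
`return` statements added in create_container() and upload_object() make a failed parse
produce None, which is exactly what the `if run.result() is not None` checks in main()
skip when collecting results from the ProcessPoolExecutor. Below is a minimal, simplified
sketch of that pattern; the names parse_id() and collect() are hypothetical and do not
appear in the scenarios, and the real helpers raise on a malformed line, returning None
only when the command output is empty.

from concurrent.futures import ProcessPoolExecutor


def parse_id(output):
    # Simplified stand-in for the CID/OID parsing in create_container()/upload_object().
    fst_str = output.split('\n')[0]
    splitted = fst_str.split(": ")
    if len(splitted) != 2:
        return None  # early return: the future's result is None and gets filtered out
    return splitted[1]


def collect(outputs):
    results = []
    with ProcessPoolExecutor(max_workers=4) as executor:
        runs = {executor.submit(parse_id, out): out for out in outputs}
    for run in runs:
        if run.result() is not None:  # same None filter as in main()
            results.append(run.result())
    return results


if __name__ == "__main__":
    print(collect(["container ID: ExampleCID", "unexpected output"]))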