[#30] preset: PEP 8 refactor

Signed-off-by: Pavel Karpy <carpawell@nspcc.ru>
Author: Pavel Karpy, 2022-10-25 17:43:20 +03:00
Committed by: fyrchik
Parent: eaff41c055
Commit: 0088fbd7d6
2 changed files with 33 additions and 36 deletions

File 1 of 2 (container preset script):

@@ -27,7 +27,7 @@ print(args)
 def main():
     container_list = []
     objects_struct = []
-    payload_filepath='/tmp/data_file'
+    payload_filepath = '/tmp/data_file'
 
     if args.update:
         # Open file
@@ -54,20 +54,20 @@ def main():
     for container in container_list:
         print(f" > Upload objects for container {container}")
         with ProcessPoolExecutor(max_workers=50) as executor:
-            objects_runs = {executor.submit(upload_object, container, payload_filepath): _ for _ in range(int(args.preload_obj))}
+            objects_runs = {executor.submit(upload_object, container, payload_filepath): _ for _ in
+                            range(int(args.preload_obj))}
             for run in objects_runs:
                 if run.result() is not None:
                     objects_struct.append({'container': container, 'object': run.result()})
         print(f" > Upload objects for container {container}: Completed")
     print("Upload objects to each container: Completed")
 
-    data = { 'containers': container_list, 'objects': objects_struct, 'obj_size': args.size + " Kb" }
+    data = {'containers': container_list, 'objects': objects_struct, 'obj_size': args.size + " Kb"}
 
     with open(args.out, 'w') as f:
         json.dump(data, f, ensure_ascii=False)
 
     print(f"Result:")
     print(f" > Total Containers has been created: {len(container_list)}.")
@@ -75,8 +75,8 @@ def main():
 def random_payload(payload_filepath):
-    with open('%s'%payload_filepath, 'wb') as fout:
-        fout.write(os.urandom(1024*int(args.size)))
+    with open('%s' % payload_filepath, 'wb') as fout:
+        fout.write(os.urandom(1024 * int(args.size)))
 
 
 def execute_cmd(cmd_line):
@@ -104,6 +104,7 @@ def create_container():
         fst_str = output.split('\n')[0]
     except Exception:
         print(f"Got empty output: {output}")
+        return
     splitted = fst_str.split(": ")
     if len(splitted) != 2:
         raise ValueError(f"no CID was parsed from command output: \t{fst_str}")
@@ -123,6 +124,7 @@ def upload_object(container, payload_filepath):
         snd_str = out.split('\n')[1]
     except:
         print(f"Got empty input: {out}")
+        return
     splitted = snd_str.split(": ")
     if len(splitted) != 2:
         raise Exception(f"no OID was parsed from command output: \t{snd_str}")

File 2 of 2 (S3 preset script):

@@ -1,16 +1,14 @@
 #!/usr/bin/python3
-from multiprocessing import Process
-import uuid
-import shlex
-from subprocess import check_output, CalledProcessError, STDOUT
+import argparse
 import json
 import os
-import argparse, sys
+import shlex
+import uuid
 from concurrent.futures import ProcessPoolExecutor
+from subprocess import check_output, CalledProcessError, STDOUT
 
-parser=argparse.ArgumentParser()
+parser = argparse.ArgumentParser()
 parser.add_argument('--size', help='Upload objects size in kb.')
 parser.add_argument('--buckets', help='Number of buckets to create.')
@@ -21,15 +19,14 @@ parser.add_argument('--update', help='True/False, False by default. Save existed
 parser.add_argument('--location', help='AWS location. Will be empty, if has not be declared.')
 parser.add_argument('--versioning', help='True/False, False by default.')
-args=parser.parse_args()
+args = parser.parse_args()
 print(args)
 
 
 def main():
     bucket_list = []
     objects_struct = []
-    payload_filepath='/tmp/data_file'
+    payload_filepath = '/tmp/data_file'
 
     if args.update:
         # Open file
@ -65,13 +62,12 @@ def main():
objects_struct.append({'bucket': bucket, 'object': run.result()}) objects_struct.append({'bucket': bucket, 'object': run.result()})
print(f" > Upload objects for bucket {bucket}: Completed") print(f" > Upload objects for bucket {bucket}: Completed")
print("Upload objects to each bucket: Completed") print("Upload objects to each bucket: Completed")
data = { 'buckets': bucket_list, 'objects': objects_struct, 'obj_size': args.size + " Kb" } data = {'buckets': bucket_list, 'objects': objects_struct, 'obj_size': args.size + " Kb"}
with open(args.out, 'w') as f: with open(args.out, 'w') as f:
json.dump(data, f, ensure_ascii=False) json.dump(data, f, ensure_ascii=False)
print(f"Result:") print(f"Result:")
print(f" > Total Buckets has been created: {len(bucket_list)}.") print(f" > Total Buckets has been created: {len(bucket_list)}.")
@@ -79,8 +75,9 @@ def main():
 def random_payload(payload_filepath):
-    with open('%s'%payload_filepath, 'wb') as fout:
-        fout.write(os.urandom(1024*int(args.size)))
+    with open('%s' % payload_filepath, 'wb') as fout:
+        fout.write(os.urandom(1024 * int(args.size)))
 
 
 def execute_cmd(cmd_line):
     args = shlex.split(cmd_line)
@@ -118,7 +115,7 @@ def create_bucket():
         bucket_create_marker = True
         print(f"cmd: {cmd_line}")
 
-    if (bucket_create_marker == True and args.versioning == "True"):
+    if bucket_create_marker and args.versioning == "True":
         out, success = execute_cmd(cmd_line_ver)
         if not success:
             print(f" > Bucket versioning has not been applied for bucket {bucket_name}.")
@@ -131,7 +128,7 @@ def create_bucket():
 
 def upload_object(bucket, payload_filepath):
     object_name = str(uuid.uuid4())
-    cmd_line = f"aws s3api put-object --bucket {bucket} --key {object_name} --body {payload_filepath} --endpoint http://{args.endpoint} "
+    cmd_line = f"aws s3api put-object --bucket {bucket} --key {object_name} --body {payload_filepath} --endpoint http://{args.endpoint}"
 
     out, success = execute_cmd(cmd_line)
     if not success:
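For context, only the first lines of execute_cmd appear as context in this diff; judging by the imports (shlex, check_output, CalledProcessError, STDOUT) and the out, success unpacking above, it follows the usual shell-out pattern. A hedged sketch of that pattern, assuming the (output, success) return contract rather than the script's exact implementation:

import shlex
from subprocess import CalledProcessError, STDOUT, check_output


def execute_cmd(cmd_line):
    # Tokenize the command the way a POSIX shell would, run it, and capture
    # stdout+stderr; return (output, success) as the callers above expect.
    cmd_args = shlex.split(cmd_line)
    try:
        output = check_output(cmd_args, stderr=STDOUT).decode()
        return output, True
    except CalledProcessError as exc:
        return exc.output.decode(), False

Passing the token list to check_output avoids spawning a shell, so the f-string-built AWS CLI command is not re-interpreted by /bin/sh.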
@@ -142,5 +139,3 @@ def upload_object(bucket, payload_filepath):
 
 if __name__ == "__main__":
     main()