diff --git a/scenarios/preset/helpers/aws_cli.py b/scenarios/preset/helpers/aws_cli.py
index a0ecb62..befd27b 100644
--- a/scenarios/preset/helpers/aws_cli.py
+++ b/scenarios/preset/helpers/aws_cli.py
@@ -4,8 +4,6 @@ from helpers.cmd import execute_cmd
 
 
 def create_bucket(endpoint, versioning, location):
-    bucket_create_marker = False
-
     if location:
         location = f"--create-bucket-configuration 'LocationConstraint={location}'"
     bucket_name = str(uuid.uuid4())
@@ -17,16 +15,13 @@ def create_bucket(endpoint, versioning, location):
 
     out, success = execute_cmd(cmd_line)
 
-    if not success:
-        if "succeeded and you already own it" in out:
-            bucket_create_marker = True
-        else:
-            print(f" > Bucket {bucket_name} has not been created:\n{out}")
-    else:
-        bucket_create_marker = True
-        print(f"cmd: {cmd_line}")
+    if not success and "succeeded and you already own it" not in out:
+        print(f" > Bucket {bucket_name} has not been created:\n{out}")
+        return False
+
+    print(f"cmd: {cmd_line}")
 
-    if bucket_create_marker and versioning == "True":
+    if versioning == "True":
         out, success = execute_cmd(cmd_line_ver)
         if not success:
             print(f" > Bucket versioning has not been applied for bucket {bucket_name}:\n{out}")
diff --git a/scenarios/preset/preset_grpc.py b/scenarios/preset/preset_grpc.py
index 6b3f834..16b2544 100755
--- a/scenarios/preset/preset_grpc.py
+++ b/scenarios/preset/preset_grpc.py
@@ -12,8 +12,8 @@ from concurrent.futures import ProcessPoolExecutor
 from helpers.cmd import random_payload
 from helpers.frostfs_cli import create_container, upload_object
 
-ERROR_NO_CONTAINERS = 1
-ERROR_NO_OBJECTS = 2
+ERROR_WRONG_CONTAINERS_COUNT = 1
+ERROR_WRONG_OBJECTS_COUNT = 2
 MAX_WORKERS = 50
 
 parser = argparse.ArgumentParser()
@@ -46,17 +46,21 @@ def main():
     wallet = args.wallet
     wallet_config = args.config
     workers = int(args.workers)
+    objects_per_container = int(args.preload_obj)
+    ignore_errors = True if args.ignore_errors else False
 
     if args.update:
         # Open file
         with open(args.out) as f:
             data_json = json.load(f)
             container_list = data_json['containers']
+            containers_count = len(container_list)
     else:
-        print(f"Create containers: {args.containers}")
+        containers_count = int(args.containers)
+        print(f"Create containers: {containers_count}")
         with ProcessPoolExecutor(max_workers=min(MAX_WORKERS, workers)) as executor:
             containers_runs = {executor.submit(create_container, endpoints[random.randrange(len(endpoints))],
-                                               args.policy, wallet, wallet_config): _ for _ in range(int(args.containers))}
+                                               args.policy, wallet, wallet_config): _ for _ in range(containers_count)}
 
         for run in containers_runs:
             if run.result():
@@ -65,10 +69,10 @@ def main():
     print("Create containers: Completed")
     print(f" > Containers: {container_list}")
 
-    if not container_list:
-        print("No containers to work with")
+    if containers_count == 0 or len(container_list) != containers_count:
+        print(f"Containers mismatch in preset: expected {containers_count}, created {len(container_list)}")
         if not ignore_errors:
-            sys.exit(ERROR_NO_CONTAINERS)
+            sys.exit(ERROR_WRONG_CONTAINERS_COUNT)
 
     print(f"Upload objects to each container: {args.preload_obj} ")
     payload_file = tempfile.NamedTemporaryFile()
@@ -79,7 +83,7 @@ def main():
         print(f" > Upload objects for container {container}")
         with ProcessPoolExecutor(max_workers=min(MAX_WORKERS, workers)) as executor:
             objects_runs = {executor.submit(upload_object, container, payload_file.name,
-                                            endpoints[random.randrange(len(endpoints))], wallet, wallet_config): _ for _ in range(int(args.preload_obj))}
+                                            endpoints[random.randrange(len(endpoints))], wallet, wallet_config): _ for _ in range(objects_per_container)}
 
         for run in objects_runs:
             if run.result():
@@ -88,10 +92,11 @@ def main():
 
     print("Upload objects to each container: Completed")
 
-    if int(args.preload_obj) > 0 and not objects_list:
-        print("No objects were uploaded")
+    total_objects = objects_per_container * containers_count
+    if total_objects > 0 and len(objects_list) != total_objects:
+        print(f"Objects mismatch in preset: expected {total_objects}, created {len(objects_list)}")
         if not ignore_errors:
-            sys.exit(ERROR_NO_OBJECTS)
+            sys.exit(ERROR_WRONG_OBJECTS_COUNT)
 
     data = {'containers': container_list, 'objects': objects_list, 'obj_size': args.size + " Kb"}
 
diff --git a/scenarios/preset/preset_s3.py b/scenarios/preset/preset_s3.py
index 140d5ea..f49c929 100755
--- a/scenarios/preset/preset_s3.py
+++ b/scenarios/preset/preset_s3.py
@@ -26,8 +26,8 @@ parser.add_argument('--workers', help='Count of workers in preset. Max = 50, Def
 args = parser.parse_args()
 print(args)
 
-ERROR_NO_BUCKETS = 1
-ERROR_NO_OBJECTS = 2
+ERROR_WRONG_CONTAINERS_COUNT = 1
+ERROR_WRONG_OBJECTS_COUNT = 2
 MAX_WORKERS = 50
 
 def main():
@@ -36,32 +36,36 @@ def main():
     ignore_errors = True if args.ignore_errors else False
     workers = int(args.workers)
 
+    objects_per_bucket = int(args.preload_obj)
+
     if args.update:
         # Open file
         with open(args.out) as f:
             data_json = json.load(f)
             bucket_list = data_json['buckets']
+            buckets_count = len(bucket_list)
             # Get CID list
     else:
-        print(f"Create buckets: {args.buckets}")
+        buckets_count = int(args.buckets)
+        print(f"Create buckets: {buckets_count}")
 
         with ProcessPoolExecutor(max_workers=min(MAX_WORKERS, workers)) as executor:
             buckets_runs = {executor.submit(create_bucket, args.endpoint, args.versioning,
-                                            args.location): _ for _ in range(int(args.buckets))}
+                                            args.location): _ for _ in range(buckets_count)}
 
         for run in buckets_runs:
-            if run.result() is not None:
+            if run.result():
                 bucket_list.append(run.result())
 
     print("Create buckets: Completed")
     print(f" > Buckets: {bucket_list}")
 
-    if not bucket_list:
-        print("No buckets to work with")
+    if buckets_count == 0 or len(bucket_list) != buckets_count:
+        print(f"Buckets mismatch in preset: expected {buckets_count}, created {len(bucket_list)}")
         if not ignore_errors:
-            sys.exit(ERROR_NO_BUCKETS)
+            sys.exit(ERROR_WRONG_CONTAINERS_COUNT)
 
-    print(f"Upload objects to each bucket: {args.preload_obj} ")
+    print(f"Upload objects to each bucket: {objects_per_bucket} ")
     payload_file = tempfile.NamedTemporaryFile()
     random_payload(payload_file, args.size)
     print(" > Create random payload: Completed")
@@ -70,19 +74,20 @@ def main():
         print(f" > Upload objects for bucket {bucket}")
         with ProcessPoolExecutor(max_workers=min(MAX_WORKERS, workers)) as executor:
             objects_runs = {executor.submit(upload_object, bucket, payload_file.name,
-                                            args.endpoint): _ for _ in range(int(args.preload_obj))}
+                                            args.endpoint): _ for _ in range(objects_per_bucket)}
 
         for run in objects_runs:
-            if run.result() is not None:
+            if run.result():
                 objects_list.append({'bucket': bucket, 'object': run.result()})
         print(f" > Upload objects for bucket {bucket}: Completed")
 
     print("Upload objects to each bucket: Completed")
 
-    if int(args.preload_obj) > 0 and not objects_list:
-        print("No objects were uploaded")
+    total_objects = objects_per_bucket * buckets_count
+    if total_objects > 0 and len(objects_list) != total_objects:
+        print(f"Objects mismatch in preset: expected {total_objects}, created {len(objects_list)}")
         if not ignore_errors:
-            sys.exit(ERROR_NO_OBJECTS)
+            sys.exit(ERROR_WRONG_OBJECTS_COUNT)
 
     data = {'buckets': bucket_list, 'objects': objects_list, 'obj_size': args.size + " Kb"}
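Note: both presets now apply the same "expected vs. created" check instead of only detecting an empty result list. A minimal standalone sketch of that pattern follows; the check_preset_results helper is hypothetical and not part of this patch, it only mirrors the messages and exit codes introduced above.

import sys

ERROR_WRONG_CONTAINERS_COUNT = 1
ERROR_WRONG_OBJECTS_COUNT = 2

def check_preset_results(expected, created, what, exit_code, ignore_errors=False):
    # Fail when nothing was requested or when the number of created items
    # differs from the number requested, unless errors are explicitly ignored.
    if expected == 0 or len(created) != expected:
        print(f"{what} mismatch in preset: expected {expected}, created {len(created)}")
        if not ignore_errors:
            sys.exit(exit_code)

# Example: 10 containers requested but only 8 created -> exits with code 1.
check_preset_results(10, ["cid"] * 8, "Containers", ERROR_WRONG_CONTAINERS_COUNT)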