[#67] Fail k6 if preset fails

Signed-off-by: Andrey Berezin <a.berezin@yadro.com>
Author: Andrey Berezin, 2023-05-23 19:04:38 +03:00 (committed by Evgenii Stratonikov)
Parent: 925fe3ec83
Commit: 6151005b4d
3 changed files with 41 additions and 36 deletions
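
The gist of the change: both preset scripts now track how many containers/buckets and objects they were asked to create, compare that with what was actually created, and exit with a dedicated non-zero code unless --ignore-errors is passed, so the k6 run that depends on the preset fails instead of starting against incomplete data. A minimal sketch of that guard, using a hypothetical helper name (check_preset_count does not exist in the repository; the pattern is condensed from the diffs below):

import sys

ERROR_WRONG_CONTAINERS_COUNT = 1


def check_preset_count(kind, expected, created, ignore_errors, error_code):
    # Nothing requested, or fewer/more items created than requested, counts as a failed preset.
    if expected == 0 or created != expected:
        print(f"{kind} mismatch in preset: expected {expected}, created {created}")
        if not ignore_errors:
            sys.exit(error_code)


# Example mirroring the grpc preset: abort with exit code 1 when only 8 of 10 containers exist.
check_preset_count("Containers", expected=10, created=8,
                   ignore_errors=False, error_code=ERROR_WRONG_CONTAINERS_COUNT)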


@@ -4,8 +4,6 @@ from helpers.cmd import execute_cmd
 def create_bucket(endpoint, versioning, location):
-    bucket_create_marker = False
-
     if location:
         location = f"--create-bucket-configuration 'LocationConstraint={location}'"
     bucket_name = str(uuid.uuid4())
@@ -17,16 +15,13 @@ def create_bucket(endpoint, versioning, location):
     out, success = execute_cmd(cmd_line)
 
-    if not success:
-        if "succeeded and you already own it" in out:
-            bucket_create_marker = True
-        else:
-            print(f" > Bucket {bucket_name} has not been created:\n{out}")
-    else:
-        bucket_create_marker = True
-        print(f"cmd: {cmd_line}")
+    if not success and "succeeded and you already own it" not in out:
+        print(f" > Bucket {bucket_name} has not been created:\n{out}")
+        return False
+
+    print(f"cmd: {cmd_line}")
 
-    if bucket_create_marker and versioning == "True":
+    if versioning == "True":
         out, success = execute_cmd(cmd_line_ver)
         if not success:
             print(f" > Bucket versioning has not been applied for bucket {bucket_name}:\n{out}")


@@ -12,8 +12,8 @@ from concurrent.futures import ProcessPoolExecutor
 from helpers.cmd import random_payload
 from helpers.frostfs_cli import create_container, upload_object
 
-ERROR_NO_CONTAINERS = 1
-ERROR_NO_OBJECTS = 2
+ERROR_WRONG_CONTAINERS_COUNT = 1
+ERROR_WRONG_OBJECTS_COUNT = 2
 
 MAX_WORKERS = 50
 
 parser = argparse.ArgumentParser()
@@ -46,17 +46,21 @@ def main():
     wallet = args.wallet
     wallet_config = args.config
     workers = int(args.workers)
+    objects_per_container = int(args.preload_obj)
     ignore_errors = True if args.ignore_errors else False
 
     if args.update:
         # Open file
         with open(args.out) as f:
             data_json = json.load(f)
             container_list = data_json['containers']
+            containers_count = len(container_list)
     else:
-        print(f"Create containers: {args.containers}")
+        containers_count = int(args.containers)
+        print(f"Create containers: {containers_count}")
 
         with ProcessPoolExecutor(max_workers=min(MAX_WORKERS, workers)) as executor:
             containers_runs = {executor.submit(create_container, endpoints[random.randrange(len(endpoints))],
-                                               args.policy, wallet, wallet_config): _ for _ in range(int(args.containers))}
+                                               args.policy, wallet, wallet_config): _ for _ in range(containers_count)}
             for run in containers_runs:
                 if run.result():
@@ -65,10 +69,10 @@ def main():
     print("Create containers: Completed")
     print(f" > Containers: {container_list}")
 
-    if not container_list:
-        print("No containers to work with")
+    if containers_count == 0 or len(container_list) != containers_count:
+        print(f"Containers mismatch in preset: expected {containers_count}, created {len(container_list)}")
         if not ignore_errors:
-            sys.exit(ERROR_NO_CONTAINERS)
+            sys.exit(ERROR_WRONG_CONTAINERS_COUNT)
 
     print(f"Upload objects to each container: {args.preload_obj} ")
     payload_file = tempfile.NamedTemporaryFile()
@@ -79,7 +83,7 @@ def main():
         print(f" > Upload objects for container {container}")
         with ProcessPoolExecutor(max_workers=min(MAX_WORKERS, workers)) as executor:
             objects_runs = {executor.submit(upload_object, container, payload_file.name,
-                                            endpoints[random.randrange(len(endpoints))], wallet, wallet_config): _ for _ in range(int(args.preload_obj))}
+                                            endpoints[random.randrange(len(endpoints))], wallet, wallet_config): _ for _ in range(objects_per_container)}
             for run in objects_runs:
                 if run.result():
@@ -88,10 +92,11 @@ def main():
     print("Upload objects to each container: Completed")
 
-    if int(args.preload_obj) > 0 and not objects_list:
-        print("No objects were uploaded")
+    total_objects = objects_per_container * containers_count
+    if total_objects > 0 and len(objects_list) != total_objects:
+        print(f"Objects mismatch in preset: expected {total_objects}, created {len(objects_list)}")
         if not ignore_errors:
-            sys.exit(ERROR_NO_OBJECTS)
+            sys.exit(ERROR_WRONG_OBJECTS_COUNT)
 
     data = {'containers': container_list, 'objects': objects_list, 'obj_size': args.size + " Kb"}


@@ -26,8 +26,8 @@ parser.add_argument('--workers', help='Count of workers in preset. Max = 50, Def
 args = parser.parse_args()
 print(args)
 
-ERROR_NO_BUCKETS = 1
-ERROR_NO_OBJECTS = 2
+ERROR_WRONG_CONTAINERS_COUNT = 1
+ERROR_WRONG_OBJECTS_COUNT = 2
 MAX_WORKERS = 50
 
 def main():
@@ -36,32 +36,36 @@ def main():
     ignore_errors = True if args.ignore_errors else False
     workers = int(args.workers)
+    objects_per_bucket = int(args.preload_obj)
 
     if args.update:
         # Open file
         with open(args.out) as f:
             data_json = json.load(f)
             bucket_list = data_json['buckets']
+            buckets_count = len(bucket_list)
         # Get CID list
     else:
-        print(f"Create buckets: {args.buckets}")
+        buckets_count = int(args.buckets)
+        print(f"Create buckets: {buckets_count}")
 
         with ProcessPoolExecutor(max_workers=min(MAX_WORKERS, workers)) as executor:
            buckets_runs = {executor.submit(create_bucket, args.endpoint, args.versioning,
-                                           args.location): _ for _ in range(int(args.buckets))}
+                                           args.location): _ for _ in range(buckets_count)}
            for run in buckets_runs:
-               if run.result() is not None:
+               if run.result():
                    bucket_list.append(run.result())
 
     print("Create buckets: Completed")
     print(f" > Buckets: {bucket_list}")
 
-    if not bucket_list:
-        print("No buckets to work with")
+    if buckets_count == 0 or len(bucket_list) != buckets_count:
+        print(f"Buckets mismatch in preset: expected {buckets_count}, created {len(bucket_list)}")
         if not ignore_errors:
-            sys.exit(ERROR_NO_BUCKETS)
+            sys.exit(ERROR_WRONG_CONTAINERS_COUNT)
 
-    print(f"Upload objects to each bucket: {args.preload_obj} ")
+    print(f"Upload objects to each bucket: {objects_per_bucket} ")
     payload_file = tempfile.NamedTemporaryFile()
     random_payload(payload_file, args.size)
     print(" > Create random payload: Completed")
@@ -70,19 +74,20 @@ def main():
         print(f" > Upload objects for bucket {bucket}")
         with ProcessPoolExecutor(max_workers=min(MAX_WORKERS, workers)) as executor:
             objects_runs = {executor.submit(upload_object, bucket, payload_file.name,
-                                            args.endpoint): _ for _ in range(int(args.preload_obj))}
+                                            args.endpoint): _ for _ in range(objects_per_bucket)}
             for run in objects_runs:
-                if run.result() is not None:
+                if run.result():
                     objects_list.append({'bucket': bucket, 'object': run.result()})
         print(f" > Upload objects for bucket {bucket}: Completed")
 
     print("Upload objects to each bucket: Completed")
 
-    if int(args.preload_obj) > 0 and not objects_list:
-        print("No objects were uploaded")
+    total_objects = objects_per_bucket * buckets_count
+    if total_objects > 0 and len(objects_list) != total_objects:
+        print(f"Objects mismatch in preset: expected {total_objects}, created {len(objects_list)}")
         if not ignore_errors:
-            sys.exit(ERROR_NO_OBJECTS)
+            sys.exit(ERROR_WRONG_OBJECTS_COUNT)
 
     data = {'buckets': bucket_list, 'objects': objects_list, 'obj_size': args.size + " Kb"}
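
Because the failure is now reported through the process exit code, whatever launches the preset ahead of k6 can simply stop when the preset exits non-zero. A small sketch of such a wrapper (the script name and flag spellings are assumptions based on the argparse attributes visible above, not taken from the repository):

import subprocess
import sys

# Run the grpc preset first; the flags correspond to args.containers, args.preload_obj and args.out.
result = subprocess.run(
    [sys.executable, "preset_grpc.py",
     "--containers", "10", "--preload_obj", "100", "--out", "grpc.json"],
    check=False,
)

# With this change the preset exits non-zero on a containers/objects mismatch,
# so we can abort here instead of starting k6 against an incomplete preset.
if result.returncode != 0:
    sys.exit(result.returncode)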