103 lines
3.9 KiB
Python
Executable file
103 lines
3.9 KiB
Python
Executable file
#!/usr/bin/python3
|
|
|
|
import argparse
|
|
import json
|
|
import sys
|
|
import tempfile
|
|
from concurrent.futures import ProcessPoolExecutor
|
|
|
|
from helpers.cmd import random_payload
|
|
from helpers.aws_cli import create_bucket, upload_object
|
|
|
|
# CLI for the S3 preset: create buckets and pre-load objects through the
# helpers.aws_cli wrappers, recording the result as JSON for later test runs.
parser = argparse.ArgumentParser()

parser.add_argument('--size', help='Upload objects size in kb.')
parser.add_argument('--buckets', help='Number of buckets to create.')
parser.add_argument('--out', help='JSON file with output.')
parser.add_argument('--preload_obj', help='Number of pre-loaded objects.')
parser.add_argument('--endpoint', help='S3 Gateway address.')
# NOTE(review): this value is a plain string, so ANY non-empty value —
# including the literal "False" — is truthy when main() checks `if args.update`.
# Confirm callers pass the flag only when update mode is actually wanted.
parser.add_argument('--update', help='True/False, False by default. Save existed buckets from target file (--out). '
                                     'New buckets will not be created.')
parser.add_argument('--location', help='AWS location. Will be empty, if has not be declared.', default="")
parser.add_argument('--versioning', help='True/False, False by default.')
parser.add_argument('--ignore-errors', help='Ignore preset errors')
# Note: default is an int (50), but a user-supplied value arrives as a string;
# main() normalizes with int(args.workers).
parser.add_argument('--workers', help='Count of workers in preset. Max = 50, Default = 50', default=50)

args = parser.parse_args()
print(args)

# Process exit codes for preset mismatches.
ERROR_WRONG_CONTAINERS_COUNT = 1
ERROR_WRONG_OBJECTS_COUNT = 2
# Hard cap on parallel worker processes regardless of --workers.
MAX_WORKERS = 50
|
|
|
|
def main():
    """Create S3 buckets and pre-load objects into each of them.

    Configuration comes from the module-level ``args``. The resulting bucket
    and object lists are written as JSON to ``args.out``. Exits with
    ``ERROR_WRONG_CONTAINERS_COUNT`` / ``ERROR_WRONG_OBJECTS_COUNT`` when the
    created counts do not match the requested counts, unless --ignore-errors
    is set.
    """
    bucket_list = []
    objects_list = []
    ignore_errors = bool(args.ignore_errors)

    workers = int(args.workers)
    objects_per_bucket = int(args.preload_obj)

    if args.update:
        # Update mode: reuse buckets recorded by a previous run's output file;
        # no new buckets are created.
        with open(args.out) as f:
            data_json = json.load(f)
            bucket_list = data_json['buckets']
            buckets_count = len(bucket_list)
            # Get CID list
    else:
        buckets_count = int(args.buckets)
        print(f"Create buckets: {buckets_count}")

        with ProcessPoolExecutor(max_workers=min(MAX_WORKERS, workers)) as executor:
            buckets_runs = {executor.submit(create_bucket, args.endpoint, args.versioning,
                                            args.location): _ for _ in range(buckets_count)}

            for run in buckets_runs:
                # Await each future once; the original called run.result() twice.
                bucket = run.result()
                if bucket:
                    bucket_list.append(bucket)

        print("Create buckets: Completed")

    print(f" > Buckets: {bucket_list}")
    if buckets_count == 0 or len(bucket_list) != buckets_count:
        print(f"Buckets mismatch in preset: expected {buckets_count}, created {len(bucket_list)}")
        if not ignore_errors:
            sys.exit(ERROR_WRONG_CONTAINERS_COUNT)

    print(f"Upload objects to each bucket: {objects_per_bucket} ")
    # Context manager guarantees the temporary payload file is closed and
    # deleted even if an upload raises (the original never closed it).
    with tempfile.NamedTemporaryFile() as payload_file:
        random_payload(payload_file, args.size)
        print(" > Create random payload: Completed")

        for bucket in bucket_list:
            print(f" > Upload objects for bucket {bucket}")
            with ProcessPoolExecutor(max_workers=min(MAX_WORKERS, workers)) as executor:
                objects_runs = {executor.submit(upload_object, bucket, payload_file.name,
                                                args.endpoint): _ for _ in range(objects_per_bucket)}

                for run in objects_runs:
                    obj = run.result()
                    if obj:
                        objects_list.append({'bucket': bucket, 'object': obj})
            print(f" > Upload objects for bucket {bucket}: Completed")

    print("Upload objects to each bucket: Completed")

    total_objects = objects_per_bucket * buckets_count
    if total_objects > 0 and len(objects_list) != total_objects:
        print(f"Objects mismatch in preset: expected {total_objects}, created {len(objects_list)}")
        if not ignore_errors:
            sys.exit(ERROR_WRONG_OBJECTS_COUNT)

    data = {'buckets': bucket_list, 'objects': objects_list, 'obj_size': args.size + " Kb"}

    with open(args.out, 'w+') as f:
        json.dump(data, f, ensure_ascii=False, indent=2)

    print("Result:")
    print(f" > Total Buckets has been created: {len(bucket_list)}.")
    print(f" > Total Objects has been created: {len(objects_list)}.")
|
# Script entry point: run the preset only when executed directly, not on import.
if __name__ == "__main__":
    main()
|