[#152] Allow to set mix of policies for containers and buckets #152

Merged
fyrchik merged 1 commit from abereziny/xk6-frostfs:feature-multi-load-policy into master 2024-07-03 07:59:42 +00:00
4 changed files with 24 additions and 17 deletions

View file

@@ -4,15 +4,16 @@ from helpers.cmd import execute_cmd, log
 def create_bucket(endpoint, versioning, location, acl, no_verify_ssl):
+    configuration = ""
     if location:
-        location = f"--create-bucket-configuration 'LocationConstraint={location}'"
+        configuration = f"--create-bucket-configuration 'LocationConstraint={location}'"
     if acl:
         acl = f"--acl {acl}"
     bucket_name = str(uuid.uuid4())
     no_verify_ssl_str = "--no-verify-ssl" if no_verify_ssl else ""
     cmd_line = f"aws {no_verify_ssl_str} s3api create-bucket --bucket {bucket_name} " \
-               f"--endpoint {endpoint} {location} {acl} "
+               f"--endpoint {endpoint} {configuration} {acl} "
     cmd_line_ver = f"aws {no_verify_ssl_str} s3api put-bucket-versioning --bucket {bucket_name} " \
                    f"--versioning-configuration Status=Enabled --endpoint {endpoint} {acl} "
@@ -33,7 +34,7 @@ def create_bucket(endpoint, versioning, location, acl, no_verify_ssl):
     else:
         log(f"Bucket versioning has been applied for bucket {bucket_name}", endpoint)
-    log(f"Created bucket: {bucket_name}", endpoint)
+    log(f"Created bucket: {bucket_name} ({location})", endpoint)
     return bucket_name
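Note on these two hunks: formatting the flag into a separate configuration variable keeps the original location value intact, so the closing log line can report the bare location instead of the full CLI argument. A minimal standalone sketch of the difference (the helper names here are illustrative, not part of the file):

def format_flag_old(location):
    # Old behaviour: location is overwritten with the whole flag string.
    if location:
        location = f"--create-bucket-configuration 'LocationConstraint={location}'"
    return location

def format_flag_new(location):
    # New behaviour: the flag goes into its own variable, location stays as passed.
    configuration = ""
    if location:
        configuration = f"--create-bucket-configuration 'LocationConstraint={location}'"
    return configuration, location

print(format_flag_old("us-east-2"))       # the whole flag string; logging this would be noisy
print(format_flag_new("us-east-2")[1])    # 'us-east-2', which is what the new log appends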

View file

@@ -34,7 +34,7 @@ def create_container(endpoint, policy, wallet_path, config, acl, local=False, de
         raise ValueError(f"no CID was parsed from command output:\t{fst_str}")
     cid = splitted[1]
-    log(f"Created container {cid}", endpoint)
+    log(f"Created container: {cid} ({policy})", endpoint)
     if not local:
         return cid

View file

@ -15,6 +15,7 @@ from helpers.frostfs_cli import create_container, upload_object
ERROR_WRONG_CONTAINERS_COUNT = 1 ERROR_WRONG_CONTAINERS_COUNT = 1
ERROR_WRONG_OBJECTS_COUNT = 2 ERROR_WRONG_OBJECTS_COUNT = 2
MAX_WORKERS = 50 MAX_WORKERS = 50
DEFAULT_POLICY = "REP 2 IN X CBF 2 SELECT 2 FROM * AS X"
parser = argparse.ArgumentParser() parser = argparse.ArgumentParser()
parser.add_argument('--size', help='Upload objects size in kb') parser.add_argument('--size', help='Upload objects size in kb')
@@ -25,8 +26,8 @@ parser.add_argument('--wallet', help='Wallet file path')
 parser.add_argument('--config', help='Wallet config file path')
 parser.add_argument(
     "--policy",
-    help="Container placement policy",
-    default="REP 2 IN X CBF 2 SELECT 2 FROM * AS X"
+    help=f"Container placement policy. Default is {DEFAULT_POLICY}",
+    action="append"
 )
 parser.add_argument('--endpoint', help='Nodes addresses separated by comma.')
 parser.add_argument('--update', help='Save existed containers')
@@ -46,6 +47,8 @@ def main():
     objects_list = []

     endpoints = args.endpoint.split(',')
+    if not args.policy:
+        args.policy = [DEFAULT_POLICY]

     wallet = args.wallet
     wallet_config = args.config
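A standalone sketch of the argparse pattern relied on here (the example policies are made up): with action="append" the attribute is None when the flag is never passed, and a list default given to add_argument would stay in the result even when policies are passed explicitly, which is why the default is applied after parse_args instead.

import argparse

DEFAULT_POLICY = "REP 2 IN X CBF 2 SELECT 2 FROM * AS X"

parser = argparse.ArgumentParser()
# Each occurrence of --policy appends one more entry; no occurrences leaves the value as None.
parser.add_argument("--policy", action="append",
                    help=f"Container placement policy. Default is {DEFAULT_POLICY}")

args = parser.parse_args(["--policy", "REP 1", "--policy", "REP 3"])
print(args.policy)        # ['REP 1', 'REP 3']

args = parser.parse_args([])
if not args.policy:       # None when --policy was never given
    args.policy = [DEFAULT_POLICY]
print(args.policy)        # ['REP 2 IN X CBF 2 SELECT 2 FROM * AS X']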
@@ -63,9 +66,9 @@ def main():
     containers_count = int(args.containers)
     print(f"Create containers: {containers_count}")
     with ProcessPoolExecutor(max_workers=min(MAX_WORKERS, workers)) as executor:
-        containers_runs = [executor.submit(create_container, endpoint, args.policy, wallet, wallet_config, args.acl, args.local)
-                           for _, endpoint in
-                           zip(range(containers_count), cycle(endpoints))]
+        containers_runs = [executor.submit(create_container, endpoint, policy, wallet, wallet_config, args.acl, args.local)
+                           for _, endpoint, policy in
+                           zip(range(containers_count), cycle(endpoints), cycle(args.policy))]

         for run in containers_runs:
             container_id = run.result()
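For illustration, how the three-way zip spreads a mix of policies over the requested number of containers; a standalone sketch with made-up endpoints and policies:

from itertools import cycle

containers_count = 5
endpoints = ["node1:8080", "node2:8080"]
policies = ["REP 2 IN X CBF 2 SELECT 2 FROM * AS X", "REP 1"]

# zip stops at the shortest iterable, i.e. after containers_count items;
# endpoints and policies cycle independently of that cap.
for _, endpoint, policy in zip(range(containers_count), cycle(endpoints), cycle(policies)):
    print(f"{endpoint} -> {policy}")
# node1:8080 -> REP 2 IN X CBF 2 SELECT 2 FROM * AS X
# node2:8080 -> REP 1
# node1:8080 -> REP 2 IN X CBF 2 SELECT 2 FROM * AS X
# node2:8080 -> REP 1
# node1:8080 -> REP 2 IN X CBF 2 SELECT 2 FROM * AS X

Because both sequences cycle in lockstep, endpoint and policy counts that share a common factor pin some combinations (here node1 always gets the first policy); with, say, three policies and two endpoints every combination would eventually appear.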

View file

@@ -11,6 +11,11 @@ from concurrent.futures import ProcessPoolExecutor
 from helpers.cmd import random_payload
 from helpers.aws_cli import create_bucket, upload_object

+ERROR_WRONG_CONTAINERS_COUNT = 1
+ERROR_WRONG_OBJECTS_COUNT = 2
+MAX_WORKERS = 50
+DEFAULT_LOCATION = ""
+
 parser = argparse.ArgumentParser()

 parser.add_argument('--size', help='Upload objects size in kb.')
@@ -20,7 +25,7 @@ parser.add_argument('--preload_obj', help='Number of pre-loaded objects.')
 parser.add_argument('--endpoint', help='S3 Gateways addresses separated by comma.')
 parser.add_argument('--update', help='True/False, False by default. Save existed buckets from target file (--out). '
                                      'New buckets will not be created.')
-parser.add_argument('--location', help='AWS location. Will be empty, if has not be declared.', default="")
+parser.add_argument('--location', help=f'AWS location constraint. Default is "{DEFAULT_LOCATION}"', action="append")
 parser.add_argument('--versioning', help='True/False, False by default.')
 parser.add_argument('--ignore-errors', help='Ignore preset errors', action='store_true')
 parser.add_argument('--no-verify-ssl', help='Ignore SSL verifications', action='store_true')
@@ -32,10 +37,6 @@ parser.add_argument('--acl', help='Bucket ACL. Default is private. Expected valu
 args = parser.parse_args()
 print(args)

-ERROR_WRONG_CONTAINERS_COUNT = 1
-ERROR_WRONG_OBJECTS_COUNT = 2
-MAX_WORKERS = 50
-
 def main():
     buckets = []
     objects_list = []
@@ -43,6 +44,8 @@ def main():
     no_verify_ssl = args.no_verify_ssl
     endpoints = args.endpoint.split(',')
+    if not args.location:
+        args.location = [DEFAULT_LOCATION]

     workers = int(args.workers)
     objects_per_bucket = int(args.preload_obj)
@@ -59,9 +62,9 @@ def main():
     print(f"Create buckets: {buckets_count}")

     with ProcessPoolExecutor(max_workers=min(MAX_WORKERS, workers)) as executor:
-        buckets_runs = [executor.submit(create_bucket, endpoint, args.versioning, args.location, args.acl, no_verify_ssl)
-                        for _, endpoint in
-                        zip(range(buckets_count), cycle(endpoints))]
+        buckets_runs = [executor.submit(create_bucket, endpoint, args.versioning, location, args.acl, no_verify_ssl)
+                        for _, endpoint, location in
+                        zip(range(buckets_count), cycle(endpoints), cycle(args.location))]

         for run in buckets_runs:
             bucket_name = run.result()
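The bucket preset mirrors the container case, cycling the --location list instead. A standalone sketch (endpoints, counts and locations made up) of how an empty entry keeps the existing behaviour of creating a bucket with no LocationConstraint, matching the create_bucket handling in helpers.aws_cli above:

from itertools import cycle

buckets_count = 4
endpoints = ["s3gw1:8080", "s3gw2:8080"]
locations = ["", "us-east-2"]   # "" -> no --create-bucket-configuration flag at all

for _, endpoint, location in zip(range(buckets_count), cycle(endpoints), cycle(locations)):
    configuration = ""
    if location:
        configuration = f"--create-bucket-configuration 'LocationConstraint={location}'"
    # Only the endpoint/location parts of the real command line are shown here.
    print(f"aws s3api create-bucket --endpoint {endpoint} {configuration}".rstrip())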