diff --git a/internal/s3/client.go b/internal/s3/client.go
index a503f45..7c42f0e 100644
--- a/internal/s3/client.go
+++ b/internal/s3/client.go
@@ -142,6 +142,8 @@ func (c *Client) Get(bucket, key string) GetResponse {
 	return GetResponse{Success: true}
 }
 
+// DeleteObjectVersion deletes the object version with the specified versionID.
+// If version is empty, it deletes all versions and delete markers of the specified object.
 func (c *Client) DeleteObjectVersion(bucket, key, version string) DeleteResponse {
 	var toDelete []types.ObjectIdentifier
 
diff --git a/scenarios/preset/helpers/aws_cli.py b/scenarios/preset/helpers/aws_cli.py
index f59d629..b5817d7 100644
--- a/scenarios/preset/helpers/aws_cli.py
+++ b/scenarios/preset/helpers/aws_cli.py
@@ -15,7 +15,7 @@ def create_bucket(endpoint, versioning, location, acl, no_verify_ssl):
     cmd_line = f"aws {no_verify_ssl_str} s3api create-bucket --bucket {bucket_name} " \
                f"--endpoint {endpoint} {configuration} {acl} "
     cmd_line_ver = f"aws {no_verify_ssl_str} s3api put-bucket-versioning --bucket {bucket_name} " \
-                   f"--versioning-configuration Status=Enabled --endpoint {endpoint} {acl} "
+                   f"--versioning-configuration Status=Enabled --endpoint {endpoint}"
 
     output, success = execute_cmd(cmd_line)
 
@@ -25,7 +25,7 @@ def create_bucket(endpoint, versioning, location, acl, no_verify_ssl):
             f"Error: {output}", endpoint)
         return False
 
-    if versioning == "True":
+    if versioning:
         output, success = execute_cmd(cmd_line_ver)
         if not success:
             log(f"{cmd_line_ver}\n"
diff --git a/scenarios/preset/preset_s3.py b/scenarios/preset/preset_s3.py
index ec9edc4..f8f6f77 100755
--- a/scenarios/preset/preset_s3.py
+++ b/scenarios/preset/preset_s3.py
@@ -13,6 +13,7 @@ from helpers.aws_cli import create_bucket, upload_object
 
 ERROR_WRONG_CONTAINERS_COUNT = 1
 ERROR_WRONG_OBJECTS_COUNT = 2
+ERROR_WRONG_PERCENTAGE = 3
 MAX_WORKERS = 50
 
 DEFAULT_LOCATION = ""
@@ -26,7 +27,8 @@ parser.add_argument('--endpoint', help='S3 Gateways addresses separated by comma
 parser.add_argument('--update', help='True/False, False by default. Save existed buckets from target file (--out). '
                                      'New buckets will not be created.')
 parser.add_argument('--location', help=f'AWS location constraint. Default is "{DEFAULT_LOCATION}"', action="append")
-parser.add_argument('--versioning', help='True/False, False by default.')
+parser.add_argument('--versioning', help='True/False, False by default. Alias for --buckets_versioned=100')
+parser.add_argument('--buckets_versioned', help='Percent of versioned buckets. Default is 0', default=0)
 parser.add_argument('--ignore-errors', help='Ignore preset errors', action='store_true')
 parser.add_argument('--no-verify-ssl', help='Ignore SSL verifications', action='store_true')
 parser.add_argument('--workers', help='Count of workers in preset. Max = 50, Default = 50', default=50)
@@ -62,8 +64,17 @@ def main():
     print(f"Create buckets: {buckets_count}")
 
     with ProcessPoolExecutor(max_workers=min(MAX_WORKERS, workers)) as executor:
-        buckets_runs = [executor.submit(create_bucket, endpoint, args.versioning, location, args.acl, no_verify_ssl)
-                        for _, endpoint, location in
+        if not 0 <= int(args.buckets_versioned) <= 100:
+            print(f"Percent of versioned buckets must be between 0 and 100: got {args.buckets_versioned}")
+            if not ignore_errors:
+                sys.exit(ERROR_WRONG_PERCENTAGE)
+        if args.versioning == "True":
+            versioning_per_bucket = [True] * buckets_count
+        else:
+            num_versioned_buckets = int((int(args.buckets_versioned) / 100) * buckets_count)
+            versioning_per_bucket = [True] * num_versioned_buckets + [False] * (buckets_count - num_versioned_buckets)
+        buckets_runs = [executor.submit(create_bucket, endpoint, versioning_per_bucket[i], location, args.acl, no_verify_ssl)
+                        for i, endpoint, location in
                         zip(range(buckets_count), cycle(endpoints), cycle(args.location))]
 
         for run in buckets_runs:
diff --git a/scenarios/run_scenarios.md b/scenarios/run_scenarios.md
index d0e8330..091adc9 100644
--- a/scenarios/run_scenarios.md
+++ b/scenarios/run_scenarios.md
@@ -125,7 +125,7 @@ The tests will use all pre-created buckets for PUT operations and all pre-create
    $ ./scenarios/preset/preset_s3.py --size 1024 --buckets 1 --out s3_1024kb.json --endpoint host1:8084 --preload_obj 500 --location load-1-4
    ```
    * '--location' - specify the name of container policy (from policy.json file). It's important to run 'aws configure' each time when the policy file has been changed to pick up the latest policies.
-
+   * '--buckets_versioned' - specify the percentage of versioned buckets out of the total number of created buckets. Default is 0.
 
 3. Execute scenario with options:
    ```shell
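
Below is a minimal, standalone sketch of the distribution logic this patch adds to `preset_s3.py`: the `--buckets_versioned` percentage is turned into one versioning flag per bucket, with `--versioning True` kept as an alias for 100%. The helper name `versioning_flags` and the example endpoint values are illustrative only; the patch itself inlines this arithmetic in `main()` and passes each flag to `create_bucket`.

```python
# Illustrative sketch only: the patch inlines this logic in preset_s3.py's main();
# the helper name and example values below are hypothetical.
from itertools import cycle
from typing import List


def versioning_flags(buckets_count: int, buckets_versioned: int, versioning: bool = False) -> List[bool]:
    """Return one versioning flag per bucket to be created."""
    if not 0 <= buckets_versioned <= 100:
        raise ValueError(f"Percent of versioned buckets must be between 0 and 100: got {buckets_versioned}")
    if versioning:  # --versioning True behaves like --buckets_versioned=100
        return [True] * buckets_count
    num_versioned = int(buckets_versioned / 100 * buckets_count)
    return [True] * num_versioned + [False] * (buckets_count - num_versioned)


if __name__ == "__main__":
    endpoints = ["host1:8084", "host2:8084"]  # hypothetical gateway addresses
    flags = versioning_flags(buckets_count=5, buckets_versioned=40)
    for i, endpoint, versioned in zip(range(5), cycle(endpoints), flags):
        print(f"bucket #{i} on {endpoint}: versioned={versioned}")
    # 40% of 5 buckets -> the first 2 buckets get versioning enabled, the rest do not.
```

Note that versioning is enabled only on the first `num_versioned_buckets` buckets in creation order; the remaining buckets are created without any versioning configuration.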