Compare commits

...

8 commits

Author SHA1 Message Date
7bafdf0477
[#164] *.js: Fix formatting
Signed-off-by: Alexander Chuprov <a.chuprov@yadro.com>
2024-11-15 18:19:05 +03:00
39078af679
[#164] pre-commit: Add pre-commit
Signed-off-by: Alexander Chuprov <a.chuprov@yadro.com>
2024-11-15 18:19:05 +03:00
74be390aad
[#164] .forgejo: Add clang-format support
Signed-off-by: Alexander Chuprov <a.chuprov@yadro.com>
2024-11-15 18:19:05 +03:00
f3392c390a
[#164] Makefile: Add clang-format support
Signed-off-by: Alexander Chuprov <a.chuprov@yadro.com>
2024-11-15 18:19:05 +03:00
124397578d
[#100] preset_s3: Add a flag for percent of versioned buckets
Add flag "--buckets_versioned". Default is 0 (no versioned buckets)

Signed-off-by: Nikita Zinkevich <n.zinkevich@yadro.com>
2024-11-12 18:21:29 +03:00
a7079cda60
[#100] s3: Add a flag for permanent versioned object deletion
Signed-off-by: Nikita Zinkevich <n.zinkevich@yadro.com>
2024-11-12 18:21:28 +03:00
d3d5a1baed
[#100] s3: Support creating of versioned bucket
Add "versioning" parameter handling in CreateBucket method

Signed-off-by: Nikita Zinkevich <n.zinkevich@yadro.com>
2024-11-12 18:21:28 +03:00
72d24b04a3
[#179] Remove mentions of GitHub from contributor docs
Signed-off-by: Vitaliy Potyarkin <v.potyarkin@yadro.com>
2024-11-06 15:41:48 +03:00
23 changed files with 722 additions and 526 deletions


@@ -20,6 +20,27 @@ jobs:
       - name: Run linters
        run: make lint

+  clang-format:
+    name: Check clang-format
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v3
+      - name: Install deps
+        run: |
+          apt update
+          apt install -y clang-format
+      - name: Run clang-format
+        run: make clang
+      - name: Print diff
+        run: git diff HEAD
+      - name: Check that nothing has changed
+        run: git diff-index --exit-code HEAD
+
   tests:
     name: Tests
     runs-on: ubuntu-latest
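The last two steps are the actual gate: `make clang` presumably reformats files in place, so any resulting diff means the tree was not clang-format-clean, and `git diff-index --exit-code HEAD` turns that drift into a CI failure. The same check can be reproduced locally before pushing (a sketch, assuming clang-format is installed):

```shell
$ make clang                        # apply clang-format via the new Makefile target
$ git diff HEAD                     # inspect any reformatting it produced
$ git diff-index --exit-code HEAD   # exits non-zero if formatting changed anything
```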

.pre-commit-config.yaml (new file, 64 lines)

@@ -0,0 +1,64 @@
+ci:
+  autofix_prs: false
+
+repos:
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v4.5.0
+    hooks:
+      - id: check-added-large-files
+      - id: check-case-conflict
+      - id: check-executables-have-shebangs
+      - id: check-shebang-scripts-are-executable
+      - id: check-merge-conflict
+      - id: check-json
+      - id: check-xml
+      - id: check-yaml
+      - id: trailing-whitespace
+        args: [--markdown-linebreak-ext=md]
+      - id: end-of-file-fixer
+        exclude: "(.key|.svg)$"
+
+  - repo: https://github.com/shellcheck-py/shellcheck-py
+    rev: v0.9.0.6
+    hooks:
+      - id: shellcheck
+
+  - repo: local
+    hooks:
+      - id: make-lint
+        name: Run Make Lint
+        entry: make lint
+        language: system
+        pass_filenames: false
+
+  - repo: local
+    hooks:
+      - id: make-clang
+        name: Run Make clang
+        entry: make clang
+        language: system
+        pass_filenames: false
+
+  - repo: local
+    hooks:
+      - id: go-unit-tests
+        name: go unit tests
+        entry: make test GOFLAGS=''
+        pass_filenames: false
+        types: [go]
+        language: system
+
+  - repo: local
+    hooks:
+      - id: gofumpt
+        name: gofumpt
+        entry: make fumpt
+        pass_filenames: false
+        types: [go]
+        language: system
+
+  - repo: https://github.com/TekWizely/pre-commit-golang
+    rev: v1.0.0-rc.1
+    hooks:
+      - id: go-staticcheck-repo-mod
+      - id: go-mod-tidy
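For local development these hooks are typically enabled once per clone with the standard pre-commit CLI (a sketch; the tool itself is not part of this change):

```shell
$ pre-commit install           # register the git hook so checks run on each commit
$ pre-commit run --all-files   # run every configured hook across the tree once
```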


@@ -27,7 +27,8 @@ Start by forking the `xk6-frostfs` repository, make changes in a branch and then
 send a pull request. We encourage pull requests to discuss code changes. Here
 are the steps in details:

-### Set up your GitHub Repository
+### Set up your repository
 Fork [xk6-frostfs upstream](https://git.frostfs.info/TrueCloudLab/xk6-frostfs/fork) source
 repository to your own personal repository. Copy the URL of your fork (you will
 need it for the `git clone` command below).

@@ -89,7 +90,7 @@ $ git push origin feature/123-something_awesome
 ```

 ### Create a Pull Request

-Pull requests can be created via GitHub. Refer to [this
+Pull requests can be created via git.frostfs.info. Refer to [this
 document](https://help.github.com/articles/creating-a-pull-request/) for
 detailed steps on how to create a pull request. After a Pull Request gets peer
 reviewed and approved, it will be merged.


@@ -66,6 +66,8 @@ cover:
 format:
 	@echo "⇒ Processing gofmt check"
 	@gofmt -s -w ./
+	@echo "⇒ Processing clang-format"
+	@clang-format -i **/*.js

 # Run linters
 lint:


@@ -1,8 +1,10 @@
 import local from 'k6/x/frostfs/local';
 import {uuidv4} from '../scenarios/libs/k6-utils-1.4.0.js';

 const payload = open('../go.sum', 'b');
-const local_cli = local.connect("/path/to/config.yaml", "/path/to/config/dir", "", false)
+const local_cli =
+    local.connect("/path/to/config.yaml", "/path/to/config/dir", "", false)

 export const options = {
   stages : [

@@ -13,12 +15,12 @@ export const options = {
 export default function() {
   let headers = {
     'unique_header' : uuidv4()
-  }
-  const container_id = '6BVPPXQewRJ6J5EYmAPLczXxNocS7ikyF7amS2esWQnb';
+  } const container_id = '6BVPPXQewRJ6J5EYmAPLczXxNocS7ikyF7amS2esWQnb';
   let resp = local_cli.put(container_id, headers, payload)
   if (resp.success) {
     local_cli.get(container_id, resp.object_id)
-  } else {
+  }
+  else {
     console.log(resp.error)
   }
 }


@@ -1,9 +1,13 @@
-import native from 'k6/x/frostfs/native';
 import {fail} from "k6";
+import native from 'k6/x/frostfs/native';
 import {uuidv4} from '../scenarios/libs/k6-utils-1.4.0.js';

 const payload = open('../go.sum', 'b');
-const frostfs_cli = native.connect("s01.frostfs.devenv:8080", "1dd37fba80fec4e6a6f13fd708d8dcb3b29def768017052f6c930fa1c5d90bbb", 0, 0, false, 0)
+const frostfs_cli = native.connect(
+    "s01.frostfs.devenv:8080",
+    "1dd37fba80fec4e6a6f13fd708d8dcb3b29def768017052f6c930fa1c5d90bbb", 0, 0,
+    false, 0)

 export const options = {
   stages : [

@@ -28,13 +32,12 @@ export function setup() {
 }

 export default function(data) {
-  let headers = {
-    'unique_header': uuidv4()
-  }
-  let resp = frostfs_cli.put(data.container_id, headers, payload)
+  let headers = {'unique_header' : uuidv4()} let resp =
+      frostfs_cli.put(data.container_id, headers, payload)
   if (resp.success) {
     frostfs_cli.get(data.container_id, resp.object_id)
-  } else {
+  }
+  else {
     console.log(resp.error)
   }
 }


@@ -1,9 +1,11 @@
 import native from 'k6/x/frostfs/native';
 import {uuidv4} from '../scenarios/libs/k6-utils-1.4.0.js';

 const payload = open('../go.sum', 'b');
 const container = "AjSxSNNXbJUDPqqKYm1VbFVDGCakbpUNH8aGjPmGAH3B"
-const frostfs_cli = native.connect("s01.frostfs.devenv:8080", "", 0, 0, false, 0)
+const frostfs_cli =
+    native.connect("s01.frostfs.devenv:8080", "", 0, 0, false, 0)
 const frostfs_obj = frostfs_cli.onsite(container, payload)

 export const options = {

@@ -13,13 +15,11 @@ export const options = {
 };

 export default function() {
-  let headers = {
-    'unique_header': uuidv4()
-  }
-  let resp = frostfs_obj.put(headers)
+  let headers = {'unique_header' : uuidv4()} let resp = frostfs_obj.put(headers)
   if (resp.success) {
     frostfs_cli.get(container, resp.object_id)
-  } else {
+  }
+  else {
     console.log(resp.error)
   }
 }


@@ -1,10 +1,12 @@
-import s3 from 'k6/x/frostfs/s3';
 import {fail} from 'k6'
+import s3 from 'k6/x/frostfs/s3';
 import {uuidv4} from '../scenarios/libs/k6-utils-1.4.0.js';

 const payload = open('../go.sum', 'b');
 const bucket = "cats"
-const s3_cli = s3.connect("https://s3.frostfs.devenv:8080", {'no_verify_ssl': 'true'})
+const s3_cli =
+    s3.connect("https://s3.frostfs.devenv:8080", {'no_verify_ssl' : 'true'})

 export const options = {
   stages : [


@@ -1,9 +1,11 @@
 import s3local from 'k6/x/frostfs/s3local';
 import {uuidv4} from '../scenarios/libs/k6-utils-1.4.0.js';

 const bucket = "testbucket"
 const payload = open('../go.sum', 'b');
-const s3local_cli = s3local.connect("path/to/storage/config.yml", "path/to/storage/config/dir", {}, {
+const s3local_cli = s3local.connect(
+    "path/to/storage/config.yml", "path/to/storage/config/dir", {}, {
       'testbucket' : 'GBQDDUM1hdodXmiRHV57EUkFWJzuntsG8BG15wFSwam6',
     });


@@ -142,6 +142,70 @@ func (c *Client) Get(bucket, key string) GetResponse {
 	return GetResponse{Success: true}
 }

+// DeleteObjectVersion deletes object version with specified versionID.
+// If version argument is empty, deletes all versions and delete-markers of specified object.
+func (c *Client) DeleteObjectVersion(bucket, key, version string) DeleteResponse {
+	var toDelete []types.ObjectIdentifier
+
+	if version != "" {
+		toDelete = append(toDelete, types.ObjectIdentifier{
+			Key:       aws.String(key),
+			VersionId: aws.String(version),
+		})
+	} else {
+		versions, err := c.cli.ListObjectVersions(c.vu.Context(), &s3.ListObjectVersionsInput{
+			Bucket: aws.String(bucket),
+			Prefix: aws.String(key),
+		})
+		if err != nil {
+			stats.Report(c.vu, objDeleteFails, 1)
+			return DeleteResponse{Success: false, Error: err.Error()}
+		}
+		toDelete = filterObjectVersions(versions, key)
+	}
+	if len(toDelete) == 0 {
+		return c.Delete(bucket, key)
+	} else {
+		_, err := c.cli.DeleteObjects(c.vu.Context(), &s3.DeleteObjectsInput{
+			Bucket: aws.String(bucket),
+			Delete: &types.Delete{
+				Objects: toDelete,
+				Quiet:   true,
+			},
+		})
+		if err != nil {
+			stats.Report(c.vu, objDeleteFails, 1)
+			return DeleteResponse{Success: false, Error: err.Error()}
+		}
+	}
+	return DeleteResponse{Success: true}
+}
+
+func filterObjectVersions(versions *s3.ListObjectVersionsOutput, key string) []types.ObjectIdentifier {
+	var result []types.ObjectIdentifier
+
+	for _, v := range versions.Versions {
+		if *v.Key == key {
+			result = append(result, types.ObjectIdentifier{
+				Key:       v.Key,
+				VersionId: v.VersionId,
+			})
+		}
+	}
+
+	for _, marker := range versions.DeleteMarkers {
+		if *marker.Key == key {
+			result = append(result, types.ObjectIdentifier{
+				Key:       marker.Key,
+				VersionId: marker.VersionId,
+			})
+		}
+	}
+
+	return result
+}
+
 func get(
 	c *s3.Client,
 	bucket string,

@@ -215,6 +279,26 @@ func (c *Client) CreateBucket(bucket string, params map[string]string) CreateBuc
 		return CreateBucketResponse{Success: false, Error: err.Error()}
 	}

+	var versioning bool
+	if strVersioned, ok := params["versioning"]; ok {
+		if versioning, err = strconv.ParseBool(strVersioned); err != nil {
+			stats.Report(c.vu, createBucketFails, 1)
+			return CreateBucketResponse{Success: false, Error: err.Error()}
+		}
+	}
+
+	if versioning {
+		_, err = c.cli.PutBucketVersioning(context.TODO(), &s3.PutBucketVersioningInput{
+			Bucket: aws.String(bucket),
+			VersioningConfiguration: &types.VersioningConfiguration{
+				Status: types.BucketVersioningStatusEnabled,
+			},
+		})
+		if err != nil {
+			stats.Report(c.vu, createBucketFails, 1)
+			return CreateBucketResponse{Success: false, Error: err.Error()}
+		}
+	}
+
 	stats.Report(c.vu, createBucketSuccess, 1)
 	stats.Report(c.vu, createBucketDuration, metrics.D(time.Since(start)))
 	return CreateBucketResponse{Success: true}
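The S3 semantics implemented here mirror the AWS CLI: deleting one concrete version is a delete-object call with --version-id, and the empty-version path enumerates what list-object-versions returns for the key. For reference, the equivalent CLI calls (bucket and endpoint reused from the s3 example above; the key is illustrative):

```shell
# delete a single specific version of an object
$ aws s3api delete-object --bucket cats --key kitten.jpg \
    --version-id "$VERSION_ID" --endpoint https://s3.frostfs.devenv:8080
# list all versions and delete-markers for a key, as the empty-version branch does
$ aws s3api list-object-versions --bucket cats --prefix kitten.jpg \
    --endpoint https://s3.frostfs.devenv:8080
```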


@@ -31,7 +31,8 @@ const grpc_endpoint =
 const grpc_client = native.connect(
     grpc_endpoint, '', __ENV.DIAL_TIMEOUT ? parseInt(__ENV.DIAL_TIMEOUT) : 5,
     __ENV.STREAM_TIMEOUT ? parseInt(__ENV.STREAM_TIMEOUT) : 60,
-    __ENV.PREPARE_LOCALLY ? __ENV.PREPARE_LOCALLY.toLowerCase() === 'true' : false,
+    __ENV.PREPARE_LOCALLY ? __ENV.PREPARE_LOCALLY.toLowerCase() === 'true'
+                          : false,
     1024 * parseInt(__ENV.MAX_OBJECT_SIZE || '0'));
 const log = logging.new().withField('endpoint', grpc_endpoint);


@@ -12,13 +12,13 @@ import { uuidv4 } from './libs/k6-utils-1.4.0.js';
 parseEnv();

-const obj_list = new SharedArray('obj_list', function () {
-  return JSON.parse(open(__ENV.PREGEN_JSON)).objects;
-});
+const obj_list = new SharedArray(
+    'obj_list',
+    function() { return JSON.parse(open(__ENV.PREGEN_JSON)).objects; });

-const container_list = new SharedArray('container_list', function () {
-  return JSON.parse(open(__ENV.PREGEN_JSON)).containers;
-});
+const container_list = new SharedArray(
+    'container_list',
+    function() { return JSON.parse(open(__ENV.PREGEN_JSON)).containers; });

 const read_size = JSON.parse(open(__ENV.PREGEN_JSON)).obj_size;
 const summary_json = __ENV.SUMMARY_JSON || '/tmp/summary.json';

@@ -30,7 +30,8 @@ const grpc_endpoint =
 const grpc_client = native.connect(
     grpc_endpoint, '', __ENV.DIAL_TIMEOUT ? parseInt(__ENV.DIAL_TIMEOUT) : 5,
     __ENV.STREAM_TIMEOUT ? parseInt(__ENV.STREAM_TIMEOUT) : 60,
-    __ENV.PREPARE_LOCALLY ? __ENV.PREPARE_LOCALLY.toLowerCase() === 'true' : false,
+    __ENV.PREPARE_LOCALLY ? __ENV.PREPARE_LOCALLY.toLowerCase() === 'true'
+                          : false,
     1024 * parseInt(__ENV.MAX_OBJECT_SIZE || '0'));
 const log = logging.new().withField('endpoint', grpc_endpoint);


@@ -12,13 +12,13 @@ import {uuidv4} from './libs/k6-utils-1.4.0.js';
 parseEnv();

-const obj_list = new SharedArray('obj_list', function() {
-  return JSON.parse(open(__ENV.PREGEN_JSON)).objects;
-});
+const obj_list = new SharedArray(
+    'obj_list',
+    function() { return JSON.parse(open(__ENV.PREGEN_JSON)).objects; });

-const container_list = new SharedArray('container_list', function() {
-  return JSON.parse(open(__ENV.PREGEN_JSON)).containers;
-});
+const container_list = new SharedArray(
+    'container_list',
+    function() { return JSON.parse(open(__ENV.PREGEN_JSON)).containers; });

 const read_size = JSON.parse(open(__ENV.PREGEN_JSON)).obj_size;
 const summary_json = __ENV.SUMMARY_JSON || '/tmp/summary.json';

@@ -137,7 +137,8 @@ export function obj_read() {
   const resp =
       http.get(`http://${http_endpoint}/get/${obj.container}/${obj.object}`);
   if (resp.status != 200) {
-    log.withFields({status: resp.status, cid: obj.container, oid: obj.object})
+    log.withFields(
+           {status : resp.status, cid : obj.container, oid : obj.object})
         .error(resp.error);
   }
 }


@@ -12,13 +12,13 @@ import {uuidv4} from './libs/k6-utils-1.4.0.js';
 parseEnv();

-const obj_list = new SharedArray('obj_list', function() {
-  return JSON.parse(open(__ENV.PREGEN_JSON)).objects;
-});
+const obj_list = new SharedArray(
+    'obj_list',
+    function() { return JSON.parse(open(__ENV.PREGEN_JSON)).objects; });

-const container_list = new SharedArray('container_list', function() {
-  return JSON.parse(open(__ENV.PREGEN_JSON)).containers;
-});
+const container_list = new SharedArray(
+    'container_list',
+    function() { return JSON.parse(open(__ENV.PREGEN_JSON)).containers; });

 const read_size = JSON.parse(open(__ENV.PREGEN_JSON)).obj_size;
 const summary_json = __ENV.SUMMARY_JSON || '/tmp/summary.json';


@@ -15,7 +15,7 @@ def create_bucket(endpoint, versioning, location, acl, no_verify_ssl):
     cmd_line = f"aws {no_verify_ssl_str} s3api create-bucket --bucket {bucket_name} " \
                f"--endpoint {endpoint} {configuration} {acl} "
     cmd_line_ver = f"aws {no_verify_ssl_str} s3api put-bucket-versioning --bucket {bucket_name} " \
-                   f"--versioning-configuration Status=Enabled --endpoint {endpoint} {acl} "
+                   f"--versioning-configuration Status=Enabled --endpoint {endpoint}"

     output, success = execute_cmd(cmd_line)

@@ -25,7 +25,7 @@ def create_bucket(endpoint, versioning, location, acl, no_verify_ssl):
              f"Error: {output}", endpoint)
         return False

-    if versioning == "True":
+    if versioning:
         output, success = execute_cmd(cmd_line_ver)
         if not success:
             log(f"{cmd_line_ver}\n"


@@ -13,6 +13,7 @@ from helpers.aws_cli import create_bucket, upload_object

 ERROR_WRONG_CONTAINERS_COUNT = 1
 ERROR_WRONG_OBJECTS_COUNT = 2
+ERROR_WRONG_PERCENTAGE = 3
 MAX_WORKERS = 50

 DEFAULT_LOCATION = ""

@@ -26,7 +27,8 @@ parser.add_argument('--endpoint', help='S3 Gateways addresses separated by comma
 parser.add_argument('--update', help='True/False, False by default. Save existed buckets from target file (--out). '
                                      'New buckets will not be created.')
 parser.add_argument('--location', help=f'AWS location constraint. Default is "{DEFAULT_LOCATION}"', action="append")
-parser.add_argument('--versioning', help='True/False, False by default.')
+parser.add_argument('--versioning', help='True/False, False by default. Alias of --buckets_versioned=100')
+parser.add_argument('--buckets_versioned', help='Percent of versioned buckets. Default is 0', default=0)
 parser.add_argument('--ignore-errors', help='Ignore preset errors', action='store_true')
 parser.add_argument('--no-verify-ssl', help='Ignore SSL verifications', action='store_true')
 parser.add_argument('--workers', help='Count of workers in preset. Max = 50, Default = 50', default=50)

@@ -62,8 +64,17 @@ def main():
     print(f"Create buckets: {buckets_count}")

     with ProcessPoolExecutor(max_workers=min(MAX_WORKERS, workers)) as executor:
-        buckets_runs = [executor.submit(create_bucket, endpoint, args.versioning, location, args.acl, no_verify_ssl)
-                        for _, endpoint, location in
+        if not 0 <= int(args.buckets_versioned) <= 100:
+            print(f"Percent of versioned buckets must be between 0 and 100: got {args.buckets_versioned}")
+            if not ignore_errors:
+                sys.exit(ERROR_WRONG_PERCENTAGE)
+        if args.versioning == "True":
+            versioning_per_bucket = [True] * buckets_count
+        else:
+            num_versioned_buckets = int((int(args.buckets_versioned) / 100) * buckets_count)
+            versioning_per_bucket = [True] * num_versioned_buckets + [False] * (buckets_count - num_versioned_buckets)
+        buckets_runs = [executor.submit(create_bucket, endpoint, versioning_per_bucket[i], location, args.acl, no_verify_ssl)
+                        for i, endpoint, location in
                         zip(range(buckets_count), cycle(endpoints), cycle(args.location))]

     for run in buckets_runs:
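Taken together, a preset run can now mark only a fraction of its buckets as versioned. A hypothetical invocation, adapting the documented example (counts and endpoint are illustrative; with 10 buckets, --buckets_versioned 20 versions the first 2):

```shell
$ ./scenarios/preset/preset_s3.py --size 1024 --buckets 10 --out s3_1024kb.json \
    --endpoint host1:8084 --preload_obj 500 --buckets_versioned 20
```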


@@ -125,7 +125,7 @@ The tests will use all pre-created buckets for PUT operations and all pre-created
    ```shell
    $ ./scenarios/preset/preset_s3.py --size 1024 --buckets 1 --out s3_1024kb.json --endpoint host1:8084 --preload_obj 500 --location load-1-4
    ```
    * '--location' - specify the name of container policy (from policy.json file). It's important to run 'aws configure' each time when the policy file has been changed to pick up the latest policies.
+   * '--buckets_versioned' - specify the percentage of versioned buckets from the total number of created buckets. Default is 0

 3. Execute scenario with options:
    ```shell


@@ -6,10 +6,10 @@ import registry from 'k6/x/frostfs/registry';
 import s3 from 'k6/x/frostfs/s3';
 import stats from 'k6/x/frostfs/stats';

-import {generateS3Key} from './libs/keygen.js';
+import {newGenerator} from './libs/datagen.js';
 import {parseEnv} from './libs/env-parser.js';
 import {textSummary} from './libs/k6-summary-0.0.2.js';
-import {newGenerator} from './libs/datagen.js';
+import {generateS3Key} from './libs/keygen.js';

 parseEnv();


@@ -5,20 +5,20 @@ import registry from 'k6/x/frostfs/registry';
 import s3 from 'k6/x/frostfs/s3';
 import stats from 'k6/x/frostfs/stats';

-import {generateS3Key} from './libs/keygen.js';
 import {newGenerator} from './libs/datagen.js';
 import {parseEnv} from './libs/env-parser.js';
 import {textSummary} from './libs/k6-summary-0.0.2.js';
+import {generateS3Key} from './libs/keygen.js';

 parseEnv();

-const obj_list = new SharedArray('obj_list', function() {
-  return JSON.parse(open(__ENV.PREGEN_JSON)).objects;
-});
+const obj_list = new SharedArray(
+    'obj_list',
+    function() { return JSON.parse(open(__ENV.PREGEN_JSON)).objects; });

-const bucket_list = new SharedArray('bucket_list', function() {
-  return JSON.parse(open(__ENV.PREGEN_JSON)).buckets;
-});
+const bucket_list = new SharedArray(
+    'bucket_list',
+    function() { return JSON.parse(open(__ENV.PREGEN_JSON)).buckets; });

 const read_size = JSON.parse(open(__ENV.PREGEN_JSON)).obj_size;
 const summary_json = __ENV.SUMMARY_JSON || '/tmp/summary.json';

@@ -86,7 +86,6 @@ if (write_rate > 0) {
   };
 }

-
 const pre_alloc_read_vus = parseInt(__ENV.PRE_ALLOC_READERS || '0');
 const max_read_vus = parseInt(__ENV.MAX_READERS || pre_alloc_read_vus);
 const read_rate = parseInt(__ENV.READ_RATE || '0');

@@ -103,7 +102,6 @@ if (read_rate > 0) {
   };
 }

-
 const pre_alloc_delete_vus = parseInt(__ENV.PRE_ALLOC_DELETERS || '0');
 const max_delete_vus = parseInt(__ENV.MAX_DELETERS || pre_alloc_write_vus);
 const delete_rate = parseInt(__ENV.DELETE_RATE || '0');


@@ -6,10 +6,10 @@ import registry from 'k6/x/frostfs/registry';
 import s3 from 'k6/x/frostfs/s3';
 import stats from 'k6/x/frostfs/stats';

-import {generateS3Key} from './libs/keygen.js';
 import {newGenerator} from './libs/datagen.js';
 import {parseEnv} from './libs/env-parser.js';
 import {textSummary} from './libs/k6-summary-0.0.2.js';
+import {generateS3Key} from './libs/keygen.js';

 parseEnv();

@@ -100,8 +100,6 @@ if (registry_enabled ) {
   });
 }

-
 const delete_vu_count = parseInt(__ENV.DELETERS || '0');
-
 if (delete_vu_count > 0) {
   if (!obj_to_delete_selector) {

@@ -186,9 +184,15 @@ export function obj_read() {
   }
   const resp = s3_client.get(obj.s3_bucket, obj.s3_key)
   if (!resp.success) {
-    log.withFields({bucket : obj.s3_bucket, key : obj.s3_key, status: obj.status, op: `READ`})
+    log.withFields({
+         bucket : obj.s3_bucket,
+         key : obj.s3_key,
+         status : obj.status,
+         op : `READ`
+       })
         .error(resp.error);
-  } else {
+  }
+  else {
     obj_registry.setObjectStatus(obj.id, obj.status, 'read');
   }


@@ -5,16 +5,16 @@ import registry from 'k6/x/frostfs/registry';
 import s3 from 'k6/x/frostfs/s3';
 import stats from 'k6/x/frostfs/stats';

-import {generateS3Key} from './libs/keygen.js';
 import {newGenerator} from './libs/datagen.js';
 import {parseEnv} from './libs/env-parser.js';
 import {textSummary} from './libs/k6-summary-0.0.2.js';
+import {generateS3Key} from './libs/keygen.js';

 parseEnv();

-const bucket_list = new SharedArray('bucket_list', function() {
-  return JSON.parse(open(__ENV.PREGEN_JSON)).buckets;
-});
+const bucket_list = new SharedArray(
+    'bucket_list',
+    function() { return JSON.parse(open(__ENV.PREGEN_JSON)).buckets; });

 const summary_json = __ENV.SUMMARY_JSON || '/tmp/summary.json';

@@ -105,9 +105,8 @@ export function obj_write_multipart() {
   const bucket = bucket_list[Math.floor(Math.random() * bucket_list.length)];

   const payload = generator.genPayload();
-  const resp = s3_client.multipart(
-      bucket, key, write_multipart_part_size, write_multipart_vu_count,
-      payload);
+  const resp = s3_client.multipart(bucket, key, write_multipart_part_size,
+                                   write_multipart_vu_count, payload);
   if (!resp.success) {
     log.withFields({bucket : bucket, key : key}).error(resp.error);
     return;


@@ -5,25 +5,25 @@ import registry from 'k6/x/frostfs/registry';
 import s3local from 'k6/x/frostfs/s3local';
 import stats from 'k6/x/frostfs/stats';

-import {generateS3Key} from './libs/keygen.js';
 import {newGenerator} from './libs/datagen.js';
 import {parseEnv} from './libs/env-parser.js';
 import {textSummary} from './libs/k6-summary-0.0.2.js';
 import {uuidv4} from './libs/k6-utils-1.4.0.js';
+import {generateS3Key} from './libs/keygen.js';

 parseEnv();

-const obj_list = new SharedArray('obj_list', function() {
-  return JSON.parse(open(__ENV.PREGEN_JSON)).objects;
-});
+const obj_list = new SharedArray(
+    'obj_list',
+    function() { return JSON.parse(open(__ENV.PREGEN_JSON)).objects; });

-const container_list = new SharedArray('container_list', function() {
-  return JSON.parse(open(__ENV.PREGEN_JSON)).containers;
-});
+const container_list = new SharedArray(
+    'container_list',
+    function() { return JSON.parse(open(__ENV.PREGEN_JSON)).containers; });

-const bucket_list = new SharedArray('bucket_list', function() {
-  return JSON.parse(open(__ENV.PREGEN_JSON)).buckets;
-});
+const bucket_list = new SharedArray(
+    'bucket_list',
+    function() { return JSON.parse(open(__ENV.PREGEN_JSON)).buckets; });

 function bucket_mapping() {
   if (container_list.length != bucket_list.length) {

@@ -43,8 +43,7 @@ const config_file = __ENV.CONFIG_FILE;
 const config_dir = __ENV.CONFIG_DIR;
 const max_total_size_gb =
     __ENV.MAX_TOTAL_SIZE_GB ? parseInt(__ENV.MAX_TOTAL_SIZE_GB) : 0;
-const s3_client = s3local.connect(
-    config_file, config_dir, {
+const s3_client = s3local.connect(config_file, config_dir, {
   'debug_logger' : __ENV.DEBUG_LOGGER || 'false',
 },
                                   bucket_mapping(), max_total_size_gb);


@@ -44,7 +44,8 @@ if (__ENV.GRPC_ENDPOINTS) {
   grpc_client = native.connect(
       grpcEndpoint, '', __ENV.DIAL_TIMEOUT ? parseInt(__ENV.DIAL_TIMEOUT) : 0,
       __ENV.STREAM_TIMEOUT ? parseInt(__ENV.STREAM_TIMEOUT) : 0,
-      __ENV.PREPARE_LOCALLY ? __ENV.PREPARE_LOCALLY.toLowerCase() === 'true' : false,
+      __ENV.PREPARE_LOCALLY ? __ENV.PREPARE_LOCALLY.toLowerCase() === 'true'
+                            : false,
       1024 * parseInt(__ENV.MAX_OBJECT_SIZE || '0'));
 }