forked from TrueCloudLab/xk6-frostfs

Compare commits: master...lorem-ipsu (2 commits)

Author | SHA1 | Date
---|---|---
 | 025facee96 |
 | c56bbf04d5 |

58 changed files with 1465 additions and 2909 deletions

@@ -1,21 +0,0 @@
-name: DCO action
-on: [pull_request]
-
-jobs:
-  dco:
-    name: DCO
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v3
-        with:
-          fetch-depth: 0
-
-      - name: Setup Go
-        uses: actions/setup-go@v3
-        with:
-          go-version: '1.21'
-
-      - name: Run commit format checker
-        uses: https://git.frostfs.info/TrueCloudLab/dco-go@v2
-        with:
-          from: 'origin/${{ github.event.pull_request.base.ref }}'

@@ -1,55 +0,0 @@
-name: Tests and linters
-on: [pull_request]
-
-jobs:
-  lint:
-    name: Lint
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v3
-
-      - name: Set up Go
-        uses: actions/setup-go@v3
-        with:
-          go-version: '1.21'
-          cache: true
-
-      - name: golangci-lint
-        uses: https://github.com/golangci/golangci-lint-action@v3
-        with:
-          version: latest
-
-  tests:
-    name: Tests
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        go_versions: [ '1.20', '1.21' ]
-      fail-fast: false
-    steps:
-      - uses: actions/checkout@v3
-
-      - name: Set up Go
-        uses: actions/setup-go@v3
-        with:
-          go-version: '${{ matrix.go_versions }}'
-          cache: true
-
-      - name: Run tests
-        run: make test
-
-  tests-race:
-    name: Tests with -race
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v3
-
-      - name: Set up Go
-        uses: actions/setup-go@v3
-        with:
-          go-version: '1.21'
-          cache: true
-
-      - name: Run tests
-        run: go test ./... -count=1 -race
-

.forgejo/CODEOWNERS → .github/CODEOWNERS (vendored) | 0
.forgejo/logo.svg → .github/logo.svg (vendored) | 0 (5.5 KiB before and after)

.github/workflows/dco.yml (vendored, new file) | 21

@@ -0,0 +1,21 @@
+name: DCO check
+
+on:
+  pull_request:
+    branches:
+      - master
+
+jobs:
+  commits_check_job:
+    runs-on: ubuntu-latest
+    name: Commits Check
+    steps:
+      - name: Get PR Commits
+        id: 'get-pr-commits'
+        uses: tim-actions/get-pr-commits@master
+        with:
+          token: ${{ secrets.GITHUB_TOKEN }}
+      - name: DCO Check
+        uses: tim-actions/dco@master
+        with:
+          commits: ${{ steps.get-pr-commits.outputs.commits }}

.github/workflows/go.yml (vendored, new file) | 34

@@ -0,0 +1,34 @@
+name: Tests
+
+on:
+  pull_request:
+    branches:
+      - master
+    types: [opened, synchronize]
+    paths-ignore:
+      - '**/*.md'
+  workflow_dispatch:
+
+jobs:
+  lint:
+    name: Lint
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check out code
+        uses: actions/checkout@v2
+
+      - name: golangci-lint
+        uses: golangci/golangci-lint-action@v2
+        with:
+          version: latest
+          args: --timeout=2m
+
+  tests:
+    name: Tests
+    runs-on: ubuntu-20.04
+    strategy:
+      matrix:
+        go_versions: [ '1.17', '1.18', '1.19' ]
+      fail-fast: false
+    steps:
+      - uses: actions/checkout@v3

.gitignore (vendored) | 3

@@ -1,6 +1,3 @@
 k6
 *.bolt
 presets
-bin
-# Preset script artifacts.
-__pycache__

Makefile | 94

@@ -1,94 +0,0 @@
-#!/usr/bin/make -f
-
-# Common variables
-REPO ?= $(shell go list -m)
-VERSION ?= $(shell git describe --tags --dirty --match "v*" --always --abbrev=8 2>/dev/null || cat VERSION 2>/dev/null || echo "develop")
-GO_VERSION ?= 1.19
-LINT_VERSION ?= 1.49.0
-BINDIR = bin
-
-# Binaries to build
-CMDS = $(addprefix frostfs-, $(notdir $(wildcard cmd/*)))
-BINS = $(addprefix $(BINDIR)/, $(CMDS))
-
-.PHONY: all $(BINS) $(BINDIR) dep docker/ test cover format lint docker/lint pre-commit unpre-commit version clean
-
-# Make all binaries
-all: $(BINS)
-
-$(BINS): $(BINDIR) dep
-	@echo "⇒ Build $@"
-	CGO_ENABLED=0 \
-	go build -v -trimpath \
-	-ldflags "-X $(REPO)/internal/version.Version=$(VERSION)" \
-	-o $@ ./cmd/$(subst frostfs-,,$(notdir $@))
-
-$(BINDIR):
-	@echo "⇒ Ensure dir: $@"
-	@mkdir -p $@
-
-# Pull go dependencies
-dep:
-	@printf "⇒ Download requirements: "
-	@CGO_ENABLED=0 \
-	go mod download && echo OK
-	@printf "⇒ Tidy requirements: "
-	@CGO_ENABLED=0 \
-	go mod tidy -v && echo OK
-
-# Run `make %` in Golang container, for more information run `make help.docker/%`
-docker/%:
-	$(if $(filter $*,all $(BINS)), \
-		@echo "=> Running 'make $*' in clean Docker environment" && \
-		docker run --rm -t \
-		-v `pwd`:/src \
-		-w /src \
-		-u `stat -c "%u:%g" .` \
-		--env HOME=/src \
-		golang:$(GO_VERSION) make $*,\
-	@echo "supported docker targets: all $(BINS) lint")
-
-# Run tests
-test:
-	@go test ./... -cover
-
-# Run tests with race detection and produce coverage output
-cover:
-	@go test -v -race ./... -coverprofile=coverage.txt -covermode=atomic
-	@go tool cover -html=coverage.txt -o coverage.html
-
-# Reformat code
-format:
-	@echo "⇒ Processing gofmt check"
-	@gofmt -s -w ./
-
-# Run linters
-lint:
-	@golangci-lint --timeout=5m run
-
-# Run linters in Docker
-docker/lint:
-	docker run --rm -it \
-	-v `pwd`:/src \
-	-u `stat -c "%u:%g" .` \
-	--env HOME=/src \
-	golangci/golangci-lint:v$(LINT_VERSION) bash -c 'cd /src/ && make lint'
-
-# Activate pre-commit hooks
-pre-commit:
-	pre-commit install -t pre-commit -t commit-msg
-
-# Deactivate pre-commit hooks
-unpre-commit:
-	pre-commit uninstall -t pre-commit -t commit-msg
-
-# Show current version
-version:
-	@echo $(VERSION)
-
-# Clean up files
-clean:
-	rm -rf .cache
-	rm -rf $(BINDIR)
-
-include help.mk

README.md | 48

@@ -47,11 +47,10 @@ Create native client with `connect` method. Arguments:
 - hex encoded private key (empty value produces random key)
 - dial timeout in seconds (0 for the default value)
 - stream timeout in seconds (0 for the default value)
-- generate object header on the client side (for big object - split locally too)

 ```js
 import native from 'k6/x/frostfs/native';
-const frostfs_cli = native.connect("s01.frostfs.devenv:8080", "", 0, 0, false)
+const frostfs_cli = native.connect("s01.frostfs.devenv:8080", "", 0, 0)
 ```

 ### Methods

@@ -74,13 +73,12 @@ const frostfs_cli = native.connect("s01.frostfs.devenv:8080", "", 0, 0, false)

 Create a local client with `connect` method. Arguments:
 - local path to frostfs storage node configuration file
-- local path to frostfs storage node configuration directory
 - hex encoded private key (empty value produces random key)
 - whether to use the debug logger (warning: very verbose)

 ```js
 import local from 'k6/x/frostfs/local';
-const local_client = local.connect("/path/to/config.yaml", "/path/to/config/dir", "", false)
+const local_client = local.connect("/path/to/config.yaml", "", false)
 ```

 ### Methods

@@ -100,13 +98,13 @@ Credentials are taken from default AWS configuration files and ENVs.

 ```js
 import s3 from 'k6/x/frostfs/s3';
-const s3_cli = s3.connect("https://s3.frostfs.devenv:8080")
+const s3_cli = s3.connect("http://s3.frostfs.devenv:8080")
 ```

 You can also provide additional options:
 ```js
 import s3 from 'k6/x/frostfs/s3';
-const s3_cli = s3.connect("https://s3.frostfs.devenv:8080", {'no_verify_ssl': 'true', 'timeout': '60s'})
+const s3_cli = s3.connect("http://s3.frostfs.devenv:8080", {'no_verify_ssl': 'true', 'timeout': '60s'})
 ```

 * `no_verify_ss` - Bool. If `true` - skip verifying the s3 certificate chain and host name (useful if s3 uses self-signed certificates)

@@ -124,7 +122,6 @@ const s3_cli = s3.connect("https://s3.frostfs.devenv:8080", {'no_verify_ssl': 't

 Create local s3 client with `connect` method. Arguments:
 - local path to frostfs storage node configuration file
-- local path to frostfs storage node configuration directory
 - parameter map with the following options:
   * `hex_key`: private key to use as a hexadecimal string. A random one is created if none is provided.
   * `node_position`: position of this node in the node array if loading multiple nodes independently (default: 0).

@@ -137,7 +134,7 @@ Create local s3 client with `connect` method. Arguments:
 import local from 'k6/x/frostfs/local';
 const params = {'node_position': 1, 'node_count': 3}
 const bucketMapping = {'mytestbucket': 'GBQDDUM1hdodXmiRHV57EUkFWJzuntsG8BG15wFSwam6'}
-const local_client = local.connect("/path/to/config.yaml", "/path/to/config/dir", params, bucketMapping)
+const local_client = local.connect("/path/to/config.yaml", params, bucketMapping)
 ```

 ### Methods

@@ -150,41 +147,6 @@ const local_client = local.connect("/path/to/config.yaml", "/path/to/config/dir"

 See native protocol and s3 test suite examples in [examples](./examples) dir.

-# Command line utils
-
-To build all command line utils just run:
-
-```shell
-$ make
-```
-
-All binaries will be in `bin` directory.
-
-## Export registry db
-
-You can export registry bolt db to json file, that can be used as pregen for scenarios (see [docs](./scenarios/run_scenarios.md)).
-To do this use `frostfs-xk6-registry-exporter`, available flags can be seen in help:
-
-```shell
-$ ./bin/frostfs-xk6-registry-exporter -h
-Registry exporter for xk6
-
-Usage:
-  registry-exporter [flags]
-
-Examples:
-registry-exporter registry.bolt
-registry-exporter --status created --out out.json registry.bolt
-
-Flags:
-      --age int         Object age
-      --format string   Output format (default "json")
-  -h, --help            help for registry-exporter
-      --out string      Path to output file (default "dumped-registry.json")
-      --status string   Object status (default "created")
-  -v, --version         version for registry-exporter
-```
-
 # License

 - [GNU General Public License v3.0](LICENSE)
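
The README hunks above all make the same API change: the trailing arguments are dropped from the `connect` constructors. As a minimal, hedged sketch (assuming the `+` side of these hunks is the lorem-ipsu branch, and using only calls that appear in this compare), the before/after usage is:

```js
import native from 'k6/x/frostfs/native';
import local from 'k6/x/frostfs/local';

// '-' side: a fifth boolean toggles client-side object-header generation,
// and local.connect takes a separate configuration-directory argument.
// const frostfs_cli = native.connect("s01.frostfs.devenv:8080", "", 0, 0, false)
// const local_client = local.connect("/path/to/config.yaml", "/path/to/config/dir", "", false)

// '+' side: the extra argument is gone from both constructors.
const frostfs_cli = native.connect("s01.frostfs.devenv:8080", "", 0, 0)
const local_client = local.connect("/path/to/config.yaml", "", false)

export default function () {
  // k6 requires an exported default function; the clients above are created once in the init context.
}
```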

@@ -1,18 +0,0 @@
-package main
-
-import (
-	"context"
-	"os"
-	"os/signal"
-	"syscall"
-)
-
-func main() {
-	ctx, _ := signal.NotifyContext(context.Background(), syscall.SIGINT, syscall.SIGTERM, syscall.SIGHUP)
-
-	if cmd, err := rootCmd.ExecuteContextC(ctx); err != nil {
-		cmd.PrintErrln("Error:", err.Error())
-		cmd.PrintErrf("Run '%v --help' for usage.\n", cmd.CommandPath())
-		os.Exit(1)
-	}
-}

@@ -1,89 +0,0 @@
-package main
-
-import (
-	"fmt"
-	"runtime"
-
-	"git.frostfs.info/TrueCloudLab/xk6-frostfs/internal/registry"
-	"git.frostfs.info/TrueCloudLab/xk6-frostfs/internal/version"
-	"github.com/spf13/cobra"
-)
-
-var rootCmd = &cobra.Command{
-	Use:     "registry-exporter",
-	Version: version.Version,
-	Short:   "Registry exporter",
-	Long:    "Registry exporter for xk6",
-	Example: `registry-exporter registry.bolt
-registry-exporter --status created --out out.json registry.bolt`,
-	SilenceErrors: true,
-	SilenceUsage:  true,
-	RunE:          rootCmdRun,
-}
-
-const (
-	outFlag    = "out"
-	formatFlag = "format"
-	statusFlag = "status"
-	ageFlag    = "age"
-)
-
-const (
-	defaultOutPath = "dumped-registry.json"
-
-	jsonFormat = "json"
-
-	createdStatus = "created"
-)
-
-func init() {
-	rootCmd.Flags().String(outFlag, defaultOutPath, "Path to output file")
-	rootCmd.Flags().String(formatFlag, jsonFormat, "Output format")
-	rootCmd.Flags().String(statusFlag, createdStatus, "Object status")
-	rootCmd.Flags().Int(ageFlag, 0, "Object age")
-
-	cobra.AddTemplateFunc("runtimeVersion", runtime.Version)
-	rootCmd.SetVersionTemplate(`FrostFS xk6 Registry Exporter
-{{printf "Version: %s" .Version }}
-GoVersion: {{ runtimeVersion }}
-`)
-}
-
-func rootCmdRun(cmd *cobra.Command, args []string) error {
-	if len(args) != 1 {
-		return fmt.Errorf("expected exacly one non-flag argumet: path to the registry, got: %s", args)
-	}
-
-	format, err := cmd.Flags().GetString(formatFlag)
-	if err != nil {
-		return fmt.Errorf("get '%s' flag: %w", formatFlag, err)
-	}
-	if format != jsonFormat {
-		return fmt.Errorf("unknown format '%s', only '%s' is supported", format, jsonFormat)
-	}
-
-	out, err := cmd.Flags().GetString(outFlag)
-	if err != nil {
-		return fmt.Errorf("get '%s' flag: %w", outFlag, err)
-	}
-
-	status, err := cmd.Flags().GetString(statusFlag)
-	if err != nil {
-		return fmt.Errorf("get '%s' flag: %w", statusFlag, err)
-	}
-
-	age, err := cmd.Flags().GetInt(ageFlag)
-	if err != nil {
-		return fmt.Errorf("get '%s' flag: %w", ageFlag, err)
-	}
-
-	objRegistry := registry.NewObjRegistry(cmd.Context(), args[0])
-	objSelector := registry.NewObjSelector(objRegistry, 0, registry.SelectorAwaiting, &registry.ObjFilter{
-		Status: status,
-		Age:    age,
-	})
-	objExporter := registry.NewObjExporter(objSelector)
-
-	cmd.Println("Writing result file:", out)
-	return objExporter.ExportJSONPreGen(out)
-}

@@ -2,7 +2,7 @@ import local from 'k6/x/frostfs/local';
 import { uuidv4 } from '../scenarios/libs/k6-utils-1.4.0.js';

 const payload = open('../go.sum', 'b');
-const local_cli = local.connect("/path/to/config.yaml", "/path/to/config/dir", "", false)
+const local_cli = local.connect("/path/to/config.yaml", "", false)

 export const options = {
   stages: [

@@ -3,7 +3,7 @@ import { fail } from "k6";
 import { uuidv4 } from '../scenarios/libs/k6-utils-1.4.0.js';

 const payload = open('../go.sum', 'b');
-const frostfs_cli = native.connect("s01.frostfs.devenv:8080", "1dd37fba80fec4e6a6f13fd708d8dcb3b29def768017052f6c930fa1c5d90bbb", 0, 0, false)
+const frostfs_cli = native.connect("s01.frostfs.devenv:8080", "1dd37fba80fec4e6a6f13fd708d8dcb3b29def768017052f6c930fa1c5d90bbb", 0, 0)

 export const options = {
   stages: [

@@ -3,7 +3,7 @@ import { uuidv4 } from '../scenarios/libs/k6-utils-1.4.0.js';

 const payload = open('../go.sum', 'b');
 const container = "AjSxSNNXbJUDPqqKYm1VbFVDGCakbpUNH8aGjPmGAH3B"
-const frostfs_cli = native.connect("s01.frostfs.devenv:8080", "", 0, 0, false)
+const frostfs_cli = native.connect("s01.frostfs.devenv:8080", "", 0, 0)
 const frostfs_obj = frostfs_cli.onsite(container, payload)

 export const options = {

@@ -3,7 +3,7 @@ import { uuidv4 } from '../scenarios/libs/k6-utils-1.4.0.js';

 const bucket = "testbucket"
 const payload = open('../go.sum', 'b');
-const s3local_cli = s3local.connect("path/to/storage/config.yml", "path/to/storage/config/dir", {}, {
+const s3local_cli = s3local.connect("path/to/storage/config.yml", {}, {
   'testbucket': 'GBQDDUM1hdodXmiRHV57EUkFWJzuntsG8BG15wFSwam6',
 });

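The four example-script hunks above apply the same signature change to the bundled scenarios. A minimal sketch of the resulting s3local script on the `+` side of this compare (the `k6/x/frostfs/s3local` import path is assumed here, since the import line itself sits outside the visible hunk):

```js
// Assumed import path; not shown in the hunk above.
import s3local from 'k6/x/frostfs/s3local';

const bucket = "testbucket"
const payload = open('../go.sum', 'b');

// '+' side: config file path, parameter map, and bucket-to-container mapping.
const s3local_cli = s3local.connect("path/to/storage/config.yml", {}, {
    'testbucket': 'GBQDDUM1hdodXmiRHV57EUkFWJzuntsG8BG15wFSwam6',
});

export default function () {
    // k6 entry point; the scenario body would go here.
}
```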

go.mod | 139

@@ -1,139 +1,122 @@
 module git.frostfs.info/TrueCloudLab/xk6-frostfs

-go 1.20
+go 1.19

 require (
-	git.frostfs.info/TrueCloudLab/frostfs-node v0.37.1-0.20231213105742-e39db63827d8
+	git.frostfs.info/TrueCloudLab/frostfs-node v0.22.2-0.20230522084814-731bf5d0ee66
-	git.frostfs.info/TrueCloudLab/frostfs-s3-gw v0.27.0-rc.2
+	git.frostfs.info/TrueCloudLab/frostfs-s3-gw v0.24.1-0.20230403110435-01afa1cae425
-	git.frostfs.info/TrueCloudLab/frostfs-sdk-go v0.0.0-20230928142024-84b9d29fc98c
+	git.frostfs.info/TrueCloudLab/frostfs-sdk-go v0.0.0-20230519144724-f5b23eb22569
 	git.frostfs.info/TrueCloudLab/tzhash v1.8.0
-	github.com/aws/aws-sdk-go-v2 v1.19.0
+	github.com/aws/aws-sdk-go-v2 v1.16.3
-	github.com/aws/aws-sdk-go-v2/config v1.18.28
+	github.com/aws/aws-sdk-go-v2/config v1.15.5
-	github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.72
+	github.com/aws/aws-sdk-go-v2/service/s3 v1.26.9
-	github.com/aws/aws-sdk-go-v2/service/s3 v1.37.0
+	github.com/dop251/goja v0.0.0-20230427124612-428fc442ff5f
-	github.com/dop251/goja v0.0.0-20230626124041-ba8a63e79201
 	github.com/go-loremipsum/loremipsum v1.1.3
 	github.com/google/uuid v1.3.0
 	github.com/joho/godotenv v1.5.1
-	github.com/nspcc-dev/neo-go v0.101.5-0.20230808195420-5fc61be5f6c5
+	github.com/nspcc-dev/neo-go v0.101.1
-	github.com/panjf2000/ants/v2 v2.8.0
+	github.com/panjf2000/ants/v2 v2.5.0
-	github.com/sirupsen/logrus v1.9.3
+	github.com/sirupsen/logrus v1.9.2
-	github.com/spf13/cobra v1.7.0
+	github.com/stretchr/testify v1.8.3
-	github.com/stretchr/testify v1.8.4
+	go.etcd.io/bbolt v1.3.6
-	go.etcd.io/bbolt v1.3.7
+	go.k6.io/k6 v0.44.2-0.20230524054758-add1a5fe5019
-	go.k6.io/k6 v0.45.1
 	go.uber.org/zap v1.24.0
-	golang.org/x/sys v0.10.0
+	golang.org/x/sys v0.8.0
 )

 require (
-	git.frostfs.info/TrueCloudLab/frostfs-api-go/v2 v2.16.1-0.20231121085847-241a9f1ad0a4 // indirect
+	git.frostfs.info/TrueCloudLab/frostfs-api-go/v2 v2.15.1-0.20230519114017-0c67b8fefa41 // indirect
 	git.frostfs.info/TrueCloudLab/frostfs-crypto v0.6.0 // indirect
-	git.frostfs.info/TrueCloudLab/frostfs-observability v0.0.0-20230531082742-c97d21411eb6 // indirect
+	git.frostfs.info/TrueCloudLab/hrw v1.2.0 // indirect
-	git.frostfs.info/TrueCloudLab/hrw v1.2.1 // indirect
 	git.frostfs.info/TrueCloudLab/rfc6979 v0.4.0 // indirect
-	github.com/antlr4-go/antlr/v4 v4.13.0 // indirect
+	github.com/antlr/antlr4/runtime/Go/antlr/v4 v4.0.0-20230512164433-5d1fd1a340c9 // indirect
-	github.com/aws/aws-sdk-go v1.44.296 // indirect
+	github.com/aws/aws-sdk-go v1.44.6 // indirect
-	github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.10 // indirect
+	github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.1 // indirect
-	github.com/aws/aws-sdk-go-v2/credentials v1.13.27 // indirect
+	github.com/aws/aws-sdk-go-v2/credentials v1.12.0 // indirect
-	github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.13.5 // indirect
+	github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.12.4 // indirect
-	github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.35 // indirect
+	github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.10 // indirect
-	github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.29 // indirect
+	github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.4 // indirect
-	github.com/aws/aws-sdk-go-v2/internal/ini v1.3.36 // indirect
+	github.com/aws/aws-sdk-go-v2/internal/ini v1.3.11 // indirect
-	github.com/aws/aws-sdk-go-v2/internal/v4a v1.0.27 // indirect
+	github.com/aws/aws-sdk-go-v2/internal/v4a v1.0.1 // indirect
-	github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.9.11 // indirect
+	github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.9.1 // indirect
-	github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.30 // indirect
+	github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.5 // indirect
-	github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.29 // indirect
+	github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.4 // indirect
-	github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.14.4 // indirect
+	github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.13.4 // indirect
-	github.com/aws/aws-sdk-go-v2/service/sso v1.12.13 // indirect
+	github.com/aws/aws-sdk-go-v2/service/sso v1.11.4 // indirect
-	github.com/aws/aws-sdk-go-v2/service/ssooidc v1.14.13 // indirect
+	github.com/aws/aws-sdk-go-v2/service/sts v1.16.4 // indirect
-	github.com/aws/aws-sdk-go-v2/service/sts v1.19.3 // indirect
+	github.com/aws/smithy-go v1.11.2 // indirect
-	github.com/aws/smithy-go v1.13.5 // indirect
 	github.com/beorn7/perks v1.0.1 // indirect
 	github.com/bluele/gcache v0.0.2 // indirect
 	github.com/cenkalti/backoff/v4 v4.2.1 // indirect
 	github.com/cespare/xxhash/v2 v2.2.0 // indirect
 	github.com/davecgh/go-spew v1.1.1 // indirect
 	github.com/decred/dcrd/dcrec/secp256k1/v4 v4.2.0 // indirect
-	github.com/dgraph-io/badger/v4 v4.1.0 // indirect
-	github.com/dgraph-io/ristretto v0.1.1 // indirect
 	github.com/dlclark/regexp2 v1.10.0 // indirect
-	github.com/dustin/go-humanize v1.0.0 // indirect
 	github.com/fatih/color v1.15.0 // indirect
 	github.com/fsnotify/fsnotify v1.6.0 // indirect
 	github.com/go-logr/logr v1.2.4 // indirect
 	github.com/go-logr/stdr v1.2.2 // indirect
 	github.com/go-sourcemap/sourcemap v2.1.4-0.20211119122758-180fcef48034+incompatible // indirect
-	github.com/gogo/protobuf v1.3.2 // indirect
-	github.com/golang/glog v1.1.0 // indirect
-	github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
 	github.com/golang/protobuf v1.5.3 // indirect
-	github.com/golang/snappy v0.0.4 // indirect
+	github.com/google/pprof v0.0.0-20230510103437-eeec1cb781c3 // indirect
-	github.com/google/flatbuffers v1.12.1 // indirect
-	github.com/google/pprof v0.0.0-20230705174524-200ffdc848b8 // indirect
 	github.com/gorilla/mux v1.8.0 // indirect
-	github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0 // indirect
+	github.com/grpc-ecosystem/grpc-gateway/v2 v2.15.2 // indirect
 	github.com/hashicorp/golang-lru v0.6.0 // indirect
-	github.com/hashicorp/golang-lru/v2 v2.0.4 // indirect
+	github.com/hashicorp/golang-lru/v2 v2.0.2 // indirect
 	github.com/hashicorp/hcl v1.0.0 // indirect
-	github.com/inconshreveable/mousetrap v1.1.0 // indirect
 	github.com/jmespath/go-jmespath v0.4.0 // indirect
 	github.com/josharian/intern v1.0.0 // indirect
-	github.com/klauspost/compress v1.16.7 // indirect
+	github.com/klauspost/compress v1.16.5 // indirect
 	github.com/magiconair/properties v1.8.7 // indirect
 	github.com/mailru/easyjson v0.7.7 // indirect
 	github.com/mattn/go-colorable v0.1.13 // indirect
 	github.com/mattn/go-isatty v0.0.19 // indirect
 	github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect
 	github.com/minio/highwayhash v1.0.2 // indirect
-	github.com/minio/sio v0.3.1 // indirect
+	github.com/minio/sio v0.3.0 // indirect
 	github.com/mitchellh/mapstructure v1.5.0 // indirect
 	github.com/mr-tron/base58 v1.2.0 // indirect
 	github.com/mstoykov/atlas v0.0.0-20220811071828-388f114305dd // indirect
 	github.com/nats-io/jwt/v2 v2.4.1 // indirect
-	github.com/nats-io/nats.go v1.27.1 // indirect
+	github.com/nats-io/nats.go v1.25.0 // indirect
 	github.com/nats-io/nkeys v0.4.4 // indirect
 	github.com/nats-io/nuid v1.0.1 // indirect
 	github.com/nspcc-dev/rfc6979 v0.2.0 // indirect
 	github.com/onsi/ginkgo v1.16.5 // indirect
 	github.com/onsi/gomega v1.20.2 // indirect
-	github.com/pelletier/go-toml/v2 v2.0.8 // indirect
+	github.com/pelletier/go-toml/v2 v2.0.7 // indirect
-	github.com/pkg/errors v0.9.1 // indirect
 	github.com/pmezard/go-difflib v1.0.0 // indirect
-	github.com/prometheus/client_golang v1.16.0 // indirect
+	github.com/prometheus/client_golang v1.15.1 // indirect
 	github.com/prometheus/client_model v0.4.0 // indirect
 	github.com/prometheus/common v0.44.0 // indirect
-	github.com/prometheus/procfs v0.11.0 // indirect
+	github.com/prometheus/procfs v0.10.0 // indirect
 	github.com/serenize/snaker v0.0.0-20201027110005-a7ad2135616e // indirect
+	github.com/spaolacci/murmur3 v1.1.0 // indirect
 	github.com/spf13/afero v1.9.5 // indirect
-	github.com/spf13/cast v1.5.1 // indirect
+	github.com/spf13/cast v1.5.0 // indirect
 	github.com/spf13/jwalterweatherman v1.1.0 // indirect
 	github.com/spf13/pflag v1.0.5 // indirect
-	github.com/spf13/viper v1.16.0 // indirect
+	github.com/spf13/viper v1.15.0 // indirect
 	github.com/subosito/gotenv v1.4.2 // indirect
-	github.com/twmb/murmur3 v1.1.8 // indirect
+	go.opentelemetry.io/otel v1.15.1 // indirect
-	go.opencensus.io v0.24.0 // indirect
+	go.opentelemetry.io/otel/exporters/otlp/internal/retry v1.15.1 // indirect
-	go.opentelemetry.io/otel v1.16.0 // indirect
+	go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.15.1 // indirect
-	go.opentelemetry.io/otel/exporters/otlp/internal/retry v1.16.0 // indirect
+	go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.15.1 // indirect
-	go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.16.0 // indirect
+	go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.15.1 // indirect
-	go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.16.0 // indirect
+	go.opentelemetry.io/otel/sdk v1.15.1 // indirect
-	go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.16.0 // indirect
+	go.opentelemetry.io/otel/trace v1.15.1 // indirect
-	go.opentelemetry.io/otel/metric v1.16.0 // indirect
+	go.opentelemetry.io/proto/otlp v0.19.0 // indirect
-	go.opentelemetry.io/otel/sdk v1.16.0 // indirect
-	go.opentelemetry.io/otel/trace v1.16.0 // indirect
-	go.opentelemetry.io/proto/otlp v0.20.0 // indirect
 	go.uber.org/atomic v1.11.0 // indirect
 	go.uber.org/multierr v1.11.0 // indirect
-	golang.org/x/crypto v0.11.0 // indirect
+	golang.org/x/crypto v0.9.0 // indirect
-	golang.org/x/exp v0.0.0-20230626212559-97b1e661b5df // indirect
+	golang.org/x/exp v0.0.0-20230515195305-f3d0a9c9a5cc // indirect
-	golang.org/x/net v0.12.0 // indirect
+	golang.org/x/net v0.10.0 // indirect
-	golang.org/x/sync v0.3.0 // indirect
+	golang.org/x/sync v0.2.0 // indirect
-	golang.org/x/text v0.11.0 // indirect
+	golang.org/x/text v0.9.0 // indirect
 	golang.org/x/time v0.3.0 // indirect
-	google.golang.org/genproto/googleapis/api v0.0.0-20230629202037-9506855d4529 // indirect
+	google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 // indirect
-	google.golang.org/genproto/googleapis/rpc v0.0.0-20230629202037-9506855d4529 // indirect
+	google.golang.org/grpc v1.55.0 // indirect
-	google.golang.org/grpc v1.56.1 // indirect
+	google.golang.org/protobuf v1.30.0 // indirect
-	google.golang.org/protobuf v1.31.0 // indirect
 	gopkg.in/guregu/null.v3 v3.5.0 // indirect
 	gopkg.in/ini.v1 v1.67.0 // indirect
 	gopkg.in/yaml.v3 v3.0.1 // indirect

go.sum | 328

@@ -36,70 +36,66 @@ cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RX
 cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0=
 cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo=
 dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
-git.frostfs.info/TrueCloudLab/frostfs-api-go/v2 v2.16.1-0.20231121085847-241a9f1ad0a4 h1:wjLfZ3WCt7qNGsQv+Jl0TXnmtg0uVk/jToKPFTBc/jo=
+git.frostfs.info/TrueCloudLab/frostfs-api-go/v2 v2.15.1-0.20230519114017-0c67b8fefa41 h1:xtGsOUX8Rz0hwWIFa148URysWuD4nRHspPNbYAUc1tg=
-git.frostfs.info/TrueCloudLab/frostfs-api-go/v2 v2.16.1-0.20231121085847-241a9f1ad0a4/go.mod h1:uY0AYmCznjZdghDnAk7THFIe1Vlg531IxUcus7ZfUJI=
+git.frostfs.info/TrueCloudLab/frostfs-api-go/v2 v2.15.1-0.20230519114017-0c67b8fefa41/go.mod h1:6wEpMfSwD5xNtQYYVHWWTHwpYuvyumyntZEzILBIXUo=
 git.frostfs.info/TrueCloudLab/frostfs-crypto v0.6.0 h1:FxqFDhQYYgpe41qsIHVOcdzSVCB8JNSfPG7Uk4r2oSk=
 git.frostfs.info/TrueCloudLab/frostfs-crypto v0.6.0/go.mod h1:RUIKZATQLJ+TaYQa60X2fTDwfuhMfm8Ar60bQ5fr+vU=
-git.frostfs.info/TrueCloudLab/frostfs-node v0.37.1-0.20231213105742-e39db63827d8 h1:GkVcsr5vo7DJFG9DiD/JNUTBCIcDqqpVvJX+4xDVNXo=
+git.frostfs.info/TrueCloudLab/frostfs-node v0.22.2-0.20230522084814-731bf5d0ee66 h1:poZ1ufHTcFuPZVcLNXWHTgvCv4VQ0OBi8FmPbvjorug=
-git.frostfs.info/TrueCloudLab/frostfs-node v0.37.1-0.20231213105742-e39db63827d8/go.mod h1:X7m/CJ8BgTQBQ0o4rEFNbVLSsncrJwtyqNO79r6TYe8=
+git.frostfs.info/TrueCloudLab/frostfs-node v0.22.2-0.20230522084814-731bf5d0ee66/go.mod h1:YFk37+9i3+lTPqHMjJ3LQqMz+iSM1oOGYNGEJ+0X+20=
-git.frostfs.info/TrueCloudLab/frostfs-observability v0.0.0-20230531082742-c97d21411eb6 h1:aGQ6QaAnTerQ5Dq5b2/f9DUQtSqPkZZ/bkMx/HKuLCo=
+git.frostfs.info/TrueCloudLab/frostfs-s3-gw v0.24.1-0.20230403110435-01afa1cae425 h1:vHDmz5CLJrw0JZR85TP57WqvjwgfTmbgOp/SQcmjUUg=
-git.frostfs.info/TrueCloudLab/frostfs-observability v0.0.0-20230531082742-c97d21411eb6/go.mod h1:W8Nn08/l6aQ7UlIbpF7FsQou7TVpcRD1ZT1KG4TrFhE=
+git.frostfs.info/TrueCloudLab/frostfs-s3-gw v0.24.1-0.20230403110435-01afa1cae425/go.mod h1:b0Z8M58N+uyOvfSWPO3ZWsqK1t9o/w2qj78ITNiUTOw=
-git.frostfs.info/TrueCloudLab/frostfs-s3-gw v0.27.0-rc.2 h1:C9Hya/NN8U7P45od221YCtL78zmIbHs9eq5AWlcNkJQ=
+git.frostfs.info/TrueCloudLab/frostfs-sdk-go v0.0.0-20230519144724-f5b23eb22569 h1:JDySbbi0MyU1fSuew0jNyWwDhrQREetHWJiSzKzXXXI=
-git.frostfs.info/TrueCloudLab/frostfs-s3-gw v0.27.0-rc.2/go.mod h1:sYR/v3/WzBDAR8dLewjTxaSNYnvQtMVj0ypSy1FuLRo=
+git.frostfs.info/TrueCloudLab/frostfs-sdk-go v0.0.0-20230519144724-f5b23eb22569/go.mod h1:0n2gQYkqCgiXkDzk0RLUUxoF2xX79Ke85bAiGV8cBl8=
-git.frostfs.info/TrueCloudLab/frostfs-sdk-go v0.0.0-20230928142024-84b9d29fc98c h1:c8mduKlc8Zioppz5o06QRYS5KYX3BFRO+NgKj2q6kD8=
+git.frostfs.info/TrueCloudLab/hrw v1.2.0 h1:KvAES7xIqmQBGd2q8KanNosD9+4BhU/zqD5Kt5KSflk=
-git.frostfs.info/TrueCloudLab/frostfs-sdk-go v0.0.0-20230928142024-84b9d29fc98c/go.mod h1:t1akKcUH7iBrFHX8rSXScYMP17k2kYQXMbZooiL5Juw=
+git.frostfs.info/TrueCloudLab/hrw v1.2.0/go.mod h1:mq2sbvYfO+BB6iFZwYBkgC0yc6mJNx+qZi4jW918m+Y=
-git.frostfs.info/TrueCloudLab/hrw v1.2.1 h1:ccBRK21rFvY5R1WotI6LNoPlizk7qSvdfD8lNIRudVc=
-git.frostfs.info/TrueCloudLab/hrw v1.2.1/go.mod h1:C1Ygde2n843yTZEQ0FP69jYiuaYV0kriLvP4zm8JuvM=
 git.frostfs.info/TrueCloudLab/rfc6979 v0.4.0 h1:M2KR3iBj7WpY3hP10IevfIB9MURr4O9mwVfJ+SjT3HA=
 git.frostfs.info/TrueCloudLab/rfc6979 v0.4.0/go.mod h1:okpbKfVYf/BpejtfFTfhZqFP+sZ8rsHrP8Rr/jYPNRc=
 git.frostfs.info/TrueCloudLab/tzhash v1.8.0 h1:UFMnUIk0Zh17m8rjGHJMqku2hCgaXDqjqZzS4gsb4UA=
 git.frostfs.info/TrueCloudLab/tzhash v1.8.0/go.mod h1:dhY+oy274hV8wGvGL4MwwMpdL3GYvaX1a8GQZQHvlF8=
 github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
 github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
+github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
 github.com/andybalholm/brotli v1.0.5 h1:8uQZIdzKmjc/iuPu7O2ioW48L81FgatrcpfFmiq/cCs=
-github.com/antlr4-go/antlr/v4 v4.13.0 h1:lxCg3LAv+EUK6t1i0y1V6/SLeUi0eKEKdhQAlS8TVTI=
+github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
-github.com/antlr4-go/antlr/v4 v4.13.0/go.mod h1:pfChB/xh/Unjila75QW7+VU4TSnWnnk9UTnmpPaOR2g=
+github.com/antlr/antlr4/runtime/Go/antlr/v4 v4.0.0-20230512164433-5d1fd1a340c9 h1:goHVqTbFX3AIo0tzGr14pgfAW2ZfPChKO21Z9MGf/gk=
-github.com/aws/aws-sdk-go v1.44.296 h1:ALRZIIKI+6EBWDiWP4RHWmOtHZ7dywRzenL4NWgNI2A=
+github.com/antlr/antlr4/runtime/Go/antlr/v4 v4.0.0-20230512164433-5d1fd1a340c9/go.mod h1:pSwJ0fSY5KhvocuWSx4fz3BA8OrA1bQn+K1Eli3BRwM=
-github.com/aws/aws-sdk-go v1.44.296/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
+github.com/aws/aws-sdk-go v1.44.6 h1:Y+uHxmZfhRTLX2X3khkdxCoTZAyGEX21aOUHe1U6geg=
-github.com/aws/aws-sdk-go-v2 v1.19.0 h1:klAT+y3pGFBU/qVf1uzwttpBbiuozJYWzNLHioyDJ+k=
+github.com/aws/aws-sdk-go v1.44.6/go.mod h1:y4AeaBuwd2Lk+GepC1E9v0qOiTws0MIWAX4oIKwKHZo=
-github.com/aws/aws-sdk-go-v2 v1.19.0/go.mod h1:uzbQtefpm44goOPmdKyAlXSNcwlRgF3ePWVW6EtJvvw=
+github.com/aws/aws-sdk-go-v2 v1.16.3 h1:0W1TSJ7O6OzwuEvIXAtJGvOeQ0SGAhcpxPN2/NK5EhM=
-github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.10 h1:dK82zF6kkPeCo8J1e+tGx4JdvDIQzj7ygIoLg8WMuGs=
+github.com/aws/aws-sdk-go-v2 v1.16.3/go.mod h1:ytwTPBG6fXTZLxxeeCCWj2/EMYp/xDUgX+OET6TLNNU=
-github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.10/go.mod h1:VeTZetY5KRJLuD/7fkQXMU6Mw7H5m/KP2J5Iy9osMno=
+github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.1 h1:SdK4Ppk5IzLs64ZMvr6MrSficMtjY2oS0WOORXTlxwU=
-github.com/aws/aws-sdk-go-v2/config v1.18.28 h1:TINEaKyh1Td64tqFvn09iYpKiWjmHYrG1fa91q2gnqw=
+github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.1/go.mod h1:n8Bs1ElDD2wJ9kCRTczA83gYbBmjSwZp3umc6zF4EeM=
-github.com/aws/aws-sdk-go-v2/config v1.18.28/go.mod h1:nIL+4/8JdAuNHEjn/gPEXqtnS02Q3NXB/9Z7o5xE4+A=
+github.com/aws/aws-sdk-go-v2/config v1.15.5 h1:P+xwhr6kabhxDTXTVH9YoHkqjLJ0wVVpIUHtFNr2hjU=
-github.com/aws/aws-sdk-go-v2/credentials v1.13.27 h1:dz0yr/yR1jweAnsCx+BmjerUILVPQ6FS5AwF/OyG1kA=
+github.com/aws/aws-sdk-go-v2/config v1.15.5/go.mod h1:ZijHHh0xd/A+ZY53az0qzC5tT46kt4JVCePf2NX9Lk4=
-github.com/aws/aws-sdk-go-v2/credentials v1.13.27/go.mod h1:syOqAek45ZXZp29HlnRS/BNgMIW6uiRmeuQsz4Qh2UE=
+github.com/aws/aws-sdk-go-v2/credentials v1.12.0 h1:4R/NqlcRFSkR0wxOhgHi+agGpbEr5qMCjn7VqUIJY+E=
-github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.13.5 h1:kP3Me6Fy3vdi+9uHd7YLr6ewPxRL+PU6y15urfTaamU=
+github.com/aws/aws-sdk-go-v2/credentials v1.12.0/go.mod h1:9YWk7VW+eyKsoIL6/CljkTrNVWBSK9pkqOPUuijid4A=
-github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.13.5/go.mod h1:Gj7tm95r+QsDoN2Fhuz/3npQvcZbkEf5mL70n3Xfluc=
+github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.12.4 h1:FP8gquGeGHHdfY6G5llaMQDF+HAf20VKc8opRwmjf04=
-github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.72 h1:m0MmP89v1B0t3b8W8rtATU76KNsodak69QtiokHyEvo=
+github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.12.4/go.mod h1:u/s5/Z+ohUQOPXl00m2yJVyioWDECsbpXTQlaqSlufc=
-github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.72/go.mod h1:ylOTxIuoTL+XjH46Omv2iPjHdeGUk3SQ4hxYho4EHMA=
+github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.10 h1:uFWgo6mGJI1n17nbcvSc6fxVuR3xLNqvXt12JCnEcT8=
-github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.35 h1:hMUCiE3Zi5AHrRNGf5j985u0WyqI6r2NULhUfo0N/No=
+github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.10/go.mod h1:F+EZtuIwjlv35kRJPyBGcsA4f7bnSoz15zOQ2lJq1Z4=
-github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.35/go.mod h1:ipR5PvpSPqIqL5Mi82BxLnfMkHVbmco8kUwO2xrCi0M=
+github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.4 h1:cnsvEKSoHN4oAN7spMMr0zhEW2MHnhAVpmqQg8E6UcM=
-github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.29 h1:yOpYx+FTBdpk/g+sBU6Cb1H0U/TLEcYYp66mYqsPpcc=
+github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.4/go.mod h1:8glyUqVIM4AmeenIsPo0oVh3+NUwnsQml2OFupfQW+0=
-github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.29/go.mod h1:M/eUABlDbw2uVrdAn+UsI6M727qp2fxkp8K0ejcBDUY=
+github.com/aws/aws-sdk-go-v2/internal/ini v1.3.11 h1:6cZRymlLEIlDTEB0+5+An6Zj1CKt6rSE69tOmFeu1nk=
-github.com/aws/aws-sdk-go-v2/internal/ini v1.3.36 h1:8r5m1BoAWkn0TDC34lUculryf7nUF25EgIMdjvGCkgo=
+github.com/aws/aws-sdk-go-v2/internal/ini v1.3.11/go.mod h1:0MR+sS1b/yxsfAPvAESrw8NfwUoxMinDyw6EYR9BS2U=
-github.com/aws/aws-sdk-go-v2/internal/ini v1.3.36/go.mod h1:Rmw2M1hMVTwiUhjwMoIBFWFJMhvJbct06sSidxInkhY=
+github.com/aws/aws-sdk-go-v2/internal/v4a v1.0.1 h1:C21IDZCm9Yu5xqjb3fKmxDoYvJXtw1DNlOmLZEIlY1M=
-github.com/aws/aws-sdk-go-v2/internal/v4a v1.0.27 h1:cZG7psLfqpkB6H+fIrgUDWmlzM474St1LP0jcz272yI=
+github.com/aws/aws-sdk-go-v2/internal/v4a v1.0.1/go.mod h1:l/BbcfqDCT3hePawhy4ZRtewjtdkl6GWtd9/U+1penQ=
-github.com/aws/aws-sdk-go-v2/internal/v4a v1.0.27/go.mod h1:ZdjYvJpDlefgh8/hWelJhqgqJeodxu4SmbVsSdBlL7E=
+github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.9.1 h1:T4pFel53bkHjL2mMo+4DKE6r6AuoZnM0fg7k1/ratr4=
-github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.9.11 h1:y2+VQzC6Zh2ojtV2LoC0MNwHWc6qXv/j2vrQtlftkdA=
+github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.9.1/go.mod h1:GeUru+8VzrTXV/83XyMJ80KpH8xO89VPoUileyNQ+tc=
-github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.9.11/go.mod h1:iV4q2hsqtNECrfmlXyord9u4zyuFEJX9eLgLpSPzWA8=
+github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.5 h1:9LSZqt4v1JiehyZTrQnRFf2mY/awmyYNNY/b7zqtduU=
-github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.30 h1:Bje8Xkh2OWpjBdNfXLrnn8eZg569dUQmhgtydxAYyP0=
+github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.5/go.mod h1:S8TVP66AAkMMdYYCNZGvrdEq9YRm+qLXjio4FqRnrEE=
-github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.30/go.mod h1:qQtIBl5OVMfmeQkz8HaVyh5DzFmmFXyvK27UgIgOr4c=
+github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.4 h1:b16QW0XWl0jWjLABFc1A+uh145Oqv+xDcObNk0iQgUk=
-github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.29 h1:IiDolu/eLmuB18DRZibj77n1hHQT7z12jnGO7Ze3pLc=
+github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.4/go.mod h1:uKkN7qmSIsNJVyMtxNQoCEYMvFEXbOg9fwCJPdfp2u8=
-github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.29/go.mod h1:fDbkK4o7fpPXWn8YAPmTieAMuB9mk/VgvW64uaUqxd4=
+github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.13.4 h1:RE/DlZLYrz1OOmq8F28IXHLksuuvlpzUbvJ+SESCZBI=
-github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.14.4 h1:hx4WksB0NRQ9utR+2c3gEGzl6uKj3eM6PMQ6tN3lgXs=
+github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.13.4/go.mod h1:oudbsSdDtazNj47z1ut1n37re9hDsKpk2ZI3v7KSxq0=
-github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.14.4/go.mod h1:JniVpqvw90sVjNqanGLufrVapWySL28fhBlYgl96Q/w=
+github.com/aws/aws-sdk-go-v2/service/s3 v1.26.9 h1:LCQKnopq2t4oQS3VKivlYTzAHCTJZZoQICM9fny7KHY=
-github.com/aws/aws-sdk-go-v2/service/s3 v1.37.0 h1:PalLOEGZ/4XfQxpGZFTLaoJSmPoybnqJYotaIZEf/Rg=
+github.com/aws/aws-sdk-go-v2/service/s3 v1.26.9/go.mod h1:iMYipLPXlWpBJ0KFX7QJHZ84rBydHBY8as2aQICTPWk=
-github.com/aws/aws-sdk-go-v2/service/s3 v1.37.0/go.mod h1:PwyKKVL0cNkC37QwLcrhyeCrAk+5bY8O2ou7USyAS2A=
+github.com/aws/aws-sdk-go-v2/service/sso v1.11.4 h1:Uw5wBybFQ1UeA9ts0Y07gbv0ncZnIAyw858tDW0NP2o=
-github.com/aws/aws-sdk-go-v2/service/sso v1.12.13 h1:sWDv7cMITPcZ21QdreULwxOOAmE05JjEsT6fCDtDA9k=
+github.com/aws/aws-sdk-go-v2/service/sso v1.11.4/go.mod h1:cPDwJwsP4Kff9mldCXAmddjJL6JGQqtA3Mzer2zyr88=
-github.com/aws/aws-sdk-go-v2/service/sso v1.12.13/go.mod h1:DfX0sWuT46KpcqbMhJ9QWtxAIP1VozkDWf8VAkByjYY=
+github.com/aws/aws-sdk-go-v2/service/sts v1.16.4 h1:+xtV90n3abQmgzk1pS++FdxZTrPEDgQng6e4/56WR2A=
-github.com/aws/aws-sdk-go-v2/service/ssooidc v1.14.13 h1:BFubHS/xN5bjl818QaroN6mQdjneYQ+AOx44KNXlyH4=
+github.com/aws/aws-sdk-go-v2/service/sts v1.16.4/go.mod h1:lfSYenAXtavyX2A1LsViglqlG9eEFYxNryTZS5rn3QE=
-github.com/aws/aws-sdk-go-v2/service/ssooidc v1.14.13/go.mod h1:BzqsVVFduubEmzrVtUFQQIQdFqvUItF8XUq2EnS8Wog=
+github.com/aws/smithy-go v1.11.2 h1:eG/N+CcUMAvsdffgMvjMKwfyDzIkjM6pfxMJ8Mzc6mE=
-github.com/aws/aws-sdk-go-v2/service/sts v1.19.3 h1:e5mnydVdCVWxP+5rPAGi2PYxC7u2OZgH1ypC114H04U=
+github.com/aws/smithy-go v1.11.2/go.mod h1:3xHYmszWVx2c0kIwQeEVf9uSm4fYZt67FBJnwub1bgM=
-github.com/aws/aws-sdk-go-v2/service/sts v1.19.3/go.mod h1:yVGZA1CPkmUhBdA039jXNJJG7/6t+G+EBWmFq23xqnY=
+github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8=
-github.com/aws/smithy-go v1.13.5 h1:hgz0X/DX0dGqTYpGALqXJoRKRj5oQ7150i5FdTePzO8=
-github.com/aws/smithy-go v1.13.5/go.mod h1:Tg+OJXh4MB2R/uN61Ko2f6hTZwB/ZYGOtib8J3gBHzA=
-github.com/benbjohnson/clock v1.3.5 h1:VvXlSJBzZpA/zum6Sj74hxwYI2DIxRWuNIoXAzHZz5o=
 github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
 github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
 github.com/bluele/gcache v0.0.2 h1:WcbfdXICg7G/DGBh1PFfcirkWOQV+v077yF1pSy3DGw=

@@ -107,6 +103,7 @@ github.com/bluele/gcache v0.0.2/go.mod h1:m15KV+ECjptwSPxKhOhQoAFQVtUFjTVkc3H8o0
 github.com/cenkalti/backoff/v4 v4.2.1 h1:y4OZtCnogmCPw98Zjyt5a6+QwPLGkiQsYW5oUqylYbM=
 github.com/cenkalti/backoff/v4 v4.2.1/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE=
 github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
+github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
 github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
 github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44=
 github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=

@@ -120,43 +117,42 @@ github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDk
 github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
 github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
 github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
-github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
+github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI=
+github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
+github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
+github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
+github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
 github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
 github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/decred/dcrd/dcrec/secp256k1/v4 v4.2.0 h1:8UrgZ3GkP4i/CLijOJx79Yu+etlyjdBU4sfcs2WYQMs=
 github.com/decred/dcrd/dcrec/secp256k1/v4 v4.2.0/go.mod h1:v57UDF4pDQJcEfFUCRop3lJL149eHGSe9Jvczhzjo/0=
-github.com/dgraph-io/badger/v4 v4.1.0 h1:E38jc0f+RATYrycSUf9LMv/t47XAy+3CApyYSq4APOQ=
-github.com/dgraph-io/badger/v4 v4.1.0/go.mod h1:P50u28d39ibBRmIJuQC/NSdBOg46HnHw7al2SW5QRHg=
-github.com/dgraph-io/ristretto v0.1.1 h1:6CWw5tJNgpegArSHpNHJKldNeq03FQCwYvfMVWajOK8=
-github.com/dgraph-io/ristretto v0.1.1/go.mod h1:S1GPSBCYCIhmVNfcth17y2zZtQT6wzkzgwUve0VDWWA=
-github.com/dgryski/go-farm v0.0.0-20190423205320-6a90982ecee2 h1:tdlZCpZ/P9DhczCTSixgIKmwPv6+wP5DGjqLYw5SUiA=
-github.com/dgryski/go-farm v0.0.0-20190423205320-6a90982ecee2/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw=
 github.com/dlclark/regexp2 v1.4.1-0.20201116162257-a2a8dda75c91/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
 github.com/dlclark/regexp2 v1.7.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
 github.com/dlclark/regexp2 v1.10.0 h1:+/GIL799phkJqYW+3YbOd8LCcbHzT0Pbo8zl70MHsq0=
 github.com/dlclark/regexp2 v1.10.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
 github.com/dop251/goja v0.0.0-20211022113120-dc8c55024d06/go.mod h1:R9ET47fwRVRPZnOGvHxxhuZcbrMCuiqOz3Rlrh4KSnk=
-github.com/dop251/goja v0.0.0-20230626124041-ba8a63e79201 h1:+9NRIliCUhliHMCixEO0mcXmrv3HYwxs9oxM1Z+qnYM=
+github.com/dop251/goja v0.0.0-20230427124612-428fc442ff5f h1:3Z9NjtffvA8Qoh8xjgUpPmyKawJw/mDRcJlR9oPCvqI=
-github.com/dop251/goja v0.0.0-20230626124041-ba8a63e79201/go.mod h1:QMWlm50DNe14hD7t24KEqZuUdC9sOTy8W6XbCU1mlw4=
+github.com/dop251/goja v0.0.0-20230427124612-428fc442ff5f/go.mod h1:QMWlm50DNe14hD7t24KEqZuUdC9sOTy8W6XbCU1mlw4=
 github.com/dop251/goja_nodejs v0.0.0-20210225215109-d91c329300e7/go.mod h1:hn7BA7c8pLvoGndExHudxTDKZ84Pyvv+90pbBjbTz0Y=
 github.com/dop251/goja_nodejs v0.0.0-20211022123610-8dd9abb0616d/go.mod h1:DngW8aVqWbuLRMHItjPUyqdj+HWPvnQe8V8y1nDpIbM=
-github.com/dustin/go-humanize v1.0.0 h1:VSnTsYCnlFHaM2/igO1h6X3HA71jcobQuxemgkq4zYo=
-github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
 github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
 github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
 github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
 github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po=
 github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
+github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ=
+github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0=
 github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
 github.com/fatih/color v1.15.0 h1:kOqh6YHBtK8aywxGerMG2Eq3H6Qgoqeo13Bk2Mv/nBs=
 github.com/fatih/color v1.15.0/go.mod h1:0h5ZqXfHYED7Bhv2ZJamyIOUej9KtShiJESRwBDUSsw=
-github.com/frankban/quicktest v1.14.5 h1:dfYrrRyLtiqT9GyKXgdh+k4inNeTvmGbuSgZ3lx3GhA=
+github.com/frankban/quicktest v1.14.3 h1:FJKSZTDHjyhriyC81FLQ0LY93eSai0ZyR/ZIkd3ZUKE=
 github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
 github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ=
 github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY=
 github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw=
+github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
 github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
|
||||||
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
|
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
|
||||||
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
|
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
|
||||||
|
@@ -171,16 +167,12 @@ github.com/go-sourcemap/sourcemap v2.1.3+incompatible/go.mod h1:F8jJfvm2KbVjc5Nq
github.com/go-sourcemap/sourcemap v2.1.4-0.20211119122758-180fcef48034+incompatible h1:bopx7t9jyUNX1ebhr0G4gtQWmUOgwQRI0QsYhdYLgkU=
github.com/go-sourcemap/sourcemap v2.1.4-0.20211119122758-180fcef48034+incompatible/go.mod h1:F8jJfvm2KbVjc5NqelyYJmf/v5J0dwNLS2mL4sNA1Jg=
github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE=
-github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q=
-github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
+github.com/golang/glog v1.0.0/go.mod h1:EWib/APOK0SL3dFbYqvxE3UYd8E6s1ouQ7iEp/0LWV4=
github.com/golang/glog v1.1.0 h1:/d3pCKDPWNnvIWe0vVUpNP32qc8U3PDVxySP/y360qE=
-github.com/golang/glog v1.1.0/go.mod h1:pfYeQZ3JWZoXTV5sFc986z3HTpwQs9At6P4ImfuP3NQ=
github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
-github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE=
-github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y=
@@ -203,14 +195,11 @@ github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QD
github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
+github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg=
github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
-github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM=
-github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
-github.com/google/flatbuffers v1.12.1 h1:MVlul7pQNoDzWRLTw5imwYsl+usrS1TXG2H4jg6ImGw=
-github.com/google/flatbuffers v1.12.1/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8=
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
@@ -219,10 +208,10 @@ github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
-github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
-github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
+github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE=
github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0=
@@ -238,8 +227,8 @@ github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLe
github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/pprof v0.0.0-20201218002935-b9804c9f04c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/pprof v0.0.0-20230207041349-798e818bf904/go.mod h1:uglQLonpP8qtYCYyzA+8c/9qtqgA3qsXGYqCPKARAFg=
-github.com/google/pprof v0.0.0-20230705174524-200ffdc848b8 h1:n6vlPhxsA+BW/XsS5+uqi7GyzaLa5MH7qlSLBZtRdiA=
+github.com/google/pprof v0.0.0-20230510103437-eeec1cb781c3 h1:2XF1Vzq06X+inNqgJ9tRnGuw+ZVCB3FazXODD6JE1R8=
-github.com/google/pprof v0.0.0-20230705174524-200ffdc848b8/go.mod h1:Jh3hGz2jkYak8qXPD19ryItVnUgpgeqzdkY/D0EaeuA=
+github.com/google/pprof v0.0.0-20230510103437-eeec1cb781c3/go.mod h1:79YE0hCXdHag9sBkw2o+N/YnZtTkXi0UT9Nnixa5eYk=
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I=
@@ -250,22 +239,22 @@ github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8
github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI=
github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So=
github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc=
-github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0 h1:YBftPWNWd4WwGqtY2yeZL2ef8rHAxPBD8KFhJpmcqms=
+github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw=
-github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0/go.mod h1:YN5jB8ie0yfIUg6VvR9Kz84aCaG7AsGZnLjhHbUqwPg=
+github.com/grpc-ecosystem/grpc-gateway/v2 v2.7.0/go.mod h1:hgWBS7lorOAVIJEQMi4ZsPv9hVvWI6+ch50m39Pf2Ks=
+github.com/grpc-ecosystem/grpc-gateway/v2 v2.15.2 h1:gDLXvp5S9izjldquuoAhDzccbskOL6tDC5jMSyx3zxE=
+github.com/grpc-ecosystem/grpc-gateway/v2 v2.15.2/go.mod h1:7pdNwVWBBHGiCxa9lAszqCJMbfTISJ7oMftp8+UGV08=
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru v0.6.0 h1:uL2shRDx7RTrOrTCUZEGP/wJUFiUI8QT6E7z5o8jga4=
github.com/hashicorp/golang-lru v0.6.0/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4=
-github.com/hashicorp/golang-lru/v2 v2.0.4 h1:7GHuZcgid37q8o5i3QI9KMT4nCWQQ3Kx3Ov6bb9MfK0=
+github.com/hashicorp/golang-lru/v2 v2.0.2 h1:Dwmkdr5Nc/oBiXgJS3CDHNhJtIHkuZ3DZF5twqnfBdU=
-github.com/hashicorp/golang-lru/v2 v2.0.4/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM=
+github.com/hashicorp/golang-lru/v2 v2.0.2/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM=
github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4=
github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/ianlancetaylor/demangle v0.0.0-20220319035150-800ac71e25c2/go.mod h1:aYm2/VgdVmcIU8iMfdMvDMsRAQjcfZSKFby6HOFvi/w=
-github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
-github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg=
github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo=
github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8=
@@ -276,10 +265,9 @@ github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8Hm
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
-github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
-github.com/klauspost/compress v1.16.7 h1:2mk3MPGNzKyxErAw8YaohYh69+pa4sIQSC0fPGCFR9I=
+github.com/klauspost/compress v1.16.5 h1:IFV2oUNUzZaz+XyusxpLzpzS8Pt5rh0Z16For/djlyI=
-github.com/klauspost/compress v1.16.7/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE=
+github.com/klauspost/compress v1.16.5/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE=
github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
@@ -303,8 +291,8 @@ github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfr
github.com/mccutchen/go-httpbin v1.1.2-0.20190116014521-c5cb2f4802fa h1:lx8ZnNPwjkXSzOROz0cg69RlErRXs+L3eDkggASWKLo=
github.com/minio/highwayhash v1.0.2 h1:Aak5U0nElisjDCfPSG79Tgzkn2gl66NxOMspRrKnA/g=
github.com/minio/highwayhash v1.0.2/go.mod h1:BQskDq+xkJ12lmlUUi7U0M5Swg3EWR+dLTk+kldvVxY=
-github.com/minio/sio v0.3.1 h1:d59r5RTHb1OsQaSl1EaTWurzMMDRLA5fgNmjzD4eVu4=
+github.com/minio/sio v0.3.0 h1:syEFBewzOMOYVzSTFpp1MqpSZk8rUNbz8VIIc+PNzus=
-github.com/minio/sio v0.3.1/go.mod h1:S0ovgVgc+sTlQyhiXA1ppBLv7REM7TYi5yyq2qL/Y6o=
+github.com/minio/sio v0.3.0/go.mod h1:8b0yPp2avGThviy/+OCJBI6OMpvxoUuiLvE6F1lebhw=
github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/mr-tron/base58 v1.2.0 h1:T/HDJBh4ZCPbU39/+c3rRvE0uKBQlU27+QI8LJ4t64o=
@@ -315,15 +303,15 @@ github.com/mstoykov/envconfig v1.4.1-0.20220114105314-765c6d8c76f1 h1:94EkGmhXrV
github.com/nats-io/jwt/v2 v2.4.1 h1:Y35W1dgbbz2SQUYDPCaclXcuqleVmpbRa7646Jf2EX4=
github.com/nats-io/jwt/v2 v2.4.1/go.mod h1:24BeQtRwxRV8ruvC4CojXlx/WQ/VjuwlYiH+vu/+ibI=
github.com/nats-io/nats-server/v2 v2.7.4 h1:c+BZJ3rGzUKCBIM4IXO8uNT2u1vajGbD1kPA6wqCEaM=
-github.com/nats-io/nats.go v1.27.1 h1:OuYnal9aKVSnOzLQIzf7554OXMCG7KbaTkCSBHRcSoo=
+github.com/nats-io/nats.go v1.25.0 h1:t5/wCPGciR7X3Mu8QOi4jiJaXaWM8qtkLu4lzGZvYHE=
-github.com/nats-io/nats.go v1.27.1/go.mod h1:XpbWUlOElGwTYbMR7imivs7jJj9GtK7ypv321Wp6pjc=
+github.com/nats-io/nats.go v1.25.0/go.mod h1:D2WALIhz7V8M0pH8Scx8JZXlg6Oqz5VG+nQkK8nJdvg=
github.com/nats-io/nkeys v0.4.4 h1:xvBJ8d69TznjcQl9t6//Q5xXuVhyYiSos6RPtvQNTwA=
github.com/nats-io/nkeys v0.4.4/go.mod h1:XUkxdLPTufzlihbamfzQ7mw/VGx6ObUs+0bN5sNvt64=
github.com/nats-io/nuid v1.0.1 h1:5iA8DT8V7q8WK2EScv2padNa/rTESc1KdnPw4TC2paw=
github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OSON2c=
github.com/nspcc-dev/go-ordered-json v0.0.0-20220111165707-25110be27d22 h1:n4ZaFCKt1pQJd7PXoMJabZWK9ejjbLOVrkl/lOUmshg=
-github.com/nspcc-dev/neo-go v0.101.5-0.20230808195420-5fc61be5f6c5 h1:AXI2upTPeTqX+n4xrBEzPATgEviOM/Prg6UQ6KDm+DU=
+github.com/nspcc-dev/neo-go v0.101.1 h1:TVdcIpH/+bxQBTLRwWE3+Pw3j6j/JwguENbBSGAGid0=
-github.com/nspcc-dev/neo-go v0.101.5-0.20230808195420-5fc61be5f6c5/go.mod h1:Z0kpjwnTJj/ik/X6z18xjCL0X2+RNbqlnhKrl+MYgP8=
+github.com/nspcc-dev/neo-go v0.101.1/go.mod h1:J4tspxWw7jknX06F+VSMsKvIiNpYGfVTb2IxVC005YU=
github.com/nspcc-dev/rfc6979 v0.2.0 h1:3e1WNxrN60/6N0DW7+UYisLeZJyfqZTNOjeV/toYvOE=
github.com/nspcc-dev/rfc6979 v0.2.0/go.mod h1:exhIh1PdpDC5vQmyEsGvc4YDM/lyQp/452QxGq/UEso=
github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A=
@@ -337,44 +325,45 @@ github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7J
github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo=
github.com/onsi/gomega v1.20.2 h1:8uQq0zMgLEfa0vRrrBgaJF2gyW9Da9BmfGV+OyUzfkY=
github.com/onsi/gomega v1.20.2/go.mod h1:iYAIXgPSaDHak0LCMA+AWBpIKBr8WZicMxnE8luStNc=
-github.com/panjf2000/ants/v2 v2.8.0 h1:4p4gPabD6iNM9Y5NpMc0g0L15uXDmfn6jkW5KP+oiHQ=
+github.com/panjf2000/ants/v2 v2.5.0 h1:1rWGWSnxCsQBga+nQbA4/iY6VMeNoOIAM0ZWh9u3q2Q=
-github.com/panjf2000/ants/v2 v2.8.0/go.mod h1:KIBmYG9QQX5U2qzFP/yQJaq/nSb6rahS9iEHkrCMgM8=
+github.com/panjf2000/ants/v2 v2.5.0/go.mod h1:cU93usDlihJZ5CfRGNDYsiBYvoilLvBF5Qp/BT2GNRE=
-github.com/pelletier/go-toml/v2 v2.0.8 h1:0ctb6s9mE31h0/lhu+J6OPmVeDxJn+kYnJc2jZR9tGQ=
+github.com/pelletier/go-toml/v2 v2.0.7 h1:muncTPStnKRos5dpVKULv2FVd4bMOhNePj9CjgDb8Us=
-github.com/pelletier/go-toml/v2 v2.0.8/go.mod h1:vuYfssBdrU2XDZ9bYydBu6t+6a6PYNcZljzZR9VXg+4=
+github.com/pelletier/go-toml/v2 v2.0.7/go.mod h1:eumQOmlWiOPt5WriQQqoM5y18pDHwha2N+QD+EUNTek=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
-github.com/prometheus/client_golang v1.16.0 h1:yk/hx9hDbrGHovbci4BY+pRMfSuuat626eFsHb7tmT8=
+github.com/prometheus/client_golang v1.15.1 h1:8tXpTmJbyH5lydzFPoxSIJ0J46jdh3tylbvM1xCv0LI=
-github.com/prometheus/client_golang v1.16.0/go.mod h1:Zsulrv/L9oM40tJ7T815tM89lFEugiJ9HzIqaAx4LKc=
+github.com/prometheus/client_golang v1.15.1/go.mod h1:e9yaBhRPU2pPNsZwE+JdQl0KEt1N9XgF6zxWmaC0xOk=
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/prometheus/client_model v0.4.0 h1:5lQXD3cAg1OXBf4Wq03gTrXHeaV0TQvGfUooCfx1yqY=
github.com/prometheus/client_model v0.4.0/go.mod h1:oMQmHW1/JoDwqLtg57MGgP/Fb1CJEYF2imWWhWtMkYU=
github.com/prometheus/common v0.44.0 h1:+5BrQJwiBB9xsMygAB3TNvpQKOwlkc25LbISbrdOOfY=
github.com/prometheus/common v0.44.0/go.mod h1:ofAIvZbQ1e/nugmZGz4/qCb9Ap1VoSTIO7x0VV9VvuY=
-github.com/prometheus/procfs v0.11.0 h1:5EAgkfkMl659uZPbe9AS2N68a7Cc1TJbPEuGzFuRbyk=
+github.com/prometheus/procfs v0.10.0 h1:UkG7GPYkO4UZyLnyXjaWYcgOSONqwdBqFUT95ugmt6I=
-github.com/prometheus/procfs v0.11.0/go.mod h1:nwNm2aOCAYw8uTR/9bWRREkZFxAUcWzPHWJq+XBB/FM=
+github.com/prometheus/procfs v0.10.0/go.mod h1:nwNm2aOCAYw8uTR/9bWRREkZFxAUcWzPHWJq+XBB/FM=
+github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ=
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ=
-github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/serenize/snaker v0.0.0-20201027110005-a7ad2135616e h1:zWKUYT07mGmVBH+9UgnHXd/ekCK99C8EbDSAt5qsjXE=
github.com/serenize/snaker v0.0.0-20201027110005-a7ad2135616e/go.mod h1:Yow6lPLSAXx2ifx470yD/nUe22Dv5vBvxK/UK9UUTVs=
-github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
+github.com/sirupsen/logrus v1.9.2 h1:oxx1eChJGI6Uks2ZC4W1zpLlVgqB8ner4EuQwV4Ik1Y=
-github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
+github.com/sirupsen/logrus v1.9.2/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
+github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
+github.com/spaolacci/murmur3 v1.1.0 h1:7c1g84S4BPRrfL5Xrdp6fOJ206sU9y293DDHaoy0bLI=
+github.com/spaolacci/murmur3 v1.1.0/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
github.com/spf13/afero v1.9.5 h1:stMpOSZFs//0Lv29HduCmli3GUfpFoF3Y1Q/aXj/wVM=
github.com/spf13/afero v1.9.5/go.mod h1:UBogFpq8E9Hx+xc5CNTTEpTnuHVmXDwZcZcE1eb/UhQ=
-github.com/spf13/cast v1.5.1 h1:R+kOtfhWQE6TVQzY+4D7wJLBgkdVasCEFxSUBYBYIlA=
+github.com/spf13/cast v1.5.0 h1:rj3WzYc11XZaIZMPKmwP96zkFEnnAmV8s6XbB2aY32w=
-github.com/spf13/cast v1.5.1/go.mod h1:b9PdjNptOpzXr7Rq1q9gJML/2cdGQAo69NKzQ10KN48=
+github.com/spf13/cast v1.5.0/go.mod h1:SpXXQ5YoyJw6s3/6cMTQuxvgRl3PCJiyaX9p6b155UU=
-github.com/spf13/cobra v1.7.0 h1:hyqWnYt1ZQShIddO5kBpj3vu05/++x6tJ6dg8EC572I=
-github.com/spf13/cobra v1.7.0/go.mod h1:uLxZILRyS/50WlhOIKD7W6V5bgeIt+4sICxh6uRMrb0=
github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk=
github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo=
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
-github.com/spf13/viper v1.16.0 h1:rGGH0XDZhdUOryiDWjmIvUSWpbNqisK8Wk0Vyefw8hc=
+github.com/spf13/viper v1.15.0 h1:js3yy885G8xwJa6iOISGFwd+qlUo5AvyXb7CiihdtiU=
-github.com/spf13/viper v1.16.0/go.mod h1:yg78JgCJcbrQOvV9YLXgkLaZqUidkY9K+Dd1FofRzQg=
+github.com/spf13/viper v1.15.0/go.mod h1:fFcTBJxvhhzSJiZy8n+PeW6t8l+KeT/uTARa0jHOQLA=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
@@ -385,48 +374,42 @@ github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
+github.com/stretchr/testify v1.8.3 h1:RP3t2pwF7cMEbC1dqtB6poj3niw/9gnV4Cjg5oW5gtY=
github.com/stretchr/testify v1.8.3/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
-github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
-github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
github.com/subosito/gotenv v1.4.2 h1:X1TuBLAMDFbaTAChgCBLu3DU3UPyELpnF2jjJ2cz/S8=
github.com/subosito/gotenv v1.4.2/go.mod h1:ayKnFf/c6rvx/2iiLrJUk1e6plDbT3edrFNGqEflhK0=
-github.com/twmb/murmur3 v1.1.8 h1:8Yt9taO/WN3l08xErzjeschgZU2QSrwm1kclYq+0aRg=
-github.com/twmb/murmur3 v1.1.8/go.mod h1:Qq/R7NUyOfr65zD+6Q5IHKsJLwP7exErjN6lyyq3OSQ=
github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
-go.etcd.io/bbolt v1.3.7 h1:j+zJOnnEjF/kyHlDDgGnVL/AIqIJPq8UoB2GSNfkUfQ=
+go.etcd.io/bbolt v1.3.6 h1:/ecaJf0sk1l4l6V4awd65v2C3ILy7MSj+s/x1ADCIMU=
-go.etcd.io/bbolt v1.3.7/go.mod h1:N9Mkw9X8x5fupy0IKsmuqVtoGDyxsaDlbk4Rd05IAQw=
+go.etcd.io/bbolt v1.3.6/go.mod h1:qXsaaIqmgQH0T+OPdb99Bf+PKfBBQVAdyD6TY9G8XM4=
-go.k6.io/k6 v0.45.1 h1:z+iVxE7Qze2Ka8tKvnjerOsoTuQb8e27Vqd1wcG2IFI=
+go.k6.io/k6 v0.44.2-0.20230524054758-add1a5fe5019 h1:A1PEfh3iJqm6M9CqX54le7m4Sq9+sTidfz7pvyI/+xw=
-go.k6.io/k6 v0.45.1/go.mod h1:SBO/sqx6h/a0lJqEioMEpneb6zULogIyDmz+ufFqtIE=
+go.k6.io/k6 v0.44.2-0.20230524054758-add1a5fe5019/go.mod h1:KJdE8JIa1i6fcrX9flX63CuK3YcZGaSF/pBk8gpwu+U=
go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU=
go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8=
go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk=
-go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0=
+go.opentelemetry.io/otel v1.15.1 h1:3Iwq3lfRByPaws0f6bU3naAqOR1n5IeDWd9390kWHa8=
-go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo=
+go.opentelemetry.io/otel v1.15.1/go.mod h1:mHHGEHVDLal6YrKMmk9LqC4a3sF5g+fHfrttQIB1NTc=
-go.opentelemetry.io/otel v1.16.0 h1:Z7GVAX/UkAXPKsy94IU+i6thsQS4nb7LviLpnaNeW8s=
+go.opentelemetry.io/otel/exporters/otlp/internal/retry v1.15.1 h1:XYDQtNzdb2T4uM1pku2m76eSMDJgqhJ+6KzkqgQBALc=
-go.opentelemetry.io/otel v1.16.0/go.mod h1:vl0h9NUa1D5s1nv3A5vZOYWn8av4K8Ml6JDeHrT/bx4=
+go.opentelemetry.io/otel/exporters/otlp/internal/retry v1.15.1/go.mod h1:uOTV75+LOzV+ODmL8ahRLWkFA3eQcSC2aAsbxIu4duk=
-go.opentelemetry.io/otel/exporters/otlp/internal/retry v1.16.0 h1:t4ZwRPU+emrcvM2e9DHd0Fsf0JTPVcbfa/BhTDF03d0=
+go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.15.1 h1:tyoeaUh8REKay72DVYsSEBYV18+fGONe+YYPaOxgLoE=
-go.opentelemetry.io/otel/exporters/otlp/internal/retry v1.16.0/go.mod h1:vLarbg68dH2Wa77g71zmKQqlQ8+8Rq3GRG31uc0WcWI=
+go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.15.1/go.mod h1:HUSnrjQQ19KX9ECjpQxufsF+3ioD3zISPMlauTPZu2g=
-go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.16.0 h1:cbsD4cUcviQGXdw8+bo5x2wazq10SKz8hEbtCRPcU78=
+go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.15.1 h1:pIfoG5IAZFzp9EUlJzdSkpUwpaUAAnD+Ru1nBLTACIQ=
-go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.16.0/go.mod h1:JgXSGah17croqhJfhByOLVY719k1emAXC8MVhCIJlRs=
+go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.15.1/go.mod h1:poNKBqF5+nR/6ke2oGTDjHfksrsHDOHXAl2g4+9ONsY=
-go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.16.0 h1:TVQp/bboR4mhZSav+MdgXB8FaRho1RC8UwVn3T0vjVc=
+go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.15.1 h1:2PunuO5SbkN5MhCbuHCd3tC6qrcaj+uDAkX/qBU5BAs=
-go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.16.0/go.mod h1:I33vtIe0sR96wfrUcilIzLoA3mLHhRmz9S9Te0S3gDo=
+go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.15.1/go.mod h1:q8+Tha+5LThjeSU8BW93uUC5w5/+DnYHMKBMpRCsui0=
-go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.16.0 h1:+XWJd3jf75RXJq29mxbuXhCXFDG3S3R4vBUeSI2P7tE=
+go.opentelemetry.io/otel/sdk v1.15.1 h1:5FKR+skgpzvhPQHIEfcwMYjCBr14LWzs3uSqKiQzETI=
-go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.16.0/go.mod h1:hqgzBPTf4yONMFgdZvL/bK42R/iinTyVQtiWihs3SZc=
+go.opentelemetry.io/otel/sdk v1.15.1/go.mod h1:8rVtxQfrbmbHKfqzpQkT5EzZMcbMBwTzNAggbEAM0KA=
-go.opentelemetry.io/otel/metric v1.16.0 h1:RbrpwVG1Hfv85LgnZ7+txXioPDoh6EdbZHo26Q3hqOo=
+go.opentelemetry.io/otel/trace v1.15.1 h1:uXLo6iHJEzDfrNC0L0mNjItIp06SyaBQxu5t3xMlngY=
-go.opentelemetry.io/otel/metric v1.16.0/go.mod h1:QE47cpOmkwipPiefDwo2wDzwJrlfxxNYodqc4xnGCo4=
+go.opentelemetry.io/otel/trace v1.15.1/go.mod h1:IWdQG/5N1x7f6YUlmdLeJvH9yxtuJAfc4VW5Agv9r/8=
-go.opentelemetry.io/otel/sdk v1.16.0 h1:Z1Ok1YsijYL0CSJpHt4cS3wDDh7p572grzNrBMiMWgE=
+go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI=
-go.opentelemetry.io/otel/sdk v1.16.0/go.mod h1:tMsIuKXuuIWPBAOrH+eHtvhTL+SntFtXF9QD68aP6p4=
+go.opentelemetry.io/proto/otlp v0.19.0 h1:IVN6GR+mhC4s5yfcTbmzHYODqvWAp3ZedA2SJPI1Nnw=
-go.opentelemetry.io/otel/trace v1.16.0 h1:8JRpaObFoW0pxuVPapkgH8UhHQj+bJW8jJsCZEu5MQs=
+go.opentelemetry.io/proto/otlp v0.19.0/go.mod h1:H7XAot3MsfNsj7EXtrA2q5xSNQ10UqI405h3+duxN4U=
-go.opentelemetry.io/otel/trace v1.16.0/go.mod h1:Yt9vYq1SdNz3xdjZZK7wcXv1qv2pwLkqr2QVwea0ef0=
-go.opentelemetry.io/proto/otlp v0.20.0 h1:BLOA1cZBAGSbRiNuGCCKiFrCdYB7deeHDeD1SueyOfA=
-go.opentelemetry.io/proto/otlp v0.20.0/go.mod h1:3QgjzPALBIv9pcknj2EXGPXjYPFdUh/RQfF8Lz3+Vnw=
go.uber.org/atomic v1.11.0 h1:ZvwS0R+56ePWxUNi+Atn9dWONBPp/AUETXlHW0DxSjE=
go.uber.org/atomic v1.11.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0=
go.uber.org/goleak v1.2.1 h1:NBol2c7O1ZokfZ0LEU9K6Whx/KnwvepVetCUhtKja4A=
@@ -436,14 +419,15 @@ go.uber.org/zap v1.24.0 h1:FiJd5l1UOLj0wCgbSE0rwwXHzEdAZS6hiiSnxJN/D60=
go.uber.org/zap v1.24.0/go.mod h1:2kMP+WWQ8aoFoedH3T2sq6iJ2yDWpHbP0f6MQbS9Gkg=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
+golang.org/x/crypto v0.0.0-20190513172903-22d7a77e9e5f/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
-golang.org/x/crypto v0.11.0 h1:6Ewdq3tDic1mg5xRO4milcWCfMVQhI4NkqWWvqejpuA=
+golang.org/x/crypto v0.9.0 h1:LF6fAI+IutBocDJ2OT0Q1g8plpYljMZ4+lty+dsqw3g=
-golang.org/x/crypto v0.11.0/go.mod h1:xgJhtzW8F9jGdVFWZESrid1U1bjeNy4zgy5cRr/CIio=
+golang.org/x/crypto v0.9.0/go.mod h1:yrmDGqONDYtNj3tH8X9dzUun2m2lzPa9ngI6/RUPGR0=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
@@ -454,8 +438,8 @@ golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u0
golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM=
golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU=
-golang.org/x/exp v0.0.0-20230626212559-97b1e661b5df h1:UA2aFVmmsIlefxMk29Dp2juaUSth8Pyn3Tq5Y5mJGME=
+golang.org/x/exp v0.0.0-20230515195305-f3d0a9c9a5cc h1:mCRnTeVUjcrhlRmO0VK8a6k6Rrf6TF9htwo2pJVSjIU=
-golang.org/x/exp v0.0.0-20230626212559-97b1e661b5df/go.mod h1:FXUEEKJgO7OQYeo8N01OfiKP8RXMtf6e8aTskBGqWdc=
+golang.org/x/exp v0.0.0-20230515195305-f3d0a9c9a5cc/go.mod h1:V1LtkGg67GoY2N1AnLN78QLrzxkLyJw7RJb1gzOOz9w=
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
@@ -510,15 +494,15 @@ golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81R
golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
-golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
+golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
+golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
-golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco=
+golang.org/x/net v0.10.0 h1:X2//UzNDwYmtCLn7To6G58Wr6f5ahEAQgKNzv9Y951M=
-golang.org/x/net v0.12.0 h1:cfawfvKITfUsFCeJIHJrbSxpeu/E81khclypR0GVT50=
+golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
-golang.org/x/net v0.12.0/go.mod h1:zEVYFnQC7m/vmpQFELhcD1EWkZlX69l4oqgmer6hfKA=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
@@ -528,6 +512,7 @@ golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ
golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
+golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@@ -539,9 +524,8 @@ golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJ
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.2.0 h1:PUR+T4wwASmuSTYdKjYHI5TD22Wy5ogLU5qZCOLxBrI=
-golang.org/x/sync v0.3.0 h1:ftCYgMx6zT/asHUrPw8BLLscYtGznsLAnjq5RH9P66E=
+golang.org/x/sync v0.2.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190130150945-aca44879d564/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@@ -573,6 +557,7 @@ golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200923182605-d9f96fdee20d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@@ -580,9 +565,12 @@ golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220310020820-b874c991c1a5/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
@@ -590,26 +578,23 @@ golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.0.0-20221010170243-090e33056c14/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.10.0 h1:SqMFp9UcQJZa+pmYuAKjd9xq1f0j5rLcDIk0mj4qAsA=
+golang.org/x/sys v0.8.0 h1:EBmGv8NaZBZTWvrbjNoL6HVt+IVy3QDQpJs7VRIw3tU=
-golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
-golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||||
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||||
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
|
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
|
||||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||||
golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||||
|
golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||||
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||||
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||||
golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
|
golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
|
||||||
golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
golang.org/x/text v0.9.0 h1:2sjJmO8cDvYveuX97RDLsxlyUxLl+GHoLxBiRdHllBE=
|
||||||
golang.org/x/text v0.11.0 h1:LAntKIrcmeSKERyiOh0XMV39LXS8IE9UL2yP7+f5ij4=
|
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
|
||||||
golang.org/x/text v0.11.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
|
|
||||||
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||||
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||||
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||||
|
@ -652,7 +637,6 @@ golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roY
|
||||||
golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
|
golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
|
||||||
golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
|
golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
|
||||||
golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
|
golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
|
||||||
golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
|
|
||||||
golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
|
golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
|
||||||
golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
|
golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
|
||||||
golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
|
golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
|
||||||
|
@ -662,7 +646,6 @@ golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4f
|
||||||
golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
|
golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
|
||||||
golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
|
golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
|
||||||
golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
|
golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
|
||||||
golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
|
|
||||||
golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
|
golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
|
||||||
golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0=
|
golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0=
|
||||||
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
||||||
|
@ -719,6 +702,7 @@ google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfG
|
||||||
google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
|
google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
|
||||||
google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
|
google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
|
||||||
google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
|
google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
|
||||||
|
google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
|
||||||
google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U=
|
google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U=
|
||||||
google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo=
|
google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo=
|
||||||
google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA=
|
google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA=
|
||||||
|
@ -732,11 +716,9 @@ google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6D
|
||||||
google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
|
google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
|
||||||
google.golang.org/genproto v0.0.0-20210108203827-ffc7fda8c3d7/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
|
google.golang.org/genproto v0.0.0-20210108203827-ffc7fda8c3d7/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
|
||||||
google.golang.org/genproto v0.0.0-20210226172003-ab064af71705/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
|
google.golang.org/genproto v0.0.0-20210226172003-ab064af71705/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
|
||||||
google.golang.org/genproto v0.0.0-20230628200519-e449d1ea0e82 h1:Wdfp5Hc1bqGCWYZNrir4A1Jb+SmVaV2j1DL/pbMMTGI=
|
google.golang.org/genproto v0.0.0-20211118181313-81c1377c94b1/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
|
||||||
google.golang.org/genproto/googleapis/api v0.0.0-20230629202037-9506855d4529 h1:s5YSX+ZH5b5vS9rnpGymvIyMpLRJizowqDlOuyjXnTk=
|
google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 h1:KpwkzHKEF7B9Zxg18WzOa7djJ+Ha5DzthMyZYQfEn2A=
|
||||||
google.golang.org/genproto/googleapis/api v0.0.0-20230629202037-9506855d4529/go.mod h1:vHYtlOoi6TsQ3Uk2yxR7NI5z8uoV+3pZtR4jmHIkRig=
|
google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1/go.mod h1:nKE/iIaLqn2bQwXBg8f1g2Ylh6r5MN5CmZvuzZCgsCU=
|
||||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20230629202037-9506855d4529 h1:DEH99RbiLZhMxrpEJCZ0A+wdTe0EOgou/poSLx9vWf4=
|
|
||||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20230629202037-9506855d4529/go.mod h1:66JfowdXAEgad5O9NnYcsNPLCPZJD++2L9X0PCMODrA=
|
|
||||||
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
|
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
|
||||||
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
|
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
|
||||||
google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
|
google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
|
||||||
|
@ -750,11 +732,15 @@ google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3Iji
|
||||||
google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
|
google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
|
||||||
google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
|
google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
|
||||||
google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
|
google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
|
||||||
|
google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0=
|
||||||
google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc=
|
google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc=
|
||||||
google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8=
|
google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8=
|
||||||
google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=
|
google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=
|
||||||
google.golang.org/grpc v1.56.1 h1:z0dNfjIl0VpaZ9iSVjA6daGatAYwPGstTjt5vkRMFkQ=
|
google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=
|
||||||
google.golang.org/grpc v1.56.1/go.mod h1:I9bI3vqKfayGqPUAwGdOSu7kt6oIJLixfffKrpXqQ9s=
|
google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34=
|
||||||
|
google.golang.org/grpc v1.42.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU=
|
||||||
|
google.golang.org/grpc v1.55.0 h1:3Oj82/tFSCeUrRTg/5E/7d/W5A1tj6Ky1ABAuZuv5ag=
|
||||||
|
google.golang.org/grpc v1.55.0/go.mod h1:iYEXKGkEBhg1PjZQvoYEVPTDkHo1/bjTnfwTeGONTY8=
|
||||||
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
|
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
|
||||||
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
|
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
|
||||||
google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
|
google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
|
||||||
|
@ -767,8 +753,9 @@ google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGj
|
||||||
google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c=
|
google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c=
|
||||||
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
|
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
|
||||||
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
|
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
|
||||||
google.golang.org/protobuf v1.31.0 h1:g0LDEJHgrBl9N9r17Ru3sqWhkIx2NB67okBHPwC7hs8=
|
google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
|
||||||
google.golang.org/protobuf v1.31.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
|
google.golang.org/protobuf v1.30.0 h1:kPPoIgf3TsEvrm0PFe15JQ+570QVxYzEvvHqChK+cng=
|
||||||
|
google.golang.org/protobuf v1.30.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
|
||||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
|
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
|
||||||
|
@ -782,6 +769,7 @@ gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
|
||||||
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=
|
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=
|
||||||
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
|
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
|
||||||
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||||
|
gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||||
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||||
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||||
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||||
|
|
22
help.mk
22
help.mk
|
@ -1,22 +0,0 @@
|
||||||
.PHONY: help
|
|
||||||
|
|
||||||
# Show this help prompt
|
|
||||||
help:
|
|
||||||
@echo ' Usage:'
|
|
||||||
@echo ''
|
|
||||||
@echo ' make <target>'
|
|
||||||
@echo ''
|
|
||||||
@echo ' Targets:'
|
|
||||||
@echo ''
|
|
||||||
@awk '/^#/{ comment = substr($$0,3) } comment && /^[a-zA-Z][a-zA-Z0-9.%_/-]+ ?:/{ print " ", $$1, comment }' $(MAKEFILE_LIST) | column -t -s ':' | grep -v 'IGNORE' | sort | uniq
|
|
||||||
|
|
||||||
# Show help for docker/% IGNORE
|
|
||||||
help.docker/%:
|
|
||||||
$(eval TARGETS:=$(notdir all lint) ${BINS})
|
|
||||||
@echo ' Usage:'
|
|
||||||
@echo ''
|
|
||||||
@echo ' make docker/% -- Run `make %` in Golang container'
|
|
||||||
@echo ''
|
|
||||||
@echo ' Supported docker targets:'
|
|
||||||
@echo ''
|
|
||||||
@$(foreach bin, $(TARGETS), echo ' ' $(bin);)
|
|
|
@ -38,7 +38,7 @@ func (d *Datagen) Exports() modules.Exports {
|
||||||
return modules.Exports{Default: d}
|
return modules.Exports{Default: d}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *Datagen) Generator(size int, typ string, streaming bool) *Generator {
|
func (d *Datagen) Generator(size int, typ string) *Generator {
|
||||||
g := NewGenerator(d.vu, size, strings.ToLower(typ), streaming)
|
g := NewGenerator(d.vu, size, strings.ToLower(typ))
|
||||||
return &g
|
return &g
|
||||||
}
|
}
|
||||||
|
|
|
@ -2,10 +2,11 @@ package datagen
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
"bytes"
|
||||||
|
"crypto/sha256"
|
||||||
|
"encoding/hex"
|
||||||
"math/rand"
|
"math/rand"
|
||||||
"sync/atomic"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
|
"github.com/dop251/goja"
|
||||||
"github.com/go-loremipsum/loremipsum"
|
"github.com/go-loremipsum/loremipsum"
|
||||||
"go.k6.io/k6/js/modules"
|
"go.k6.io/k6/js/modules"
|
||||||
)
|
)
|
||||||
|
@ -26,9 +27,11 @@ type (
|
||||||
buf []byte
|
buf []byte
|
||||||
typ string
|
typ string
|
||||||
offset int
|
offset int
|
||||||
|
}
|
||||||
|
|
||||||
streaming bool
|
GenPayloadResponse struct {
|
||||||
seed *atomic.Int64
|
Payload goja.ArrayBuffer
|
||||||
|
Hash string
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -41,7 +44,7 @@ var payloadTypes = []string{
|
||||||
"",
|
"",
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewGenerator(vu modules.VU, size int, typ string, streaming bool) Generator {
|
func NewGenerator(vu modules.VU, size int, typ string) Generator {
|
||||||
if size <= 0 {
|
if size <= 0 {
|
||||||
panic("size should be positive")
|
panic("size should be positive")
|
||||||
}
|
}
|
||||||
|
@ -50,57 +53,46 @@ func NewGenerator(vu modules.VU, size int, typ string, streaming bool) Generator
|
||||||
for i := range payloadTypes {
|
for i := range payloadTypes {
|
||||||
if payloadTypes[i] == typ {
|
if payloadTypes[i] == typ {
|
||||||
found = true
|
found = true
|
||||||
break
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if !found {
|
if !found {
|
||||||
vu.InitEnv().Logger.Info("Unknown payload type '%s', random will be used.", typ)
|
vu.InitEnv().Logger.Info("Unknown payload type '%s', random will be used.", typ)
|
||||||
}
|
}
|
||||||
g := Generator{
|
return Generator{vu: vu, size: size, buf: nil, typ: typ, offset: 0}
|
||||||
vu: vu,
|
|
||||||
size: size,
|
|
||||||
typ: typ,
|
|
||||||
}
|
|
||||||
|
|
||||||
if streaming {
|
|
||||||
g.streaming = true
|
|
||||||
g.seed = new(atomic.Int64)
|
|
||||||
} else {
|
|
||||||
g.rand = rand.New(rand.NewSource(time.Now().UnixNano()))
|
|
||||||
g.buf = make([]byte, size+TailSize)
|
|
||||||
g.fillBuffer()
|
|
||||||
}
|
|
||||||
return g
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (g *Generator) fillBuffer() {
|
func (g *Generator) GenPayload(calcHash bool) GenPayloadResponse {
|
||||||
switch g.typ {
|
|
||||||
case "text":
|
|
||||||
li := loremipsum.New()
|
|
||||||
b := bytes.NewBuffer(g.buf[:0])
|
|
||||||
for b.Len() < g.size+TailSize {
|
|
||||||
b.WriteString(li.Paragraph())
|
|
||||||
b.WriteRune('\n')
|
|
||||||
}
|
|
||||||
g.buf = b.Bytes()
|
|
||||||
default:
|
|
||||||
g.rand.Read(g.buf) // Per docs, err is always nil here
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (g *Generator) GenPayload() Payload {
|
|
||||||
if g.streaming {
|
|
||||||
return NewStreamPayload(g.size, g.seed.Add(1), g.typ)
|
|
||||||
}
|
|
||||||
|
|
||||||
data := g.nextSlice()
|
data := g.nextSlice()
|
||||||
return NewFixedPayload(data)
|
|
||||||
|
dataHash := ""
|
||||||
|
if calcHash {
|
||||||
|
hashBytes := sha256.Sum256(data)
|
||||||
|
dataHash = hex.EncodeToString(hashBytes[:])
|
||||||
|
}
|
||||||
|
|
||||||
|
payload := g.vu.Runtime().NewArrayBuffer(data)
|
||||||
|
return GenPayloadResponse{Payload: payload, Hash: dataHash}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (g *Generator) nextSlice() []byte {
|
func (g *Generator) nextSlice() []byte {
|
||||||
if g.offset+g.size >= len(g.buf) {
|
if g.buf == nil {
|
||||||
g.offset = 0
|
// Allocate buffer with extra tail for sliding and populate it with random bytes
|
||||||
g.fillBuffer()
|
g.buf = make([]byte, g.size+TailSize)
|
||||||
|
rand.Read(g.buf) // Per docs, err is always nil here
|
||||||
|
switch g.typ {
|
||||||
|
case "text":
|
||||||
|
li := loremipsum.New()
|
||||||
|
b := bytes.NewBuffer(nil)
|
||||||
|
for b.Len() < g.size+TailSize {
|
||||||
|
b.WriteString(li.Paragraph())
|
||||||
|
b.WriteRune('\n')
|
||||||
|
}
|
||||||
|
g.buf = b.Bytes()
|
||||||
|
default:
|
||||||
|
// Allocate buffer with extra tail for sliding and populate it with random bytes
|
||||||
|
g.buf = make([]byte, g.size+TailSize)
|
||||||
|
rand.Read(g.buf) // Per docs, err is always nil here
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
result := g.buf[g.offset : g.offset+g.size]
|
result := g.buf[g.offset : g.offset+g.size]
|
||||||
|
@ -108,5 +100,10 @@ func (g *Generator) nextSlice() []byte {
|
||||||
// Shift the offset for the next call. If we've used our entire tail, then erase
|
// Shift the offset for the next call. If we've used our entire tail, then erase
|
||||||
// the buffer so that on the next call it is regenerated anew
|
// the buffer so that on the next call it is regenerated anew
|
||||||
g.offset += 1
|
g.offset += 1
|
||||||
|
if g.offset+g.size >= len(g.buf) {
|
||||||
|
g.buf = nil
|
||||||
|
g.offset = 0
|
||||||
|
}
|
||||||
|
|
||||||
return result
|
return result
|
||||||
}
|
}
|
||||||
|
|
|
@ -16,25 +16,25 @@ func TestGenerator(t *testing.T) {
|
||||||
|
|
||||||
t.Run("fails on negative size", func(t *testing.T) {
|
t.Run("fails on negative size", func(t *testing.T) {
|
||||||
require.Panics(t, func() {
|
require.Panics(t, func() {
|
||||||
_ = NewGenerator(vu, -1, "", false)
|
_ = NewGenerator(vu, -1, "")
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
t.Run("fails on zero size", func(t *testing.T) {
|
t.Run("fails on zero size", func(t *testing.T) {
|
||||||
require.Panics(t, func() {
|
require.Panics(t, func() {
|
||||||
_ = NewGenerator(vu, 0, "", false)
|
_ = NewGenerator(vu, 0, "")
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
t.Run("creates slice of specified size", func(t *testing.T) {
|
t.Run("creates slice of specified size", func(t *testing.T) {
|
||||||
size := 10
|
size := 10
|
||||||
g := NewGenerator(vu, size, "", false)
|
g := NewGenerator(vu, size, "")
|
||||||
slice := g.nextSlice()
|
slice := g.nextSlice()
|
||||||
require.Len(t, slice, size)
|
require.Len(t, slice, size)
|
||||||
})
|
})
|
||||||
|
|
||||||
t.Run("creates a different slice on each call", func(t *testing.T) {
|
t.Run("creates a different slice on each call", func(t *testing.T) {
|
||||||
g := NewGenerator(vu, 1000, "", false)
|
g := NewGenerator(vu, 1000, "")
|
||||||
slice1 := g.nextSlice()
|
slice1 := g.nextSlice()
|
||||||
slice2 := g.nextSlice()
|
slice2 := g.nextSlice()
|
||||||
// Each slice should be unique (assuming that 1000 random bytes will never coincide
|
// Each slice should be unique (assuming that 1000 random bytes will never coincide
|
||||||
|
@ -43,7 +43,7 @@ func TestGenerator(t *testing.T) {
|
||||||
})
|
})
|
||||||
|
|
||||||
t.Run("keeps generating slices after consuming entire tail", func(t *testing.T) {
|
t.Run("keeps generating slices after consuming entire tail", func(t *testing.T) {
|
||||||
g := NewGenerator(vu, 1000, "", false)
|
g := NewGenerator(vu, 1000, "")
|
||||||
initialSlice := g.nextSlice()
|
initialSlice := g.nextSlice()
|
||||||
for i := 0; i < TailSize; i++ {
|
for i := 0; i < TailSize; i++ {
|
||||||
g.nextSlice()
|
g.nextSlice()
|
||||||
|
|
|
@ -1,121 +0,0 @@
|
||||||
package datagen
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bufio"
|
|
||||||
"bytes"
|
|
||||||
"crypto/sha256"
|
|
||||||
"encoding/hex"
|
|
||||||
"hash"
|
|
||||||
"io"
|
|
||||||
"math/rand"
|
|
||||||
|
|
||||||
"github.com/go-loremipsum/loremipsum"
|
|
||||||
)
|
|
||||||
|
|
||||||
// Payload represents arbitrary data to be packed into S3 or native object.
|
|
||||||
// Implementations could be thread-unsafe.
|
|
||||||
type Payload interface {
|
|
||||||
// Reader returns io.Reader instance to read the payload.
|
|
||||||
// Must not be called twice.
|
|
||||||
Reader() io.Reader
|
|
||||||
// Bytes is a helper which reads all data from Reader() into slice.
|
|
||||||
// The sole purpose of this method is to simplify HTTP scenario,
|
|
||||||
// where all payload needs to be read and wrapped.
|
|
||||||
Bytes() []byte
|
|
||||||
// Size returns payload size, which is equal to the total amount of data
|
|
||||||
// that could be read from the Reader().
|
|
||||||
Size() int
|
|
||||||
// Hash returns payload sha256 hash. Must be called after all data is read from the reader.
|
|
||||||
Hash() string
|
|
||||||
}
|
|
||||||
|
|
||||||
type bytesPayload struct {
|
|
||||||
data []byte
|
|
||||||
}
|
|
||||||
|
|
||||||
func (p *bytesPayload) Reader() io.Reader {
|
|
||||||
return bytes.NewReader(p.data)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (p *bytesPayload) Size() int {
|
|
||||||
return len(p.data)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (p *bytesPayload) Hash() string {
|
|
||||||
h := sha256.Sum256(p.data[:])
|
|
||||||
return hex.EncodeToString(h[:])
|
|
||||||
}
|
|
||||||
|
|
||||||
func (p *bytesPayload) Bytes() []byte {
|
|
||||||
return p.data
|
|
||||||
}
|
|
||||||
|
|
||||||
func NewFixedPayload(data []byte) Payload {
|
|
||||||
return &bytesPayload{data: data}
|
|
||||||
}
|
|
||||||
|
|
||||||
type randomPayload struct {
|
|
||||||
r io.Reader
|
|
||||||
s hash.Hash
|
|
||||||
h string
|
|
||||||
size int
|
|
||||||
}
|
|
||||||
|
|
||||||
func NewStreamPayload(size int, seed int64, typ string) Payload {
|
|
||||||
var rr io.Reader
|
|
||||||
switch typ {
|
|
||||||
case "text":
|
|
||||||
rr = &textReader{li: loremipsum.NewWithSeed(seed)}
|
|
||||||
default:
|
|
||||||
rr = rand.New(rand.NewSource(seed))
|
|
||||||
}
|
|
||||||
|
|
||||||
lr := io.LimitReader(rr, int64(size))
|
|
||||||
// We need some buffering to write complete blocks in the TeeReader.
|
|
||||||
// Streaming payload read is expected to be used for big objects, thus 4k seems like a good choice.
|
|
||||||
br := bufio.NewReaderSize(lr, 4096)
|
|
||||||
s := sha256.New()
|
|
||||||
tr := io.TeeReader(br, s)
|
|
||||||
return &randomPayload{
|
|
||||||
r: tr,
|
|
||||||
s: s,
|
|
||||||
size: size,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (p *randomPayload) Reader() io.Reader {
|
|
||||||
return p.r
|
|
||||||
}
|
|
||||||
|
|
||||||
func (p *randomPayload) Size() int {
|
|
||||||
return p.size
|
|
||||||
}
|
|
||||||
|
|
||||||
func (p *randomPayload) Hash() string {
|
|
||||||
if p.h == "" {
|
|
||||||
p.h = hex.EncodeToString(p.s.Sum(nil))
|
|
||||||
// Prevent possible misuse.
|
|
||||||
p.r = nil
|
|
||||||
p.s = nil
|
|
||||||
}
|
|
||||||
return p.h
|
|
||||||
}
|
|
||||||
|
|
||||||
func (p *randomPayload) Bytes() []byte {
|
|
||||||
data, err := io.ReadAll(p.r)
|
|
||||||
if err != nil {
|
|
||||||
// We use only 2 readers, either `bytes.Reader` or `rand.Reader`.
|
|
||||||
// None of them returns errors, thus encountering an error is a fatal error.
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
return data
|
|
||||||
}
|
|
||||||
|
|
||||||
type textReader struct {
|
|
||||||
li *loremipsum.LoremIpsum
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *textReader) Read(p []byte) (n int, err error) {
|
|
||||||
paragraph := r.li.Paragraph()
|
|
||||||
return copy(p, paragraph), nil
|
|
||||||
}
|
|
|
@ -1,40 +0,0 @@
|
||||||
package datagen
|
|
||||||
|
|
||||||
import (
|
|
||||||
"crypto/rand"
|
|
||||||
"crypto/sha256"
|
|
||||||
"encoding/hex"
|
|
||||||
"io"
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
"github.com/stretchr/testify/require"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestFixedPayload(t *testing.T) {
|
|
||||||
const size = 123
|
|
||||||
data := make([]byte, size)
|
|
||||||
_, err := rand.Read(data)
|
|
||||||
require.NoError(t, err)
|
|
||||||
|
|
||||||
p := NewFixedPayload(data)
|
|
||||||
require.Equal(t, size, p.Size())
|
|
||||||
|
|
||||||
actual, err := io.ReadAll(p.Reader())
|
|
||||||
require.NoError(t, err)
|
|
||||||
require.Equal(t, data, actual)
|
|
||||||
|
|
||||||
h := sha256.Sum256(data)
|
|
||||||
require.Equal(t, hex.EncodeToString(h[:]), p.Hash())
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestStreamingPayload(t *testing.T) {
|
|
||||||
const size = 123
|
|
||||||
|
|
||||||
p := NewStreamPayload(size, 0, "")
|
|
||||||
require.Equal(t, size, p.Size())
|
|
||||||
|
|
||||||
actual, err := io.ReadAll(p.Reader())
|
|
||||||
require.NoError(t, err)
|
|
||||||
require.Equal(t, size, len(actual))
|
|
||||||
require.Equal(t, sha256.Size*2, len(p.Hash()))
|
|
||||||
}
|
|
|
@ -5,15 +5,14 @@ import (
|
||||||
|
|
||||||
cid "git.frostfs.info/TrueCloudLab/frostfs-sdk-go/container/id"
|
cid "git.frostfs.info/TrueCloudLab/frostfs-sdk-go/container/id"
|
||||||
oid "git.frostfs.info/TrueCloudLab/frostfs-sdk-go/object/id"
|
oid "git.frostfs.info/TrueCloudLab/frostfs-sdk-go/object/id"
|
||||||
"git.frostfs.info/TrueCloudLab/xk6-frostfs/internal/datagen"
|
|
||||||
"git.frostfs.info/TrueCloudLab/xk6-frostfs/internal/local/rawclient"
|
"git.frostfs.info/TrueCloudLab/xk6-frostfs/internal/local/rawclient"
|
||||||
|
"github.com/dop251/goja"
|
||||||
"go.k6.io/k6/js/modules"
|
"go.k6.io/k6/js/modules"
|
||||||
)
|
)
|
||||||
|
|
||||||
type Client struct {
|
type Client struct {
|
||||||
vu modules.VU
|
vu modules.VU
|
||||||
rc *rawclient.RawClient
|
rc *rawclient.RawClient
|
||||||
l Limiter
|
|
||||||
}
|
}
|
||||||
|
|
||||||
type (
|
type (
|
||||||
|
@ -26,21 +25,13 @@ type (
|
||||||
Success bool
|
Success bool
|
||||||
ObjectID string
|
ObjectID string
|
||||||
Error string
|
Error string
|
||||||
Abort bool
|
|
||||||
}
|
}
|
||||||
|
|
||||||
GetResponse SuccessOrErrorResponse
|
GetResponse SuccessOrErrorResponse
|
||||||
DeleteResponse SuccessOrErrorResponse
|
DeleteResponse SuccessOrErrorResponse
|
||||||
)
|
)
|
||||||
|
|
||||||
func (c *Client) Put(containerID string, headers map[string]string, payload datagen.Payload) PutResponse {
|
func (c *Client) Put(containerID string, headers map[string]string, payload goja.ArrayBuffer) PutResponse {
|
||||||
if c.l.IsFull() {
|
|
||||||
return PutResponse{
|
|
||||||
Success: false,
|
|
||||||
Error: "engine size limit reached",
|
|
||||||
Abort: true,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
id, err := c.rc.Put(c.vu.Context(), mustParseContainerID(containerID), nil, headers, payload.Bytes())
|
id, err := c.rc.Put(c.vu.Context(), mustParseContainerID(containerID), nil, headers, payload.Bytes())
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return PutResponse{Error: err.Error()}
|
return PutResponse{Error: err.Error()}
|
||||||
|
|
|
@ -1,89 +0,0 @@
|
||||||
package local
|
|
||||||
|
|
||||||
import (
|
|
||||||
"sync/atomic"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"git.frostfs.info/TrueCloudLab/frostfs-node/pkg/local_object_storage/engine"
|
|
||||||
"git.frostfs.info/TrueCloudLab/frostfs-node/pkg/local_object_storage/shard/mode"
|
|
||||||
"git.frostfs.info/TrueCloudLab/frostfs-node/pkg/metrics"
|
|
||||||
)
|
|
||||||
|
|
||||||
var (
|
|
||||||
_ Limiter = &noopLimiter{}
|
|
||||||
_ Limiter = &sizeLimiter{}
|
|
||||||
)
|
|
||||||
|
|
||||||
type Limiter interface {
|
|
||||||
engine.MetricRegister
|
|
||||||
IsFull() bool
|
|
||||||
}
|
|
||||||
|
|
||||||
func NewLimiter(maxSizeGB int64) Limiter {
|
|
||||||
if maxSizeGB < 0 {
|
|
||||||
panic("max size is negative")
|
|
||||||
}
|
|
||||||
if maxSizeGB == 0 {
|
|
||||||
return &noopLimiter{}
|
|
||||||
}
|
|
||||||
return &sizeLimiter{
|
|
||||||
maxSize: maxSizeGB * 1024 * 1024 * 1024,
|
|
||||||
currentSize: &atomic.Int64{},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
type sizeLimiter struct {
|
|
||||||
maxSize int64
|
|
||||||
currentSize *atomic.Int64
|
|
||||||
}
|
|
||||||
|
|
||||||
func (*sizeLimiter) AddMethodDuration(method string, d time.Duration) {}
|
|
||||||
func (*sizeLimiter) AddToContainerSize(cnrID string, size int64) {}
|
|
||||||
func (*sizeLimiter) AddToObjectCounter(shardID string, objectType string, delta int) {}
|
|
||||||
func (*sizeLimiter) ClearErrorCounter(shardID string) {}
|
|
||||||
func (*sizeLimiter) DeleteShardMetrics(shardID string) {}
|
|
||||||
func (*sizeLimiter) GC() metrics.GCMetrics { return &noopGCMetrics{} }
|
|
||||||
func (*sizeLimiter) IncErrorCounter(shardID string) {}
|
|
||||||
func (*sizeLimiter) SetMode(shardID string, mode mode.Mode) {}
|
|
||||||
func (*sizeLimiter) SetObjectCounter(shardID string, objectType string, v uint64) {}
|
|
||||||
func (*sizeLimiter) WriteCache() metrics.WriteCacheMetrics { return &noopWriteCacheMetrics{} }
|
|
||||||
|
|
||||||
func (sl *sizeLimiter) AddToPayloadCounter(shardID string, size int64) {
|
|
||||||
sl.currentSize.Add(size)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (sl *sizeLimiter) IsFull() bool {
|
|
||||||
cur := sl.currentSize.Load()
|
|
||||||
return cur > sl.maxSize
|
|
||||||
}
|
|
||||||
|
|
||||||
type noopLimiter struct{}
|
|
||||||
|
|
||||||
func (*noopLimiter) AddMethodDuration(method string, d time.Duration) {}
|
|
||||||
func (*noopLimiter) AddToContainerSize(cnrID string, size int64) {}
|
|
||||||
func (*noopLimiter) AddToObjectCounter(shardID string, objectType string, delta int) {}
|
|
||||||
func (*noopLimiter) AddToPayloadCounter(shardID string, size int64) {}
|
|
||||||
func (*noopLimiter) ClearErrorCounter(shardID string) {}
|
|
||||||
func (*noopLimiter) DeleteShardMetrics(shardID string) {}
|
|
||||||
func (*noopLimiter) GC() metrics.GCMetrics { return &noopGCMetrics{} }
|
|
||||||
func (*noopLimiter) IncErrorCounter(shardID string) {}
|
|
||||||
func (*noopLimiter) SetMode(shardID string, mode mode.Mode) {}
|
|
||||||
func (*noopLimiter) SetObjectCounter(shardID string, objectType string, v uint64) {}
|
|
||||||
func (*noopLimiter) WriteCache() metrics.WriteCacheMetrics { return &noopWriteCacheMetrics{} }
|
|
||||||
func (*noopLimiter) IsFull() bool { return false }
|
|
||||||
|
|
||||||
type noopGCMetrics struct{}
|
|
||||||
|
|
||||||
func (*noopGCMetrics) AddDeletedCount(shardID string, deleted uint64, failed uint64) {}
|
|
||||||
func (*noopGCMetrics) AddExpiredObjectCollectionDuration(string, time.Duration, bool, string) {}
|
|
||||||
func (*noopGCMetrics) AddInhumedObjectCount(shardID string, count uint64, objectType string) {}
|
|
||||||
func (*noopGCMetrics) AddRunDuration(shardID string, d time.Duration, success bool) {}
|
|
||||||
|
|
||||||
type noopWriteCacheMetrics struct{}
|
|
||||||
|
|
||||||
func (*noopWriteCacheMetrics) AddMethodDuration(string, string, bool, time.Duration, string) {}
|
|
||||||
func (*noopWriteCacheMetrics) Close(shardID string) {}
|
|
||||||
func (*noopWriteCacheMetrics) IncOperationCounter(string, string, metrics.NullBool, string) {}
|
|
||||||
func (*noopWriteCacheMetrics) SetActualCount(shardID string, count uint64, storageType string) {}
|
|
||||||
func (*noopWriteCacheMetrics) SetEstimateSize(shardID string, size uint64, storageType string) {}
|
|
||||||
func (*noopWriteCacheMetrics) SetMode(shardID string, mode string) {}
|
|
|
@ -19,8 +19,7 @@ import (
|
||||||
metabase "git.frostfs.info/TrueCloudLab/frostfs-node/pkg/local_object_storage/metabase"
|
metabase "git.frostfs.info/TrueCloudLab/frostfs-node/pkg/local_object_storage/metabase"
|
||||||
"git.frostfs.info/TrueCloudLab/frostfs-node/pkg/local_object_storage/pilorama"
|
"git.frostfs.info/TrueCloudLab/frostfs-node/pkg/local_object_storage/pilorama"
|
||||||
"git.frostfs.info/TrueCloudLab/frostfs-node/pkg/local_object_storage/shard"
|
"git.frostfs.info/TrueCloudLab/frostfs-node/pkg/local_object_storage/shard"
|
||||||
writecache "git.frostfs.info/TrueCloudLab/frostfs-node/pkg/local_object_storage/writecache/config"
|
"git.frostfs.info/TrueCloudLab/frostfs-node/pkg/local_object_storage/writecache"
|
||||||
"git.frostfs.info/TrueCloudLab/frostfs-node/pkg/local_object_storage/writecache/writecachebbolt"
|
|
||||||
"git.frostfs.info/TrueCloudLab/frostfs-node/pkg/util"
|
"git.frostfs.info/TrueCloudLab/frostfs-node/pkg/util"
|
||||||
"git.frostfs.info/TrueCloudLab/frostfs-node/pkg/util/logger"
|
"git.frostfs.info/TrueCloudLab/frostfs-node/pkg/util/logger"
|
||||||
objectSDK "git.frostfs.info/TrueCloudLab/frostfs-sdk-go/object"
|
objectSDK "git.frostfs.info/TrueCloudLab/frostfs-sdk-go/object"
|
||||||
|
@ -41,18 +40,15 @@ type RootModule struct {
|
||||||
mu sync.Mutex
|
mu sync.Mutex
|
||||||
// configFile is the name of the configuration file used during one test.
|
// configFile is the name of the configuration file used during one test.
|
||||||
configFile string
|
configFile string
|
||||||
// configDir is the name of the configuration directory used during one test.
|
|
||||||
configDir string
|
|
||||||
// ng is the engine instance used during one test, corresponding to the configFile. Each VU
|
// ng is the engine instance used during one test, corresponding to the configFile. Each VU
|
||||||
// gets the same engine instance.
|
// gets the same engine instance.
|
||||||
ng *engine.StorageEngine
|
ng *engine.StorageEngine
|
||||||
l Limiter
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Local represents an instance of the module for every VU.
|
// Local represents an instance of the module for every VU.
|
||||||
type Local struct {
|
type Local struct {
|
||||||
vu modules.VU
|
vu modules.VU
|
||||||
ResolveEngine func(context.Context, string, string, bool, int64) (*engine.StorageEngine, Limiter, error)
|
ResolveEngine func(context.Context, string, bool) (*engine.StorageEngine, error)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Ensure the interfaces are implemented correctly.
|
// Ensure the interfaces are implemented correctly.
|
||||||
|
@ -60,9 +56,9 @@ var (
|
||||||
_ modules.Module = &RootModule{}
|
_ modules.Module = &RootModule{}
|
||||||
_ modules.Instance = &Local{}
|
_ modules.Instance = &Local{}
|
||||||
|
|
||||||
objPutSuccess, objPutFails, objPutDuration, objPutData *metrics.Metric
|
objPutTotal, objPutFails, objPutDuration *metrics.Metric
|
||||||
objGetSuccess, objGetFails, objGetDuration, objGetData *metrics.Metric
|
objGetTotal, objGetFails, objGetDuration *metrics.Metric
|
||||||
objDeleteSuccess, objDeleteFails, objDeleteDuration *metrics.Metric
|
objDeleteTotal, objDeleteFails, objDeleteDuration *metrics.Metric
|
||||||
)
|
)
|
||||||
|
|
||||||
func init() {
|
func init() {
|
||||||
|
@ -75,7 +71,7 @@ func (r *RootModule) NewModuleInstance(vu modules.VU) modules.Instance {
|
||||||
return NewLocalModuleInstance(vu, r.GetOrCreateEngine)
|
return NewLocalModuleInstance(vu, r.GetOrCreateEngine)
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewLocalModuleInstance(vu modules.VU, resolveEngine func(context.Context, string, string, bool, int64) (*engine.StorageEngine, Limiter, error)) *Local {
|
func NewLocalModuleInstance(vu modules.VU, resolveEngine func(context.Context, string, bool) (*engine.StorageEngine, error)) *Local {
|
||||||
return &Local{
|
return &Local{
|
||||||
vu: vu,
|
vu: vu,
|
||||||
ResolveEngine: resolveEngine,
|
ResolveEngine: resolveEngine,
|
||||||
|
@ -104,53 +100,45 @@ func checkResourceLimits() error {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// GetOrCreateEngine returns the current engine instance for the given configuration file or directory,
|
// GetOrCreateEngine returns the current engine instance for the given configuration file,
|
||||||
// creating a new one if none exists. Note that the identity of configuration files is their
|
// creating a new one if none exists. Note that the identity of configuration files is their
|
||||||
// file name for the purposes of test runs.
|
// file name for the purposes of test runs.
|
||||||
func (r *RootModule) GetOrCreateEngine(ctx context.Context, configFile string, configDir string, debug bool, maxSizeGB int64) (*engine.StorageEngine, Limiter, error) {
|
func (r *RootModule) GetOrCreateEngine(ctx context.Context, configFile string, debug bool) (*engine.StorageEngine, error) {
|
||||||
r.mu.Lock()
|
r.mu.Lock()
|
||||||
defer r.mu.Unlock()
|
defer r.mu.Unlock()
|
||||||
|
|
||||||
if len(configFile) == 0 && len(configDir) == 0 {
|
if len(configFile) == 0 {
|
||||||
return nil, nil, errors.New("provide configFile or configDir")
|
return nil, errors.New("configFile cannot be empty")
|
||||||
}
|
}
|
||||||
|
|
||||||
if r.l == nil {
|
|
||||||
r.l = NewLimiter(maxSizeGB)
|
|
||||||
}
|
|
||||||
// Create and initialize engine for the given configFile if it doesn't exist already
|
// Create and initialize engine for the given configFile if it doesn't exist already
|
||||||
if r.ng == nil {
|
if r.ng == nil {
|
||||||
r.configFile = configFile
|
r.configFile = configFile
|
||||||
r.configDir = configDir
|
appCfg := config.New(configFile, "", "")
|
||||||
appCfg := config.New(configFile, configDir, "")
|
ngOpts, shardOpts, err := storageEngineOptionsFromConfig(appCfg, debug)
|
||||||
ngOpts, shardOpts, err := storageEngineOptionsFromConfig(appCfg, debug, r.l)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, nil, fmt.Errorf("creating engine options from config: %v", err)
|
return nil, fmt.Errorf("creating engine options from config: %v", err)
|
||||||
}
|
}
|
||||||
if err := checkResourceLimits(); err != nil {
|
if err := checkResourceLimits(); err != nil {
|
||||||
return nil, nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
r.ng = engine.New(ngOpts...)
|
r.ng = engine.New(ngOpts...)
|
||||||
for i, opts := range shardOpts {
|
for i, opts := range shardOpts {
|
||||||
if _, err := r.ng.AddShard(ctx, opts...); err != nil {
|
if _, err := r.ng.AddShard(opts...); err != nil {
|
||||||
return nil, nil, fmt.Errorf("adding shard %d: %v", i, err)
|
return nil, fmt.Errorf("adding shard %d: %v", i, err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if err := r.ng.Open(ctx); err != nil {
|
if err := r.ng.Open(); err != nil {
|
||||||
return nil, nil, fmt.Errorf("opening engine: %v", err)
|
return nil, fmt.Errorf("opening engine: %v", err)
|
||||||
}
|
}
|
||||||
if err := r.ng.Init(ctx); err != nil {
|
if err := r.ng.Init(ctx); err != nil {
|
||||||
return nil, nil, fmt.Errorf("initializing engine: %v", err)
|
return nil, fmt.Errorf("initializing engine: %v", err)
|
||||||
}
|
}
|
||||||
} else if configFile != r.configFile {
|
} else if configFile != r.configFile {
|
||||||
return nil, nil, fmt.Errorf("GetOrCreateEngine called with mismatching configFile after engine was "+
|
return nil, fmt.Errorf("GetOrCreateEngine called with mismatching configFile after engine was initialized: got %q, want %q", configFile, r.configFile)
|
||||||
"initialized: got %q, want %q", configFile, r.configFile)
|
|
||||||
} else if configDir != r.configDir {
|
|
||||||
return nil, nil, fmt.Errorf("GetOrCreateEngine called with mismatching configDir after engine was "+
|
|
||||||
"initialized: got %q, want %q", configDir, r.configDir)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return r.ng, r.l, nil
|
return r.ng, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// Exports implements the modules.Instance interface and returns the exports
|
// Exports implements the modules.Instance interface and returns the exports
|
||||||
|
@ -161,10 +149,10 @@ func (s *Local) Exports() modules.Exports {
|
||||||
|
|
||||||
func (s *Local) VU() modules.VU { return s.vu }
|
func (s *Local) VU() modules.VU { return s.vu }
|
||||||
|
|
||||||
func (s *Local) Connect(configFile, configDir, hexKey string, debug bool, maxSizeGB int64) (*Client, error) {
|
func (s *Local) Connect(configFile, hexKey string, debug bool) (*Client, error) {
|
||||||
ng, l, err := s.ResolveEngine(s.VU().Context(), configFile, configDir, debug, maxSizeGB)
|
ng, err := s.ResolveEngine(s.VU().Context(), configFile, debug)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, fmt.Errorf("connecting to engine for config - file %q dir %q: %v", configFile, configDir, err)
|
return nil, fmt.Errorf("connecting to engine for config %q: %v", configFile, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
key, err := ParseOrCreateKey(hexKey)
|
key, err := ParseOrCreateKey(hexKey)
|
||||||
|
@ -173,19 +161,18 @@ func (s *Local) Connect(configFile, configDir, hexKey string, debug bool, maxSiz
|
||||||
}
|
}
|
||||||
|
|
||||||
// Register metrics.
|
// Register metrics.
|
||||||
objPutSuccess, _ = stats.Registry.NewMetric("local_obj_put_success", metrics.Counter)
|
registry := metrics.NewRegistry()
|
||||||
objPutFails, _ = stats.Registry.NewMetric("local_obj_put_fails", metrics.Counter)
|
objPutTotal, _ = registry.NewMetric("local_obj_put_total", metrics.Counter)
|
||||||
objPutDuration, _ = stats.Registry.NewMetric("local_obj_put_duration", metrics.Trend, metrics.Time)
|
objPutFails, _ = registry.NewMetric("local_obj_put_fails", metrics.Counter)
|
||||||
objPutData, _ = stats.Registry.NewMetric("local_obj_put_bytes", metrics.Counter, metrics.Data)
|
objPutDuration, _ = registry.NewMetric("local_obj_put_duration", metrics.Trend, metrics.Time)
|
||||||
|
|
||||||
objGetSuccess, _ = stats.Registry.NewMetric("local_obj_get_success", metrics.Counter)
|
objGetTotal, _ = registry.NewMetric("local_obj_get_total", metrics.Counter)
|
||||||
objGetFails, _ = stats.Registry.NewMetric("local_obj_get_fails", metrics.Counter)
|
objGetFails, _ = registry.NewMetric("local_obj_get_fails", metrics.Counter)
|
||||||
objGetDuration, _ = stats.Registry.NewMetric("local_obj_get_duration", metrics.Trend, metrics.Time)
|
objGetDuration, _ = registry.NewMetric("local_obj_get_duration", metrics.Trend, metrics.Time)
|
||||||
objGetData, _ = stats.Registry.NewMetric("local_obj_get_bytes", metrics.Counter, metrics.Data)
|
|
||||||
|
|
||||||
objDeleteSuccess, _ = stats.Registry.NewMetric("local_obj_delete_success", metrics.Counter)
|
objDeleteTotal, _ = registry.NewMetric("local_obj_delete_total", metrics.Counter)
|
||||||
objDeleteFails, _ = stats.Registry.NewMetric("local_obj_delete_fails", metrics.Counter)
|
objDeleteFails, _ = registry.NewMetric("local_obj_delete_fails", metrics.Counter)
|
||||||
objDeleteDuration, _ = stats.Registry.NewMetric("local_obj_delete_duration", metrics.Trend, metrics.Time)
|
objDeleteDuration, _ = registry.NewMetric("local_obj_delete_duration", metrics.Trend, metrics.Time)
|
||||||
|
|
||||||
// Create raw client backed by local storage engine.
|
// Create raw client backed by local storage engine.
|
||||||
rc := rawclient.New(ng,
|
rc := rawclient.New(ng,
|
||||||
|
@ -194,32 +181,30 @@ func (s *Local) Connect(configFile, configDir, hexKey string, debug bool, maxSiz
|
||||||
if err != nil {
|
if err != nil {
|
||||||
stats.Report(s.vu, objPutFails, 1)
|
stats.Report(s.vu, objPutFails, 1)
|
||||||
} else {
|
} else {
|
||||||
stats.Report(s.vu, objPutSuccess, 1)
|
stats.Report(s.vu, objPutTotal, 1)
|
||||||
stats.ReportDataSent(s.vu, float64(sz))
|
stats.ReportDataSent(s.vu, float64(sz))
|
||||||
stats.Report(s.vu, objPutDuration, metrics.D(dt))
|
stats.Report(s.vu, objPutDuration, metrics.D(dt))
|
||||||
stats.Report(s.vu, objPutData, float64(sz))
|
|
||||||
}
|
}
|
||||||
}),
|
}),
|
||||||
rawclient.WithGetHandler(func(sz uint64, err error, dt time.Duration) {
|
rawclient.WithGetHandler(func(sz uint64, err error, dt time.Duration) {
|
||||||
if err != nil {
|
if err != nil {
|
||||||
stats.Report(s.vu, objGetFails, 1)
|
stats.Report(s.vu, objGetFails, 1)
|
||||||
} else {
|
} else {
|
||||||
stats.Report(s.vu, objGetSuccess, 1)
|
stats.Report(s.vu, objGetTotal, 1)
|
||||||
stats.Report(s.vu, objGetDuration, metrics.D(dt))
|
stats.Report(s.vu, objGetDuration, metrics.D(dt))
|
||||||
stats.ReportDataReceived(s.vu, float64(sz))
|
stats.ReportDataReceived(s.vu, float64(sz))
|
||||||
stats.Report(s.vu, objGetData, float64(sz))
|
|
||||||
}
|
}
|
||||||
}),
|
}),
|
||||||
rawclient.WithDeleteHandler(func(err error, dt time.Duration) {
|
rawclient.WithDeleteHandler(func(err error, dt time.Duration) {
|
||||||
if err != nil {
|
if err != nil {
|
||||||
stats.Report(s.vu, objDeleteFails, 1)
|
stats.Report(s.vu, objDeleteFails, 1)
|
||||||
} else {
|
} else {
|
||||||
stats.Report(s.vu, objDeleteSuccess, 1)
|
stats.Report(s.vu, objDeleteTotal, 1)
|
||||||
stats.Report(s.vu, objDeleteDuration, metrics.D(dt))
|
stats.Report(s.vu, objDeleteDuration, metrics.D(dt))
|
||||||
}
|
}
|
||||||
}),
|
}),
|
||||||
)
|
)
|
||||||
return &Client{vu: s.vu, rc: rc, l: l}, nil
|
return &Client{vu: s.vu, rc: rc}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
type epochState struct{}
|
type epochState struct{}
|
||||||
|
@ -232,7 +217,7 @@ func (epochState) CurrentEpoch() uint64 { return 0 }
|
||||||
// preloaded the storage (if any), by using the same configuration file.
|
// preloaded the storage (if any), by using the same configuration file.
|
||||||
//
|
//
|
||||||
// Note that the configuration file only needs to contain the storage-specific sections.
|
// Note that the configuration file only needs to contain the storage-specific sections.
|
||||||
func storageEngineOptionsFromConfig(c *config.Config, debug bool, l Limiter) ([]engine.Option, [][]shard.Option, error) {
|
func storageEngineOptionsFromConfig(c *config.Config, debug bool) ([]engine.Option, [][]shard.Option, error) {
|
||||||
log := zap.L()
|
log := zap.L()
|
||||||
if debug {
|
if debug {
|
||||||
var err error
|
var err error
|
||||||
|
@ -246,12 +231,11 @@ func storageEngineOptionsFromConfig(c *config.Config, debug bool, l Limiter) ([]
|
||||||
engine.WithErrorThreshold(engineconfig.ShardErrorThreshold(c)),
|
engine.WithErrorThreshold(engineconfig.ShardErrorThreshold(c)),
|
||||||
engine.WithShardPoolSize(engineconfig.ShardPoolSize(c)),
|
engine.WithShardPoolSize(engineconfig.ShardPoolSize(c)),
|
||||||
engine.WithLogger(&logger.Logger{Logger: log}),
|
engine.WithLogger(&logger.Logger{Logger: log}),
|
||||||
engine.WithMetrics(l),
|
|
||||||
}
|
}
|
||||||
|
|
||||||
var shOpts [][]shard.Option
|
var shOpts [][]shard.Option
|
||||||
|
|
||||||
err := engineconfig.IterateShards(c, false, func(sc *shardconfig.Config) error {
|
engineconfig.IterateShards(c, false, func(sc *shardconfig.Config) error {
|
||||||
opts := []shard.Option{
|
opts := []shard.Option{
|
||||||
shard.WithRefillMetabase(sc.RefillMetabase()),
|
shard.WithRefillMetabase(sc.RefillMetabase()),
|
||||||
shard.WithMode(sc.Mode()),
|
shard.WithMode(sc.Mode()),
|
||||||
|
@ -308,25 +292,17 @@ func storageEngineOptionsFromConfig(c *config.Config, debug bool, l Limiter) ([]
|
||||||
|
|
||||||
// write cache
|
// write cache
|
||||||
if wc := sc.WriteCache(); wc.Enabled() {
|
if wc := sc.WriteCache(); wc.Enabled() {
|
||||||
opts = append(opts,
|
opts = append(opts, shard.WithWriteCacheOptions(
|
||||||
shard.WithWriteCache(true),
|
writecache.WithPath(wc.Path()),
|
||||||
shard.WithWriteCacheOptions(
|
writecache.WithMaxBatchSize(wc.BoltDB().MaxBatchSize()),
|
||||||
writecache.Options{
|
writecache.WithMaxBatchDelay(wc.BoltDB().MaxBatchDelay()),
|
||||||
Type: writecache.TypeBBolt,
|
writecache.WithMaxObjectSize(wc.MaxObjectSize()),
|
||||||
BBoltOptions: []writecachebbolt.Option{
|
writecache.WithSmallObjectSize(wc.SmallObjectSize()),
|
||||||
writecachebbolt.WithPath(wc.Path()),
|
writecache.WithFlushWorkersCount(wc.WorkersNumber()),
|
||||||
writecachebbolt.WithMaxBatchSize(wc.BoltDB().MaxBatchSize()),
|
writecache.WithMaxCacheSize(wc.SizeLimit()),
|
||||||
writecachebbolt.WithMaxBatchDelay(wc.BoltDB().MaxBatchDelay()),
|
writecache.WithNoSync(wc.NoSync()),
|
||||||
writecachebbolt.WithMaxObjectSize(wc.MaxObjectSize()),
|
writecache.WithLogger(&logger.Logger{Logger: log}),
|
||||||
writecachebbolt.WithSmallObjectSize(wc.SmallObjectSize()),
|
))
|
||||||
writecachebbolt.WithFlushWorkersCount(wc.WorkersNumber()),
|
|
||||||
writecachebbolt.WithMaxCacheSize(wc.SizeLimit()),
|
|
||||||
writecachebbolt.WithNoSync(wc.NoSync()),
|
|
||||||
writecachebbolt.WithLogger(&logger.Logger{Logger: log}),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
),
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// tree
|
// tree
|
||||||
|
@ -378,9 +354,7 @@ func storageEngineOptionsFromConfig(c *config.Config, debug bool, l Limiter) ([]
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
})
|
})
|
||||||
if err != nil {
|
|
||||||
return nil, nil, fmt.Errorf("iterate shards: %w", err)
|
|
||||||
}
|
|
||||||
return ngOpts, shOpts, nil
|
return ngOpts, shOpts, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -1,6 +1,7 @@
|
||||||
package native
|
package native
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"bytes"
|
||||||
"context"
|
"context"
|
||||||
"crypto/ecdsa"
|
"crypto/ecdsa"
|
||||||
"crypto/sha256"
|
"crypto/sha256"
|
||||||
|
@ -22,19 +23,19 @@ import (
|
||||||
"git.frostfs.info/TrueCloudLab/frostfs-sdk-go/user"
|
"git.frostfs.info/TrueCloudLab/frostfs-sdk-go/user"
|
||||||
"git.frostfs.info/TrueCloudLab/frostfs-sdk-go/version"
|
"git.frostfs.info/TrueCloudLab/frostfs-sdk-go/version"
|
||||||
"git.frostfs.info/TrueCloudLab/tzhash/tz"
|
"git.frostfs.info/TrueCloudLab/tzhash/tz"
|
||||||
"git.frostfs.info/TrueCloudLab/xk6-frostfs/internal/datagen"
|
|
||||||
"git.frostfs.info/TrueCloudLab/xk6-frostfs/internal/stats"
|
"git.frostfs.info/TrueCloudLab/xk6-frostfs/internal/stats"
|
||||||
|
"github.com/dop251/goja"
|
||||||
"go.k6.io/k6/js/modules"
|
"go.k6.io/k6/js/modules"
|
||||||
"go.k6.io/k6/metrics"
|
"go.k6.io/k6/metrics"
|
||||||
)
|
)
|
||||||
|
|
||||||
type (
|
type (
|
||||||
Client struct {
|
Client struct {
|
||||||
vu modules.VU
|
vu modules.VU
|
||||||
key ecdsa.PrivateKey
|
key ecdsa.PrivateKey
|
||||||
tok session.Object
|
tok session.Object
|
||||||
cli *client.Client
|
cli *client.Client
|
||||||
prepareLocally bool
|
bufsize int
|
||||||
}
|
}
|
||||||
|
|
||||||
PutResponse struct {
|
PutResponse struct {
|
||||||
|
@ -65,18 +66,30 @@ type (
|
||||||
}
|
}
|
||||||
|
|
||||||
PreparedObject struct {
|
PreparedObject struct {
|
||||||
vu modules.VU
|
vu modules.VU
|
||||||
key ecdsa.PrivateKey
|
key ecdsa.PrivateKey
|
||||||
cli *client.Client
|
cli *client.Client
|
||||||
hdr object.Object
|
bufsize int
|
||||||
payload []byte
|
|
||||||
prepareLocally bool
|
hdr object.Object
|
||||||
|
payload []byte
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
const defaultBufferSize = 64 * 1024
|
const defaultBufferSize = 64 * 1024
|
||||||
|
|
||||||
-func (c *Client) Put(containerID string, headers map[string]string, payload datagen.Payload, chunkSize int) PutResponse {
+func (c *Client) SetBufferSize(size int) {
+	if size < 0 {
+		panic("buffer size must be positive")
+	}
+	if size == 0 {
+		c.bufsize = defaultBufferSize
+	} else {
+		c.bufsize = size
+	}
+}
+
+func (c *Client) Put(containerID string, headers map[string]string, payload goja.ArrayBuffer) PutResponse {
 	cliContainerID := parseContainerID(containerID)

 	tok := c.tok
@@ -103,7 +116,7 @@ func (c *Client) Put(containerID string, headers map[string]string, payload data
 	o.SetOwnerID(&owner)
 	o.SetAttributes(attrs...)

-	resp, err := put(c.vu, c.cli, c.prepareLocally, &tok, &o, payload, chunkSize)
+	resp, err := put(c.vu, c.bufsize, c.cli, &tok, &o, payload.Bytes())
 	if err != nil {
 		return PutResponse{Success: false, Error: err.Error()}
 	}
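The new SetBufferSize method guards against bad sizes and falls back to a 64 KiB default. A small sketch of the same guard logic, using a hypothetical client type and returning an error instead of panicking:

package main

import "fmt"

const defaultBufferSize = 64 * 1024

// client is a hypothetical stand-in; the real Client in the diff keeps the
// buffer size in its bufsize field the same way.
type client struct {
	bufsize int
}

// SetBufferSize mirrors the guard in the hunk above: negative sizes are
// rejected, zero selects the default, anything else is taken as-is.
func (c *client) SetBufferSize(size int) error {
	switch {
	case size < 0:
		return fmt.Errorf("buffer size must be positive, got %d", size)
	case size == 0:
		c.bufsize = defaultBufferSize
	default:
		c.bufsize = size
	}
	return nil
}

func main() {
	c := &client{}
	_ = c.SetBufferSize(0)
	fmt.Println(c.bufsize) // 65536
}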
|
@@ -127,9 +140,9 @@ func (c *Client) Delete(containerID string, objectID string) DeleteResponse {
 	start := time.Now()

 	var prm client.PrmObjectDelete
-	prm.ObjectID = &cliObjectID
-	prm.ContainerID = &cliContainerID
-	prm.Session = &tok
+	prm.ByID(cliObjectID)
+	prm.FromContainer(cliContainerID)
+	prm.WithinSession(tok)

 	_, err = c.cli.ObjectDelete(c.vu.Context(), prm)
 	if err != nil {
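This hunk switches the SDK parameter struct from exported fields to setter methods. A rough sketch of the two styles side by side, with hypothetical Prm types (not the frostfs SDK):

package main

import "fmt"

// Two equivalent ways to pass request parameters; the hunk above moves from
// exported fields (left) to setter methods (right) on the SDK's Prm types.
// Both types here are hypothetical stand-ins.

type PrmDeleteFields struct {
	ObjectID    *string
	ContainerID *string
}

type PrmDeleteSetters struct {
	objectID    string
	containerID string
}

func (p *PrmDeleteSetters) ByID(id string)           { p.objectID = id }
func (p *PrmDeleteSetters) FromContainer(cnr string) { p.containerID = cnr }

func main() {
	obj, cnr := "obj-1", "cnr-1"

	var a PrmDeleteFields
	a.ObjectID, a.ContainerID = &obj, &cnr

	var b PrmDeleteSetters
	b.ByID(obj)
	b.FromContainer(cnr)

	fmt.Println(*a.ObjectID, b.containerID)
}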
@ -137,7 +150,7 @@ func (c *Client) Delete(containerID string, objectID string) DeleteResponse {
|
||||||
return DeleteResponse{Success: false, Error: err.Error()}
|
return DeleteResponse{Success: false, Error: err.Error()}
|
||||||
}
|
}
|
||||||
|
|
||||||
stats.Report(c.vu, objDeleteSuccess, 1)
|
stats.Report(c.vu, objDeleteTotal, 1)
|
||||||
stats.Report(c.vu, objDeleteDuration, metrics.D(time.Since(start)))
|
stats.Report(c.vu, objDeleteDuration, metrics.D(time.Since(start)))
|
||||||
return DeleteResponse{Success: true}
|
return DeleteResponse{Success: true}
|
||||||
}
|
}
|
||||||
|
@ -158,12 +171,12 @@ func (c *Client) Get(containerID, objectID string) GetResponse {
|
||||||
start := time.Now()
|
start := time.Now()
|
||||||
|
|
||||||
var prm client.PrmObjectGet
|
var prm client.PrmObjectGet
|
||||||
prm.ObjectID = &cliObjectID
|
prm.ByID(cliObjectID)
|
||||||
prm.ContainerID = &cliContainerID
|
prm.FromContainer(cliContainerID)
|
||||||
prm.Session = &tok
|
prm.WithinSession(tok)
|
||||||
|
|
||||||
objSize := 0
|
var objSize = 0
|
||||||
err = get(c.cli, prm, c.vu.Context(), func(data []byte) {
|
err = get(c.cli, prm, c.vu.Context(), c.bufsize, func(data []byte) {
|
||||||
objSize += len(data)
|
objSize += len(data)
|
||||||
})
|
})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
@ -171,10 +184,9 @@ func (c *Client) Get(containerID, objectID string) GetResponse {
|
||||||
return GetResponse{Success: false, Error: err.Error()}
|
return GetResponse{Success: false, Error: err.Error()}
|
||||||
}
|
}
|
||||||
|
|
||||||
stats.Report(c.vu, objGetSuccess, 1)
|
stats.Report(c.vu, objGetTotal, 1)
|
||||||
stats.Report(c.vu, objGetDuration, metrics.D(time.Since(start)))
|
stats.Report(c.vu, objGetDuration, metrics.D(time.Since(start)))
|
||||||
stats.ReportDataReceived(c.vu, float64(objSize))
|
stats.ReportDataReceived(c.vu, float64(objSize))
|
||||||
stats.Report(c.vu, objGetData, float64(objSize))
|
|
||||||
return GetResponse{Success: true}
|
return GetResponse{Success: true}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -182,9 +194,10 @@ func get(
|
||||||
cli *client.Client,
|
cli *client.Client,
|
||||||
prm client.PrmObjectGet,
|
prm client.PrmObjectGet,
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
|
bufSize int,
|
||||||
onDataChunk func(chunk []byte),
|
onDataChunk func(chunk []byte),
|
||||||
) error {
|
) error {
|
||||||
buf := make([]byte, defaultBufferSize)
|
var buf = make([]byte, bufSize)
|
||||||
|
|
||||||
objectReader, err := cli.ObjectGetInit(ctx, prm)
|
objectReader, err := cli.ObjectGetInit(ctx, prm)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
@ -227,12 +240,12 @@ func (c *Client) VerifyHash(containerID, objectID, expectedHash string) VerifyHa
|
||||||
}
|
}
|
||||||
|
|
||||||
var prm client.PrmObjectGet
|
var prm client.PrmObjectGet
|
||||||
prm.ObjectID = &cliObjectID
|
prm.ByID(cliObjectID)
|
||||||
prm.ContainerID = &cliContainerID
|
prm.FromContainer(cliContainerID)
|
||||||
prm.Session = &tok
|
prm.WithinSession(tok)
|
||||||
|
|
||||||
hasher := sha256.New()
|
hasher := sha256.New()
|
||||||
err = get(c.cli, prm, c.vu.Context(), func(data []byte) {
|
err = get(c.cli, prm, c.vu.Context(), c.bufsize, func(data []byte) {
|
||||||
hasher.Write(data)
|
hasher.Write(data)
|
||||||
})
|
})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
@ -309,9 +322,10 @@ func (c *Client) PutContainer(params map[string]string) PutContainerResponse {
|
||||||
}
|
}
|
||||||
|
|
||||||
start := time.Now()
|
start := time.Now()
|
||||||
res, err := c.cli.ContainerPut(c.vu.Context(), client.PrmContainerPut{
|
var prm client.PrmContainerPut
|
||||||
Container: &cnr,
|
prm.SetContainer(cnr)
|
||||||
})
|
|
||||||
|
res, err := c.cli.ContainerPut(c.vu.Context(), prm)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return c.putCnrErrorResponse(err)
|
return c.putCnrErrorResponse(err)
|
||||||
}
|
}
|
||||||
|
@ -327,7 +341,7 @@ func (c *Client) PutContainer(params map[string]string) PutContainerResponse {
|
||||||
return PutContainerResponse{Success: true, ContainerID: res.ID().EncodeToString()}
|
return PutContainerResponse{Success: true, ContainerID: res.ID().EncodeToString()}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *Client) Onsite(containerID string, payload datagen.Payload) PreparedObject {
|
func (c *Client) Onsite(containerID string, payload goja.ArrayBuffer) PreparedObject {
|
||||||
maxObjectSize, epoch, hhDisabled, err := parseNetworkInfo(c.vu.Context(), c.cli)
|
maxObjectSize, epoch, hhDisabled, err := parseNetworkInfo(c.vu.Context(), c.cli)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
panic(err)
|
panic(err)
|
||||||
|
@ -367,12 +381,13 @@ func (c *Client) Onsite(containerID string, payload datagen.Payload) PreparedObj
|
||||||
}
|
}
|
||||||
|
|
||||||
return PreparedObject{
|
return PreparedObject{
|
||||||
vu: c.vu,
|
vu: c.vu,
|
||||||
key: c.key,
|
key: c.key,
|
||||||
cli: c.cli,
|
cli: c.cli,
|
||||||
hdr: *obj,
|
bufsize: c.bufsize,
|
||||||
payload: data,
|
|
||||||
prepareLocally: c.prepareLocally,
|
hdr: *obj,
|
||||||
|
payload: data,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -398,7 +413,7 @@ func (p PreparedObject) Put(headers map[string]string) PutResponse {
|
||||||
return PutResponse{Success: false, Error: err.Error()}
|
return PutResponse{Success: false, Error: err.Error()}
|
||||||
}
|
}
|
||||||
|
|
||||||
_, err = put(p.vu, p.cli, p.prepareLocally, nil, &obj, datagen.NewFixedPayload(p.payload), 0)
|
_, err = put(p.vu, p.bufsize, p.cli, nil, &obj, p.payload)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return PutResponse{Success: false, Error: err.Error()}
|
return PutResponse{Success: false, Error: err.Error()}
|
||||||
}
|
}
|
||||||
|
@@ -406,22 +421,11 @@ func (p PreparedObject) Put(headers map[string]string) PutResponse {
 	return PutResponse{Success: true, ObjectID: id.String()}
 }

-type epochSource uint64
-
-func (s epochSource) CurrentEpoch() uint64 {
-	return uint64(s)
-}
-
-func put(vu modules.VU, cli *client.Client, prepareLocally bool, tok *session.Object,
-	hdr *object.Object, payload datagen.Payload, chunkSize int,
-) (*client.ResObjectPut, error) {
-	bufSize := defaultBufferSize
-	if chunkSize > 0 {
-		bufSize = chunkSize
-	}
+func put(vu modules.VU, bufSize int, cli *client.Client, tok *session.Object,
+	hdr *object.Object, payload []byte) (*client.ResObjectPut, error) {
 	buf := make([]byte, bufSize)
-	rdr := payload.Reader()
-	sz := payload.Size()
+	rdr := bytes.NewReader(payload)
+	sz := rdr.Size()

 	// starting upload
 	start := time.Now()
@@ -430,18 +434,6 @@ func put(vu modules.VU, cli *client.Client, prepareLocally bool, tok *session.Ob
 	if tok != nil {
 		prm.WithinSession(*tok)
 	}
-	if chunkSize > 0 {
-		prm.SetGRPCPayloadChunkLen(chunkSize)
-	}
-	if prepareLocally {
-		res, err := cli.NetworkInfo(vu.Context(), client.PrmNetworkInfo{})
-		if err != nil {
-			return nil, err
-		}
-		prm.WithObjectMaxSize(res.Info().MaxObjectSize())
-		prm.WithEpochSource(epochSource(res.Info().CurrentEpoch()))
-		prm.WithoutHomomorphicHash(true)
-	}

 	objectWriter, err := cli.ObjectPutInit(vu.Context(), prm)
 	if err != nil {
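put() streams the payload through a fixed-size buffer, calling WritePayloadChunk per chunk. A generic sketch of that loop over an io.Reader; the writeChunk callback stands in for the SDK's object writer:

package main

import (
	"bytes"
	"fmt"
	"io"
)

// writeChunked mirrors the upload loop in put() above: read the payload into a
// fixed-size buffer and hand each chunk to the writer until the reader is drained.
func writeChunked(rdr io.Reader, bufSize int, writeChunk func([]byte) bool) error {
	buf := make([]byte, bufSize)
	for {
		n, err := rdr.Read(buf)
		if n > 0 {
			if !writeChunk(buf[:n]) {
				return fmt.Errorf("writer rejected chunk")
			}
		}
		if err == io.EOF {
			return nil
		}
		if err != nil {
			return err
		}
	}
}

func main() {
	payload := bytes.Repeat([]byte("x"), 200_000)
	var sent int
	err := writeChunked(bytes.NewReader(payload), 64*1024, func(chunk []byte) bool {
		sent += len(chunk)
		return true
	})
	fmt.Println(sent, err) // 200000 <nil>
}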
@ -449,30 +441,29 @@ func put(vu modules.VU, cli *client.Client, prepareLocally bool, tok *session.Ob
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
if !objectWriter.WriteHeader(vu.Context(), *hdr) {
|
if !objectWriter.WriteHeader(*hdr) {
|
||||||
stats.Report(vu, objPutFails, 1)
|
stats.Report(vu, objPutFails, 1)
|
||||||
_, err = objectWriter.Close(vu.Context())
|
_, err = objectWriter.Close()
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
n, _ := rdr.Read(buf)
|
n, _ := rdr.Read(buf)
|
||||||
for n > 0 {
|
for n > 0 {
|
||||||
if !objectWriter.WritePayloadChunk(vu.Context(), buf[:n]) {
|
if !objectWriter.WritePayloadChunk(buf[:n]) {
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
n, _ = rdr.Read(buf)
|
n, _ = rdr.Read(buf)
|
||||||
}
|
}
|
||||||
|
|
||||||
resp, err := objectWriter.Close(vu.Context())
|
resp, err := objectWriter.Close()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
stats.Report(vu, objPutFails, 1)
|
stats.Report(vu, objPutFails, 1)
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
stats.Report(vu, objPutSuccess, 1)
|
stats.Report(vu, objPutTotal, 1)
|
||||||
stats.ReportDataSent(vu, float64(sz))
|
stats.ReportDataSent(vu, float64(sz))
|
||||||
stats.Report(vu, objPutDuration, metrics.D(time.Since(start)))
|
stats.Report(vu, objPutDuration, metrics.D(time.Since(start)))
|
||||||
stats.Report(vu, objPutData, float64(sz))
|
|
||||||
|
|
||||||
return resp, nil
|
return resp, nil
|
||||||
}
|
}
|
||||||
|
@ -500,9 +491,10 @@ func (x *waitParams) setDefaults() {
|
||||||
|
|
||||||
func (c *Client) waitForContainerPresence(ctx context.Context, cnrID cid.ID, wp *waitParams) error {
|
func (c *Client) waitForContainerPresence(ctx context.Context, cnrID cid.ID, wp *waitParams) error {
|
||||||
return waitFor(ctx, wp, func(ctx context.Context) bool {
|
return waitFor(ctx, wp, func(ctx context.Context) bool {
|
||||||
_, err := c.cli.ContainerGet(ctx, client.PrmContainerGet{
|
var prm client.PrmContainerGet
|
||||||
ContainerID: &cnrID,
|
prm.SetContainer(cnrID)
|
||||||
})
|
|
||||||
|
_, err := c.cli.ContainerGet(ctx, prm)
|
||||||
return err == nil
|
return err == nil
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
|
@ -8,7 +8,6 @@ import (
|
||||||
"git.frostfs.info/TrueCloudLab/frostfs-sdk-go/client"
|
"git.frostfs.info/TrueCloudLab/frostfs-sdk-go/client"
|
||||||
frostfsecdsa "git.frostfs.info/TrueCloudLab/frostfs-sdk-go/crypto/ecdsa"
|
frostfsecdsa "git.frostfs.info/TrueCloudLab/frostfs-sdk-go/crypto/ecdsa"
|
||||||
"git.frostfs.info/TrueCloudLab/frostfs-sdk-go/session"
|
"git.frostfs.info/TrueCloudLab/frostfs-sdk-go/session"
|
||||||
"git.frostfs.info/TrueCloudLab/xk6-frostfs/internal/stats"
|
|
||||||
"github.com/google/uuid"
|
"github.com/google/uuid"
|
||||||
"github.com/nspcc-dev/neo-go/pkg/crypto/keys"
|
"github.com/nspcc-dev/neo-go/pkg/crypto/keys"
|
||||||
"go.k6.io/k6/js/modules"
|
"go.k6.io/k6/js/modules"
|
||||||
|
@ -29,10 +28,10 @@ var (
|
||||||
_ modules.Instance = &Native{}
|
_ modules.Instance = &Native{}
|
||||||
_ modules.Module = &RootModule{}
|
_ modules.Module = &RootModule{}
|
||||||
|
|
||||||
objPutSuccess, objPutFails, objPutDuration, objPutData *metrics.Metric
|
objPutTotal, objPutFails, objPutDuration *metrics.Metric
|
||||||
objGetSuccess, objGetFails, objGetDuration, objGetData *metrics.Metric
|
objGetTotal, objGetFails, objGetDuration *metrics.Metric
|
||||||
objDeleteSuccess, objDeleteFails, objDeleteDuration *metrics.Metric
|
objDeleteTotal, objDeleteFails, objDeleteDuration *metrics.Metric
|
||||||
cnrPutTotal, cnrPutFails, cnrPutDuration *metrics.Metric
|
cnrPutTotal, cnrPutFails, cnrPutDuration *metrics.Metric
|
||||||
)
|
)
|
||||||
|
|
||||||
func init() {
|
func init() {
|
||||||
|
@ -52,7 +51,7 @@ func (n *Native) Exports() modules.Exports {
|
||||||
return modules.Exports{Default: n}
|
return modules.Exports{Default: n}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (n *Native) Connect(endpoint, hexPrivateKey string, dialTimeout, streamTimeout int, prepareLocally bool) (*Client, error) {
|
func (n *Native) Connect(endpoint, hexPrivateKey string, dialTimeout, streamTimeout int) (*Client, error) {
|
||||||
var (
|
var (
|
||||||
cli client.Client
|
cli client.Client
|
||||||
pk *keys.PrivateKey
|
pk *keys.PrivateKey
|
||||||
|
@ -90,9 +89,9 @@ func (n *Native) Connect(endpoint, hexPrivateKey string, dialTimeout, streamTime
|
||||||
|
|
||||||
// generate session token
|
// generate session token
|
||||||
exp := uint64(math.MaxUint64)
|
exp := uint64(math.MaxUint64)
|
||||||
sessionResp, err := cli.SessionCreate(n.vu.Context(), client.PrmSessionCreate{
|
var prmSessionCreate client.PrmSessionCreate
|
||||||
Expiration: exp,
|
prmSessionCreate.SetExp(exp)
|
||||||
})
|
sessionResp, err := cli.SessionCreate(n.vu.Context(), prmSessionCreate)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, fmt.Errorf("dial endpoint: %s %w", endpoint, err)
|
return nil, fmt.Errorf("dial endpoint: %s %w", endpoint, err)
|
||||||
}
|
}
|
||||||
|
@ -116,30 +115,28 @@ func (n *Native) Connect(endpoint, hexPrivateKey string, dialTimeout, streamTime
|
||||||
tok.SetExp(exp)
|
tok.SetExp(exp)
|
||||||
|
|
||||||
// register metrics
|
// register metrics
|
||||||
|
registry := metrics.NewRegistry()
|
||||||
|
objPutTotal, _ = registry.NewMetric("frostfs_obj_put_total", metrics.Counter)
|
||||||
|
objPutFails, _ = registry.NewMetric("frostfs_obj_put_fails", metrics.Counter)
|
||||||
|
objPutDuration, _ = registry.NewMetric("frostfs_obj_put_duration", metrics.Trend, metrics.Time)
|
||||||
|
|
||||||
objPutSuccess, _ = stats.Registry.NewMetric("frostfs_obj_put_success", metrics.Counter)
|
objGetTotal, _ = registry.NewMetric("frostfs_obj_get_total", metrics.Counter)
|
||||||
objPutFails, _ = stats.Registry.NewMetric("frostfs_obj_put_fails", metrics.Counter)
|
objGetFails, _ = registry.NewMetric("frostfs_obj_get_fails", metrics.Counter)
|
||||||
objPutDuration, _ = stats.Registry.NewMetric("frostfs_obj_put_duration", metrics.Trend, metrics.Time)
|
objGetDuration, _ = registry.NewMetric("frostfs_obj_get_duration", metrics.Trend, metrics.Time)
|
||||||
objPutData, _ = stats.Registry.NewMetric("frostfs_obj_put_bytes", metrics.Counter, metrics.Data)
|
|
||||||
|
|
||||||
objGetSuccess, _ = stats.Registry.NewMetric("frostfs_obj_get_success", metrics.Counter)
|
objDeleteTotal, _ = registry.NewMetric("frostfs_obj_delete_total", metrics.Counter)
|
||||||
objGetFails, _ = stats.Registry.NewMetric("frostfs_obj_get_fails", metrics.Counter)
|
objDeleteFails, _ = registry.NewMetric("frostfs_obj_delete_fails", metrics.Counter)
|
||||||
objGetDuration, _ = stats.Registry.NewMetric("frostfs_obj_get_duration", metrics.Trend, metrics.Time)
|
objDeleteDuration, _ = registry.NewMetric("frostfs_obj_delete_duration", metrics.Trend, metrics.Time)
|
||||||
objGetData, _ = stats.Registry.NewMetric("frostfs_obj_get_bytes", metrics.Counter, metrics.Data)
|
|
||||||
|
|
||||||
objDeleteSuccess, _ = stats.Registry.NewMetric("frostfs_obj_delete_success", metrics.Counter)
|
cnrPutTotal, _ = registry.NewMetric("frostfs_cnr_put_total", metrics.Counter)
|
||||||
objDeleteFails, _ = stats.Registry.NewMetric("frostfs_obj_delete_fails", metrics.Counter)
|
cnrPutFails, _ = registry.NewMetric("frostfs_cnr_put_fails", metrics.Counter)
|
||||||
objDeleteDuration, _ = stats.Registry.NewMetric("frostfs_obj_delete_duration", metrics.Trend, metrics.Time)
|
cnrPutDuration, _ = registry.NewMetric("frostfs_cnr_put_duration", metrics.Trend, metrics.Time)
|
||||||
|
|
||||||
cnrPutTotal, _ = stats.Registry.NewMetric("frostfs_cnr_put_total", metrics.Counter)
|
|
||||||
cnrPutFails, _ = stats.Registry.NewMetric("frostfs_cnr_put_fails", metrics.Counter)
|
|
||||||
cnrPutDuration, _ = stats.Registry.NewMetric("frostfs_cnr_put_duration", metrics.Trend, metrics.Time)
|
|
||||||
|
|
||||||
return &Client{
|
return &Client{
|
||||||
vu: n.vu,
|
vu: n.vu,
|
||||||
key: pk.PrivateKey,
|
key: pk.PrivateKey,
|
||||||
tok: tok,
|
tok: tok,
|
||||||
cli: &cli,
|
cli: &cli,
|
||||||
prepareLocally: prepareLocally,
|
bufsize: defaultBufferSize,
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
|
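The native module above registers its custom k6 metrics either on a fresh registry (right side) or on the shared stats.Registry (left side). A sketch of the registration step using the go.k6.io/k6/metrics API as it appears in this diff; error handling is added here, while the diff discards the error with `_`:

package main

import (
	"fmt"

	"go.k6.io/k6/metrics"
)

// registerMetrics registers a few of the counters and trends named in the diff.
func registerMetrics(registry *metrics.Registry) error {
	defs := []struct {
		name string
		typ  metrics.MetricType
		vt   []metrics.ValueType
	}{
		{"frostfs_obj_put_total", metrics.Counter, nil},
		{"frostfs_obj_put_fails", metrics.Counter, nil},
		{"frostfs_obj_put_duration", metrics.Trend, []metrics.ValueType{metrics.Time}},
	}
	for _, d := range defs {
		if _, err := registry.NewMetric(d.name, d.typ, d.vt...); err != nil {
			return fmt.Errorf("register %s: %w", d.name, err)
		}
	}
	return nil
}

func main() {
	if err := registerMetrics(metrics.NewRegistry()); err != nil {
		panic(err)
	}
	fmt.Println("metrics registered")
}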
@@ -1,82 +0,0 @@
-package registry
-
-import (
-	"fmt"
-	"os"
-)
-
-type ObjExporter struct {
-	selector *ObjSelector
-}
-
-type PreGenerateInfo struct {
-	Buckets []string  `json:"buckets"`
-	Objects []ObjInfo `json:"objects"`
-	ObjSize string    `json:"obj_size"`
-}
-
-type ObjInfo struct {
-	Bucket string `json:"bucket"`
-	Object string `json:"object"`
-}
-
-func NewObjExporter(selector *ObjSelector) *ObjExporter {
-	return &ObjExporter{selector: selector}
-}
-
-func (o *ObjExporter) ExportJSONPreGen(fileName string) error {
-	f, err := os.Create(fileName)
-	if err != nil {
-		return err
-	}
-	defer f.Close()
-
-	// there can be a lot of object, so manually form json
-	if _, err = f.WriteString(`{"objects":[`); err != nil {
-		return err
-	}
-
-	bucketMap := make(map[string]struct{})
-
-	count, err := o.selector.Count()
-	if err != nil {
-		return err
-	}
-
-	var comma string
-	for i := 0; i < count; i++ {
-		info := o.selector.NextObject()
-		if info == nil {
-			break
-		}
-
-		if _, err = f.WriteString(fmt.Sprintf(`%s{"bucket":"%s","object":"%s"}`, comma, info.S3Bucket, info.S3Key)); err != nil {
-			return err
-		}
-
-		if i == 0 {
-			comma = ","
-		}
-
-		bucketMap[info.S3Bucket] = struct{}{}
-	}
-
-	if _, err = f.WriteString(`],"buckets":[`); err != nil {
-		return err
-	}
-
-	i := 0
-	comma = ""
-	for bucket := range bucketMap {
-		if _, err = f.WriteString(fmt.Sprintf(`%s"%s"`, comma, bucket)); err != nil {
-			return err
-		}
-		if i == 0 {
-			comma = ","
-		}
-		i++
-	}
-
-	_, err = f.WriteString(`]}`)
-	return err
-}
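The deleted exporter builds the JSON by hand so it never holds every object in memory. For small registries, an alternative sketch using encoding/json; the types mirror PreGenerateInfo/ObjInfo from the removed file and the helper name is hypothetical:

package main

import (
	"encoding/json"
	"os"
)

type objInfo struct {
	Bucket string `json:"bucket"`
	Object string `json:"object"`
}

type preGenerateInfo struct {
	Buckets []string  `json:"buckets"`
	Objects []objInfo `json:"objects"`
}

// exportJSON marshals the whole report in one pass; simpler than the removed
// streaming writer, at the cost of keeping all objects in memory.
func exportJSON(fileName string, info preGenerateInfo) error {
	f, err := os.Create(fileName)
	if err != nil {
		return err
	}
	defer f.Close()

	enc := json.NewEncoder(f)
	enc.SetIndent("", "  ")
	return enc.Encode(info)
}

func main() {
	_ = exportJSON("/tmp/pregen.json", preGenerateInfo{
		Buckets: []string{"bucket-1"},
		Objects: []objInfo{{Bucket: "bucket-1", Object: "obj-1"}},
	})
}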
@ -20,7 +20,6 @@ type ObjSelector struct {
|
||||||
boltDB *bbolt.DB
|
boltDB *bbolt.DB
|
||||||
filter *ObjFilter
|
filter *ObjFilter
|
||||||
cacheSize int
|
cacheSize int
|
||||||
kind SelectorKind
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// objectSelectCache is the default maximum size of a batch to select from DB.
|
// objectSelectCache is the default maximum size of a batch to select from DB.
|
||||||
|
@ -28,7 +27,7 @@ const objectSelectCache = 1000
|
||||||
|
|
||||||
// NewObjSelector creates a new instance of object selector that can iterate over
|
// NewObjSelector creates a new instance of object selector that can iterate over
|
||||||
// objects in the specified registry.
|
// objects in the specified registry.
|
||||||
func NewObjSelector(registry *ObjRegistry, selectionSize int, kind SelectorKind, filter *ObjFilter) *ObjSelector {
|
func NewObjSelector(registry *ObjRegistry, selectionSize int, filter *ObjFilter) *ObjSelector {
|
||||||
if selectionSize <= 0 {
|
if selectionSize <= 0 {
|
||||||
selectionSize = objectSelectCache
|
selectionSize = objectSelectCache
|
||||||
}
|
}
|
||||||
|
@ -41,7 +40,6 @@ func NewObjSelector(registry *ObjRegistry, selectionSize int, kind SelectorKind,
|
||||||
filter: filter,
|
filter: filter,
|
||||||
objChan: make(chan *ObjectInfo, selectionSize*2),
|
objChan: make(chan *ObjectInfo, selectionSize*2),
|
||||||
cacheSize: selectionSize,
|
cacheSize: selectionSize,
|
||||||
kind: kind,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
go objSelector.selectLoop()
|
go objSelector.selectLoop()
|
||||||
|
@ -62,7 +60,7 @@ func (o *ObjSelector) NextObject() *ObjectInfo {
|
||||||
|
|
||||||
// Count returns total number of objects that match filter of the selector.
|
// Count returns total number of objects that match filter of the selector.
|
||||||
func (o *ObjSelector) Count() (int, error) {
|
func (o *ObjSelector) Count() (int, error) {
|
||||||
count := 0
|
var count = 0
|
||||||
err := o.boltDB.View(func(tx *bbolt.Tx) error {
|
err := o.boltDB.View(func(tx *bbolt.Tx) error {
|
||||||
b := tx.Bucket([]byte(o.filter.Status))
|
b := tx.Bucket([]byte(o.filter.Status))
|
||||||
if b == nil {
|
if b == nil {
|
||||||
|
@ -162,23 +160,15 @@ func (o *ObjSelector) selectLoop() {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if o.kind == SelectorOneshot && len(cache) != o.cacheSize {
|
if len(cache) != o.cacheSize {
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
if o.kind != SelectorLooped && len(cache) != o.cacheSize {
|
|
||||||
// no more objects, wait a little; the logic could be improved.
|
// no more objects, wait a little; the logic could be improved.
|
||||||
select {
|
select {
|
||||||
case <-time.After(time.Second):
|
case <-time.After(time.Second * time.Duration(o.filter.Age/2)):
|
||||||
case <-o.ctx.Done():
|
case <-o.ctx.Done():
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if o.kind == SelectorLooped && len(cache) != o.cacheSize {
|
|
||||||
lastID = 0
|
|
||||||
}
|
|
||||||
|
|
||||||
// clean handled objects
|
// clean handled objects
|
||||||
cache = cache[:0]
|
cache = cache[:0]
|
||||||
}
|
}
|
||||||
|
|
|
@@ -74,35 +74,7 @@ func (r *Registry) open(dbFilePath string) *ObjRegistry {
 	return registry
 }

-// SelectorKind represents selector behaviour when no items are available.
-type SelectorKind byte
-
-const (
-	// SelectorAwaiting waits for a new item to arrive.
-	// This selector visits each item exactly once and can be used when items
-	// to select are being pushed into registry concurrently.
-	SelectorAwaiting = iota
-	// SelectorLooped rewinds cursor to the start after all items have been read.
-	// It can encounter duplicates and should be used mostly for read scenarious.
-	SelectorLooped
-	// SelectorOneshot visits each item exactly once and exits immediately afterwards.
-	// It may be used to artificially abort the test after all items were processed.
-	SelectorOneshot
-)
-
 func (r *Registry) GetSelector(dbFilePath string, name string, cacheSize int, filter map[string]string) *ObjSelector {
-	return r.getSelectorInternal(dbFilePath, name, cacheSize, SelectorAwaiting, filter)
-}
-
-func (r *Registry) GetLoopedSelector(dbFilePath string, name string, cacheSize int, filter map[string]string) *ObjSelector {
-	return r.getSelectorInternal(dbFilePath, name, cacheSize, SelectorLooped, filter)
-}
-
-func (r *Registry) GetOneshotSelector(dbFilePath string, name string, cacheSize int, filter map[string]string) *ObjSelector {
-	return r.getSelectorInternal(dbFilePath, name, cacheSize, SelectorOneshot, filter)
-}
-
-func (r *Registry) getSelectorInternal(dbFilePath string, name string, cacheSize int, kind SelectorKind, filter map[string]string) *ObjSelector {
 	objFilter, err := parseFilter(filter)
 	if err != nil {
 		panic(err)
@@ -114,7 +86,7 @@ func (r *Registry) getSelectorInternal(dbFilePath string, name string, cacheSize
 	selector := r.root.selectors[name]
 	if selector == nil {
 		registry := r.open(dbFilePath)
-		selector = NewObjSelector(registry, cacheSize, kind, objFilter)
+		selector = NewObjSelector(registry, cacheSize, objFilter)
 		r.root.selectors[name] = selector
 	} else if !reflect.DeepEqual(selector.filter, objFilter) {
 		panic(fmt.Sprintf("selector %s already has been created with a different filter", name))
@@ -122,10 +94,6 @@ func (r *Registry) getSelectorInternal(dbFilePath string, name string, cacheSize
 	return selector
 }

-func (r *Registry) GetExporter(selector *ObjSelector) *ObjExporter {
-	return NewObjExporter(selector)
-}
-
 func parseFilter(filter map[string]string) (*ObjFilter, error) {
 	objFilter := ObjFilter{}
 	objFilter.Status = filter["status"]
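The SelectorKind constants removed above change how the select loop reacts when a batch comes back short. A compact sketch of that decision, with a hypothetical in-memory fetch in place of the bbolt cursor:

package main

import "fmt"

// SelectorKind mirrors the constants in the hunk above; drain shows how each
// kind reacts when a batch is smaller than the cache size.
type SelectorKind byte

const (
	SelectorAwaiting SelectorKind = iota
	SelectorLooped
	SelectorOneshot
)

func drain(kind SelectorKind, fetch func(after int) []int) []int {
	const cacheSize = 3
	var out []int
	lastID := 0
	for rounds := 0; rounds < 5; rounds++ {
		batch := fetch(lastID)
		out = append(out, batch...)
		if len(batch) > 0 {
			lastID = batch[len(batch)-1]
		}
		if len(batch) != cacheSize {
			switch kind {
			case SelectorOneshot:
				return out // exhausted: stop immediately
			case SelectorLooped:
				lastID = 0 // rewind and read everything again
			default:
				return out // SelectorAwaiting would instead wait for new items
			}
		}
	}
	return out
}

func main() {
	items := []int{1, 2, 3, 4}
	fetch := func(after int) []int {
		var b []int
		for _, it := range items {
			if it > after && len(b) < 3 {
				b = append(b, it)
			}
		}
		return b
	}
	fmt.Println(drain(SelectorOneshot, fetch)) // [1 2 3 4]
	fmt.Println(drain(SelectorLooped, fetch))  // repeats items across rounds
}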
@ -1,19 +1,18 @@
|
||||||
package s3
|
package s3
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"bytes"
|
||||||
"context"
|
"context"
|
||||||
"crypto/sha256"
|
"crypto/sha256"
|
||||||
"encoding/hex"
|
"encoding/hex"
|
||||||
"fmt"
|
|
||||||
"strconv"
|
"strconv"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"git.frostfs.info/TrueCloudLab/xk6-frostfs/internal/datagen"
|
|
||||||
"git.frostfs.info/TrueCloudLab/xk6-frostfs/internal/stats"
|
"git.frostfs.info/TrueCloudLab/xk6-frostfs/internal/stats"
|
||||||
"github.com/aws/aws-sdk-go-v2/aws"
|
"github.com/aws/aws-sdk-go-v2/aws"
|
||||||
"github.com/aws/aws-sdk-go-v2/feature/s3/manager"
|
|
||||||
"github.com/aws/aws-sdk-go-v2/service/s3"
|
"github.com/aws/aws-sdk-go-v2/service/s3"
|
||||||
"github.com/aws/aws-sdk-go-v2/service/s3/types"
|
"github.com/aws/aws-sdk-go-v2/service/s3/types"
|
||||||
|
"github.com/dop251/goja"
|
||||||
"go.k6.io/k6/js/modules"
|
"go.k6.io/k6/js/modules"
|
||||||
"go.k6.io/k6/metrics"
|
"go.k6.io/k6/metrics"
|
||||||
)
|
)
|
||||||
|
@ -50,9 +49,9 @@ type (
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
func (c *Client) Put(bucket, key string, payload datagen.Payload) PutResponse {
|
func (c *Client) Put(bucket, key string, payload goja.ArrayBuffer) PutResponse {
|
||||||
rdr := payload.Reader()
|
rdr := bytes.NewReader(payload.Bytes())
|
||||||
sz := payload.Size()
|
sz := rdr.Size()
|
||||||
|
|
||||||
start := time.Now()
|
start := time.Now()
|
||||||
_, err := c.cli.PutObject(c.vu.Context(), &s3.PutObjectInput{
|
_, err := c.cli.PutObject(c.vu.Context(), &s3.PutObjectInput{
|
||||||
|
@ -65,44 +64,9 @@ func (c *Client) Put(bucket, key string, payload datagen.Payload) PutResponse {
|
||||||
return PutResponse{Success: false, Error: err.Error()}
|
return PutResponse{Success: false, Error: err.Error()}
|
||||||
}
|
}
|
||||||
|
|
||||||
stats.Report(c.vu, objPutSuccess, 1)
|
stats.Report(c.vu, objPutTotal, 1)
|
||||||
stats.ReportDataSent(c.vu, float64(sz))
|
stats.ReportDataSent(c.vu, float64(sz))
|
||||||
stats.Report(c.vu, objPutDuration, metrics.D(time.Since(start)))
|
stats.Report(c.vu, objPutDuration, metrics.D(time.Since(start)))
|
||||||
stats.Report(c.vu, objPutData, float64(sz))
|
|
||||||
return PutResponse{Success: true}
|
|
||||||
}
|
|
||||||
|
|
||||||
const multipartUploadMinPartSize = 5 * 1024 * 1024 // 5MB
|
|
||||||
|
|
||||||
func (c *Client) Multipart(bucket, key string, objPartSize, concurrency int, payload datagen.Payload) PutResponse {
|
|
||||||
if objPartSize < multipartUploadMinPartSize {
|
|
||||||
stats.Report(c.vu, objPutFails, 1)
|
|
||||||
return PutResponse{Success: false, Error: fmt.Sprintf("part size '%d' must be greater than '%d'(5 MB)", objPartSize, multipartUploadMinPartSize)}
|
|
||||||
}
|
|
||||||
|
|
||||||
start := time.Now()
|
|
||||||
uploader := manager.NewUploader(c.cli, func(u *manager.Uploader) {
|
|
||||||
u.PartSize = int64(objPartSize)
|
|
||||||
u.Concurrency = concurrency
|
|
||||||
})
|
|
||||||
|
|
||||||
payloadReader := payload.Reader()
|
|
||||||
sz := payload.Size()
|
|
||||||
|
|
||||||
_, err := uploader.Upload(c.vu.Context(), &s3.PutObjectInput{
|
|
||||||
Bucket: aws.String(bucket),
|
|
||||||
Key: aws.String(key),
|
|
||||||
Body: payloadReader,
|
|
||||||
})
|
|
||||||
if err != nil {
|
|
||||||
stats.Report(c.vu, objPutFails, 1)
|
|
||||||
return PutResponse{Success: false, Error: err.Error()}
|
|
||||||
}
|
|
||||||
|
|
||||||
stats.Report(c.vu, objPutSuccess, 1)
|
|
||||||
stats.ReportDataSent(c.vu, float64(sz))
|
|
||||||
stats.Report(c.vu, objPutDuration, metrics.D(time.Since(start)))
|
|
||||||
stats.Report(c.vu, objPutData, float64(sz))
|
|
||||||
return PutResponse{Success: true}
|
return PutResponse{Success: true}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
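The removed Multipart helper drives the AWS SDK v2 uploader with a part size and concurrency. A sketch of the same call sequence against a plain s3.Client; bucket and key names are placeholders, and the 5 MiB minimum matches the removed constant:

package main

import (
	"bytes"
	"context"
	"fmt"
	"log"

	"github.com/aws/aws-sdk-go-v2/aws"
	"github.com/aws/aws-sdk-go-v2/config"
	"github.com/aws/aws-sdk-go-v2/feature/s3/manager"
	"github.com/aws/aws-sdk-go-v2/service/s3"
)

const multipartUploadMinPartSize = 5 * 1024 * 1024 // S3 minimum part size

// multipartPut splits the payload into concurrent part uploads, the same way
// the removed Multipart method configured manager.Uploader.
func multipartPut(ctx context.Context, cli *s3.Client, bucket, key string, partSize int64, concurrency int, payload []byte) error {
	if partSize < multipartUploadMinPartSize {
		return fmt.Errorf("part size %d must be at least %d", partSize, multipartUploadMinPartSize)
	}
	uploader := manager.NewUploader(cli, func(u *manager.Uploader) {
		u.PartSize = partSize
		u.Concurrency = concurrency
	})
	_, err := uploader.Upload(ctx, &s3.PutObjectInput{
		Bucket: aws.String(bucket),
		Key:    aws.String(key),
		Body:   bytes.NewReader(payload),
	})
	return err
}

func main() {
	ctx := context.Background()
	cfg, err := config.LoadDefaultConfig(ctx)
	if err != nil {
		log.Fatal(err)
	}
	cli := s3.NewFromConfig(cfg)
	// 16 MiB payload uploaded in 8 MiB parts, two parts in flight.
	payload := bytes.Repeat([]byte("x"), 16*1024*1024)
	if err := multipartPut(ctx, cli, "test-bucket", "big-object", 8*1024*1024, 2, payload); err != nil {
		log.Fatal(err)
	}
}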
@ -118,7 +82,7 @@ func (c *Client) Delete(bucket, key string) DeleteResponse {
|
||||||
return DeleteResponse{Success: false, Error: err.Error()}
|
return DeleteResponse{Success: false, Error: err.Error()}
|
||||||
}
|
}
|
||||||
|
|
||||||
stats.Report(c.vu, objDeleteSuccess, 1)
|
stats.Report(c.vu, objDeleteTotal, 1)
|
||||||
stats.Report(c.vu, objDeleteDuration, metrics.D(time.Since(start)))
|
stats.Report(c.vu, objDeleteDuration, metrics.D(time.Since(start)))
|
||||||
return DeleteResponse{Success: true}
|
return DeleteResponse{Success: true}
|
||||||
}
|
}
|
||||||
|
@ -126,7 +90,7 @@ func (c *Client) Delete(bucket, key string) DeleteResponse {
|
||||||
func (c *Client) Get(bucket, key string) GetResponse {
|
func (c *Client) Get(bucket, key string) GetResponse {
|
||||||
start := time.Now()
|
start := time.Now()
|
||||||
|
|
||||||
objSize := 0
|
var objSize = 0
|
||||||
err := get(c.cli, bucket, key, func(chunk []byte) {
|
err := get(c.cli, bucket, key, func(chunk []byte) {
|
||||||
objSize += len(chunk)
|
objSize += len(chunk)
|
||||||
})
|
})
|
||||||
|
@ -135,10 +99,9 @@ func (c *Client) Get(bucket, key string) GetResponse {
|
||||||
return GetResponse{Success: false, Error: err.Error()}
|
return GetResponse{Success: false, Error: err.Error()}
|
||||||
}
|
}
|
||||||
|
|
||||||
stats.Report(c.vu, objGetSuccess, 1)
|
stats.Report(c.vu, objGetTotal, 1)
|
||||||
stats.Report(c.vu, objGetDuration, metrics.D(time.Since(start)))
|
stats.Report(c.vu, objGetDuration, metrics.D(time.Since(start)))
|
||||||
stats.ReportDataReceived(c.vu, float64(objSize))
|
stats.ReportDataReceived(c.vu, float64(objSize))
|
||||||
stats.Report(c.vu, objGetData, float64(objSize))
|
|
||||||
return GetResponse{Success: true}
|
return GetResponse{Success: true}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -215,7 +178,7 @@ func (c *Client) CreateBucket(bucket string, params map[string]string) CreateBuc
|
||||||
return CreateBucketResponse{Success: false, Error: err.Error()}
|
return CreateBucketResponse{Success: false, Error: err.Error()}
|
||||||
}
|
}
|
||||||
|
|
||||||
stats.Report(c.vu, createBucketSuccess, 1)
|
stats.Report(c.vu, createBucketTotal, 1)
|
||||||
stats.Report(c.vu, createBucketDuration, metrics.D(time.Since(start)))
|
stats.Report(c.vu, createBucketDuration, metrics.D(time.Since(start)))
|
||||||
return CreateBucketResponse{Success: true}
|
return CreateBucketResponse{Success: true}
|
||||||
}
|
}
|
||||||
|
|
|
@ -7,7 +7,6 @@ import (
|
||||||
"strconv"
|
"strconv"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"git.frostfs.info/TrueCloudLab/xk6-frostfs/internal/stats"
|
|
||||||
"github.com/aws/aws-sdk-go-v2/aws"
|
"github.com/aws/aws-sdk-go-v2/aws"
|
||||||
"github.com/aws/aws-sdk-go-v2/config"
|
"github.com/aws/aws-sdk-go-v2/config"
|
||||||
"github.com/aws/aws-sdk-go-v2/service/s3"
|
"github.com/aws/aws-sdk-go-v2/service/s3"
|
||||||
|
@ -29,10 +28,10 @@ var (
|
||||||
_ modules.Instance = &S3{}
|
_ modules.Instance = &S3{}
|
||||||
_ modules.Module = &RootModule{}
|
_ modules.Module = &RootModule{}
|
||||||
|
|
||||||
objPutSuccess, objPutFails, objPutDuration, objPutData *metrics.Metric
|
objPutTotal, objPutFails, objPutDuration *metrics.Metric
|
||||||
objGetSuccess, objGetFails, objGetDuration, objGetData *metrics.Metric
|
objGetTotal, objGetFails, objGetDuration *metrics.Metric
|
||||||
objDeleteSuccess, objDeleteFails, objDeleteDuration *metrics.Metric
|
objDeleteTotal, objDeleteFails, objDeleteDuration *metrics.Metric
|
||||||
createBucketSuccess, createBucketFails, createBucketDuration *metrics.Metric
|
createBucketTotal, createBucketFails, createBucketDuration *metrics.Metric
|
||||||
)
|
)
|
||||||
|
|
||||||
func init() {
|
func init() {
|
||||||
|
@ -95,23 +94,22 @@ func (s *S3) Connect(endpoint string, params map[string]string) (*Client, error)
|
||||||
})
|
})
|
||||||
|
|
||||||
// register metrics
|
// register metrics
|
||||||
objPutSuccess, _ = stats.Registry.NewMetric("aws_obj_put_success", metrics.Counter)
|
registry := metrics.NewRegistry()
|
||||||
objPutFails, _ = stats.Registry.NewMetric("aws_obj_put_fails", metrics.Counter)
|
objPutTotal, _ = registry.NewMetric("aws_obj_put_total", metrics.Counter)
|
||||||
objPutDuration, _ = stats.Registry.NewMetric("aws_obj_put_duration", metrics.Trend, metrics.Time)
|
objPutFails, _ = registry.NewMetric("aws_obj_put_fails", metrics.Counter)
|
||||||
objPutData, _ = stats.Registry.NewMetric("aws_obj_put_bytes", metrics.Counter, metrics.Data)
|
objPutDuration, _ = registry.NewMetric("aws_obj_put_duration", metrics.Trend, metrics.Time)
|
||||||
|
|
||||||
objGetSuccess, _ = stats.Registry.NewMetric("aws_obj_get_success", metrics.Counter)
|
objGetTotal, _ = registry.NewMetric("aws_obj_get_total", metrics.Counter)
|
||||||
objGetFails, _ = stats.Registry.NewMetric("aws_obj_get_fails", metrics.Counter)
|
objGetFails, _ = registry.NewMetric("aws_obj_get_fails", metrics.Counter)
|
||||||
objGetDuration, _ = stats.Registry.NewMetric("aws_obj_get_duration", metrics.Trend, metrics.Time)
|
objGetDuration, _ = registry.NewMetric("aws_obj_get_duration", metrics.Trend, metrics.Time)
|
||||||
objGetData, _ = stats.Registry.NewMetric("aws_obj_get_bytes", metrics.Counter, metrics.Data)
|
|
||||||
|
|
||||||
objDeleteSuccess, _ = stats.Registry.NewMetric("aws_obj_delete_success", metrics.Counter)
|
objDeleteTotal, _ = registry.NewMetric("aws_obj_delete_total", metrics.Counter)
|
||||||
objDeleteFails, _ = stats.Registry.NewMetric("aws_obj_delete_fails", metrics.Counter)
|
objDeleteFails, _ = registry.NewMetric("aws_obj_delete_fails", metrics.Counter)
|
||||||
objDeleteDuration, _ = stats.Registry.NewMetric("aws_obj_delete_duration", metrics.Trend, metrics.Time)
|
objDeleteDuration, _ = registry.NewMetric("aws_obj_delete_duration", metrics.Trend, metrics.Time)
|
||||||
|
|
||||||
createBucketSuccess, _ = stats.Registry.NewMetric("aws_create_bucket_success", metrics.Counter)
|
createBucketTotal, _ = registry.NewMetric("aws_create_bucket_total", metrics.Counter)
|
||||||
createBucketFails, _ = stats.Registry.NewMetric("aws_create_bucket_fails", metrics.Counter)
|
createBucketFails, _ = registry.NewMetric("aws_create_bucket_fails", metrics.Counter)
|
||||||
createBucketDuration, _ = stats.Registry.NewMetric("aws_create_bucket_duration", metrics.Trend, metrics.Time)
|
createBucketDuration, _ = registry.NewMetric("aws_create_bucket_duration", metrics.Trend, metrics.Time)
|
||||||
|
|
||||||
return &Client{
|
return &Client{
|
||||||
vu: s.vu,
|
vu: s.vu,
|
||||||
|
|
|
@ -1,14 +1,14 @@
|
||||||
package s3local
|
package s3local
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"bytes"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/api/data"
|
"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/api/data"
|
||||||
"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/api/layer"
|
"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/api/layer"
|
||||||
"git.frostfs.info/TrueCloudLab/frostfs-sdk-go/user"
|
"git.frostfs.info/TrueCloudLab/frostfs-sdk-go/user"
|
||||||
"git.frostfs.info/TrueCloudLab/xk6-frostfs/internal/datagen"
|
|
||||||
"git.frostfs.info/TrueCloudLab/xk6-frostfs/internal/local"
|
|
||||||
"git.frostfs.info/TrueCloudLab/xk6-frostfs/internal/stats"
|
"git.frostfs.info/TrueCloudLab/xk6-frostfs/internal/stats"
|
||||||
|
"github.com/dop251/goja"
|
||||||
"go.k6.io/k6/js/modules"
|
"go.k6.io/k6/js/modules"
|
||||||
"go.k6.io/k6/metrics"
|
"go.k6.io/k6/metrics"
|
||||||
)
|
)
|
||||||
|
@ -18,13 +18,11 @@ type Client struct {
|
||||||
l layer.Client
|
l layer.Client
|
||||||
ownerID *user.ID
|
ownerID *user.ID
|
||||||
resolver layer.BucketResolver
|
resolver layer.BucketResolver
|
||||||
limiter local.Limiter
|
|
||||||
}
|
}
|
||||||
|
|
||||||
type (
|
type (
|
||||||
SuccessOrErrorResponse struct {
|
SuccessOrErrorResponse struct {
|
||||||
Success bool
|
Success bool
|
||||||
Abort bool
|
|
||||||
Error string
|
Error string
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -34,14 +32,7 @@ type (
|
||||||
GetResponse SuccessOrErrorResponse
|
GetResponse SuccessOrErrorResponse
|
||||||
)
|
)
|
||||||
|
|
||||||
func (c *Client) Put(bucket, key string, payload datagen.Payload) PutResponse {
|
func (c *Client) Put(bucket, key string, payload goja.ArrayBuffer) PutResponse {
|
||||||
if c.limiter.IsFull() {
|
|
||||||
return PutResponse{
|
|
||||||
Success: false,
|
|
||||||
Abort: true,
|
|
||||||
Error: "engine size limit reached",
|
|
||||||
}
|
|
||||||
}
|
|
||||||
cid, err := c.resolver.Resolve(c.vu.Context(), bucket)
|
cid, err := c.resolver.Resolve(c.vu.Context(), bucket)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
stats.Report(c.vu, objPutFails, 1)
|
stats.Report(c.vu, objPutFails, 1)
|
||||||
|
@ -57,8 +48,8 @@ func (c *Client) Put(bucket, key string, payload datagen.Payload) PutResponse {
|
||||||
},
|
},
|
||||||
Header: map[string]string{},
|
Header: map[string]string{},
|
||||||
Object: key,
|
Object: key,
|
||||||
Size: int64(payload.Size()),
|
Size: int64(len(payload.Bytes())),
|
||||||
Reader: payload.Reader(),
|
Reader: bytes.NewReader(payload.Bytes()),
|
||||||
}
|
}
|
||||||
|
|
||||||
start := time.Now()
|
start := time.Now()
|
||||||
|
@ -68,9 +59,8 @@ func (c *Client) Put(bucket, key string, payload datagen.Payload) PutResponse {
|
||||||
}
|
}
|
||||||
|
|
||||||
stats.Report(c.vu, objPutDuration, metrics.D(time.Since(start)))
|
stats.Report(c.vu, objPutDuration, metrics.D(time.Since(start)))
|
||||||
stats.Report(c.vu, objPutSuccess, 1)
|
stats.Report(c.vu, objPutTotal, 1)
|
||||||
stats.ReportDataSent(c.vu, float64(prm.Size))
|
stats.ReportDataSent(c.vu, float64(prm.Size))
|
||||||
stats.Report(c.vu, objPutData, float64(prm.Size))
|
|
||||||
|
|
||||||
return PutResponse{Success: true}
|
return PutResponse{Success: true}
|
||||||
}
|
}
|
||||||
|
@ -116,9 +106,8 @@ func (c *Client) Get(bucket, key string) GetResponse {
|
||||||
}
|
}
|
||||||
|
|
||||||
stats.Report(c.vu, objGetDuration, metrics.D(time.Since(start)))
|
stats.Report(c.vu, objGetDuration, metrics.D(time.Since(start)))
|
||||||
stats.Report(c.vu, objGetSuccess, 1)
|
stats.Report(c.vu, objGetTotal, 1)
|
||||||
stats.ReportDataReceived(c.vu, wr.total)
|
stats.ReportDataReceived(c.vu, wr.total)
|
||||||
stats.Report(c.vu, objGetData, wr.total)
|
|
||||||
|
|
||||||
return GetResponse{Success: true}
|
return GetResponse{Success: true}
|
||||||
}
|
}
|
||||||
|
|
|
@ -32,10 +32,10 @@ var (
|
||||||
_ modules.Module = &RootModule{}
|
_ modules.Module = &RootModule{}
|
||||||
_ modules.Instance = &Local{}
|
_ modules.Instance = &Local{}
|
||||||
|
|
||||||
internalObjPutSuccess, internalObjPutFails, internalObjPutDuration, internalObjPutData *metrics.Metric
|
internalObjPutTotal, internalObjPutFails, internalObjPutDuration *metrics.Metric
|
||||||
internalObjGetSuccess, internalObjGetFails, internalObjGetDuration, internalObjGetData *metrics.Metric
|
internalObjGetTotal, internalObjGetFails, internalObjGetDuration *metrics.Metric
|
||||||
objPutSuccess, objPutFails, objPutDuration, objPutData *metrics.Metric
|
objPutTotal, objPutFails, objPutDuration *metrics.Metric
|
||||||
objGetSuccess, objGetFails, objGetDuration, objGetData *metrics.Metric
|
objGetTotal, objGetFails, objGetDuration *metrics.Metric
|
||||||
)
|
)
|
||||||
|
|
||||||
func init() {
|
func init() {
|
||||||
|
@ -56,7 +56,7 @@ func (s *Local) Exports() modules.Exports {
|
||||||
return modules.Exports{Default: s}
|
return modules.Exports{Default: s}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *Local) Connect(configFile string, configDir string, params map[string]string, bucketMapping map[string]string, maxSizeGB int64) (*Client, error) {
|
func (s *Local) Connect(configFile string, params map[string]string, bucketMapping map[string]string) (*Client, error) {
|
||||||
// Parse configuration flags.
|
// Parse configuration flags.
|
||||||
fs := flag.NewFlagSet("s3local", flag.ContinueOnError)
|
fs := flag.NewFlagSet("s3local", flag.ContinueOnError)
|
||||||
|
|
||||||
|
@ -88,37 +88,35 @@ func (s *Local) Connect(configFile string, configDir string, params map[string]s
|
||||||
}
|
}
|
||||||
|
|
||||||
// Register metrics.
|
// Register metrics.
|
||||||
internalObjPutSuccess, _ = stats.Registry.NewMetric("s3local_internal_obj_put_success", metrics.Counter)
|
registry := metrics.NewRegistry()
|
||||||
internalObjPutFails, _ = stats.Registry.NewMetric("s3local_internal_obj_put_fails", metrics.Counter)
|
|
||||||
internalObjPutDuration, _ = stats.Registry.NewMetric("s3local_internal_obj_put_duration", metrics.Trend, metrics.Time)
|
|
||||||
internalObjPutData, _ = stats.Registry.NewMetric("s3local_internal_obj_put_bytes", metrics.Counter, metrics.Data)
|
|
||||||
|
|
||||||
internalObjGetSuccess, _ = stats.Registry.NewMetric("s3local_internal_obj_get_success", metrics.Counter)
|
internalObjPutTotal, _ = registry.NewMetric("s3local_internal_obj_put_total", metrics.Counter)
|
||||||
internalObjGetFails, _ = stats.Registry.NewMetric("s3local_internal_obj_get_fails", metrics.Counter)
|
internalObjPutFails, _ = registry.NewMetric("s3local_internal_obj_put_fails", metrics.Counter)
|
||||||
internalObjGetDuration, _ = stats.Registry.NewMetric("s3local_internal_obj_get_duration", metrics.Trend, metrics.Time)
|
internalObjPutDuration, _ = registry.NewMetric("s3local_internal_obj_put_duration", metrics.Trend, metrics.Time)
|
||||||
internalObjGetData, _ = stats.Registry.NewMetric("s3local_internal_obj_get_bytes", metrics.Counter, metrics.Data)
|
|
||||||
|
|
||||||
objPutSuccess, _ = stats.Registry.NewMetric("s3local_obj_put_success", metrics.Counter)
|
internalObjGetTotal, _ = registry.NewMetric("s3local_internal_obj_get_total", metrics.Counter)
|
||||||
objPutFails, _ = stats.Registry.NewMetric("s3local_obj_put_fails", metrics.Counter)
|
internalObjGetFails, _ = registry.NewMetric("s3local_internal_obj_get_fails", metrics.Counter)
|
||||||
objPutDuration, _ = stats.Registry.NewMetric("s3local_obj_put_duration", metrics.Trend, metrics.Time)
|
internalObjGetDuration, _ = registry.NewMetric("s3local_internal_obj_get_duration", metrics.Trend, metrics.Time)
|
||||||
objPutData, _ = stats.Registry.NewMetric("s3local_obj_put_bytes", metrics.Counter, metrics.Data)
|
|
||||||
|
|
||||||
objGetSuccess, _ = stats.Registry.NewMetric("s3local_obj_get_success", metrics.Counter)
|
objPutTotal, _ = registry.NewMetric("s3local_obj_put_total", metrics.Counter)
|
||||||
objGetFails, _ = stats.Registry.NewMetric("s3local_obj_get_fails", metrics.Counter)
|
objPutFails, _ = registry.NewMetric("s3local_obj_put_fails", metrics.Counter)
|
||||||
objGetDuration, _ = stats.Registry.NewMetric("s3local_obj_get_duration", metrics.Trend, metrics.Time)
|
objPutDuration, _ = registry.NewMetric("s3local_obj_put_duration", metrics.Trend, metrics.Time)
|
||||||
objGetData, _ = stats.Registry.NewMetric("s3local_obj_get_bytes", metrics.Counter, metrics.Data)
|
|
||||||
|
objGetTotal, _ = registry.NewMetric("s3local_obj_get_total", metrics.Counter)
|
||||||
|
objGetFails, _ = registry.NewMetric("s3local_obj_get_fails", metrics.Counter)
|
||||||
|
objGetDuration, _ = registry.NewMetric("s3local_obj_get_duration", metrics.Trend, metrics.Time)
|
||||||
|
|
||||||
// Create S3 layer backed by local storage engine and tree service.
|
// Create S3 layer backed by local storage engine and tree service.
|
||||||
ng, limiter, err := s.l.ResolveEngine(s.l.VU().Context(), configFile, configDir, *debugLogger, maxSizeGB)
|
ng, err := s.l.ResolveEngine(s.l.VU().Context(), configFile, *debugLogger)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, fmt.Errorf("connecting to engine for config - file %q dir %q: %v", configFile, configDir, err)
|
return nil, fmt.Errorf("connecting to engine for config %q: %v", configFile, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
treeSvc := tree.NewTree(treeServiceEngineWrapper{
|
treeSvc := tree.NewTree(treeServiceEngineWrapper{
|
||||||
ng: ng,
|
ng: ng,
|
||||||
pos: *nodePosition,
|
pos: *nodePosition,
|
||||||
size: *nodeCount,
|
size: *nodeCount,
|
||||||
}, zap.L())
|
})
|
||||||
|
|
||||||
rc := rawclient.New(ng,
|
rc := rawclient.New(ng,
|
||||||
rawclient.WithKey(key.PrivateKey),
|
rawclient.WithKey(key.PrivateKey),
|
||||||
|
@ -126,18 +124,16 @@ func (s *Local) Connect(configFile string, configDir string, params map[string]s
|
||||||
if err != nil {
|
if err != nil {
|
||||||
stats.Report(s.l.VU(), internalObjPutFails, 1)
|
stats.Report(s.l.VU(), internalObjPutFails, 1)
|
||||||
} else {
|
} else {
|
||||||
stats.Report(s.l.VU(), internalObjPutSuccess, 1)
|
stats.Report(s.l.VU(), internalObjPutTotal, 1)
|
||||||
stats.Report(s.l.VU(), internalObjPutDuration, metrics.D(dt))
|
stats.Report(s.l.VU(), internalObjPutDuration, metrics.D(dt))
|
||||||
stats.Report(s.l.VU(), internalObjPutData, float64(sz))
|
|
||||||
}
|
}
|
||||||
}),
|
}),
|
||||||
rawclient.WithGetHandler(func(sz uint64, err error, dt time.Duration) {
|
rawclient.WithGetHandler(func(sz uint64, err error, dt time.Duration) {
|
||||||
if err != nil {
|
if err != nil {
|
||||||
stats.Report(s.l.VU(), internalObjGetFails, 1)
|
stats.Report(s.l.VU(), internalObjGetFails, 1)
|
||||||
} else {
|
} else {
|
||||||
stats.Report(s.l.VU(), internalObjGetSuccess, 1)
|
stats.Report(s.l.VU(), internalObjGetTotal, 1)
|
||||||
stats.Report(s.l.VU(), internalObjGetDuration, metrics.D(dt))
|
stats.Report(s.l.VU(), internalObjGetDuration, metrics.D(dt))
|
||||||
stats.Report(s.l.VU(), internalObjGetData, float64(sz))
|
|
||||||
}
|
}
|
||||||
}),
|
}),
|
||||||
)
|
)
|
||||||
|
@ -155,17 +151,13 @@ func (s *Local) Connect(configFile string, configDir string, params map[string]s
|
||||||
}
|
}
|
||||||
|
|
||||||
l := layer.NewLayer(zap.L(), &frostfs{rc}, cfg)
|
l := layer.NewLayer(zap.L(), &frostfs{rc}, cfg)
|
||||||
err = l.Initialize(s.l.VU().Context(), nopEventListener{})
|
l.Initialize(s.l.VU().Context(), nopEventListener{})
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf("initialize: %w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
return &Client{
|
return &Client{
|
||||||
vu: s.l.VU(),
|
vu: s.l.VU(),
|
||||||
l: l,
|
l: l,
|
||||||
ownerID: rc.OwnerID(),
|
ownerID: rc.OwnerID(),
|
||||||
resolver: resolver,
|
resolver: resolver,
|
||||||
limiter: limiter,
|
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@@ -115,7 +115,7 @@ func (s treeServiceEngineWrapper) GetSubTree(ctx context.Context, bktInfo *data.
 			return fmt.Errorf("getting children: %v", err)
 		}
 		for _, child := range children {
-			if err := traverse(child.ID, curDepth+1); err != nil {
+			if err := traverse(child, curDepth+1); err != nil {
 				return err
 			}
 		}
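The traverse closure in GetSubTree recurses over child nodes up to a depth limit. A stripped-down sketch of that shape with a hypothetical node type:

package main

import "fmt"

// node is a stand-in for the tree nodes walked by GetSubTree above.
type node struct {
	id       uint64
	children []*node
}

// traverse visits n and recurses into children until maxDepth is exceeded.
func traverse(n *node, curDepth, maxDepth uint32, visit func(*node)) error {
	if curDepth > maxDepth {
		return nil
	}
	visit(n)
	for _, child := range n.children {
		if err := traverse(child, curDepth+1, maxDepth, visit); err != nil {
			return err
		}
	}
	return nil
}

func main() {
	root := &node{id: 1, children: []*node{
		{id: 2, children: []*node{{id: 4}}},
		{id: 3},
	}}
	_ = traverse(root, 0, 1, func(n *node) { fmt.Println("visit", n.id) }) // prints 1, 2, 3
}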
@ -1,54 +1,16 @@
|
||||||
package stats
|
package stats
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"strings"
|
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"go.k6.io/k6/js/modules"
|
"go.k6.io/k6/js/modules"
|
||||||
"go.k6.io/k6/metrics"
|
"go.k6.io/k6/metrics"
|
||||||
)
|
)
|
||||||
|
|
||||||
// RootModule is the global module object type. It is instantiated once per test
|
|
||||||
// run and will be used to create k6/x/frostfs/stats module instances for each VU.
|
|
||||||
type RootModule struct {
|
|
||||||
Instance string
|
|
||||||
}
|
|
||||||
|
|
||||||
var (
|
|
||||||
tagSet *metrics.TagSet
|
|
||||||
|
|
||||||
Registry *metrics.Registry
|
|
||||||
)
|
|
||||||
|
|
||||||
func init() {
|
|
||||||
Registry = metrics.NewRegistry()
|
|
||||||
tagSet = Registry.RootTagSet()
|
|
||||||
modules.Register("k6/x/frostfs/stats", &RootModule{})
|
|
||||||
}
|
|
||||||
|
|
||||||
// SetTags sets additional tags to custom metrics.
|
|
||||||
// Format: "key1:value1;key2:value2".
|
|
||||||
// Panics if input has invalid format.
|
|
||||||
func (m *RootModule) SetTags(labels string) {
|
|
||||||
kv := make(map[string]string)
|
|
||||||
pairs := strings.Split(labels, ";")
|
|
||||||
for _, pair := range pairs {
|
|
||||||
items := strings.Split(pair, ":")
|
|
||||||
if len(items) != 2 {
|
|
||||||
panic("invalid labels format")
|
|
||||||
}
|
|
||||||
kv[strings.TrimSpace(items[0])] = strings.TrimSpace(items[1])
|
|
||||||
}
|
|
||||||
for k, v := range kv {
|
|
||||||
tagSet = tagSet.With(k, v)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func Report(vu modules.VU, metric *metrics.Metric, value float64) {
|
func Report(vu modules.VU, metric *metrics.Metric, value float64) {
|
||||||
metrics.PushIfNotDone(vu.Context(), vu.State().Samples, metrics.Sample{
|
metrics.PushIfNotDone(vu.Context(), vu.State().Samples, metrics.Sample{
|
||||||
TimeSeries: metrics.TimeSeries{
|
TimeSeries: metrics.TimeSeries{
|
||||||
Metric: metric,
|
Metric: metric,
|
||||||
Tags: tagSet,
|
|
||||||
},
|
},
|
||||||
Time: time.Now(),
|
Time: time.Now(),
|
||||||
Value: value,
|
Value: value,
|
||||||
|
@ -60,11 +22,9 @@ func ReportDataReceived(vu modules.VU, value float64) {
|
||||||
metrics.Sample{
|
metrics.Sample{
|
||||||
TimeSeries: metrics.TimeSeries{
|
TimeSeries: metrics.TimeSeries{
|
||||||
Metric: &metrics.Metric{},
|
Metric: &metrics.Metric{},
|
||||||
Tags: tagSet,
|
|
||||||
},
|
},
|
||||||
Value: value,
|
Value: value,
|
||||||
Time: time.Now(),
|
Time: time.Now()},
|
||||||
},
|
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -74,10 +34,8 @@ func ReportDataSent(vu modules.VU, value float64) {
|
||||||
metrics.Sample{
|
metrics.Sample{
|
||||||
TimeSeries: metrics.TimeSeries{
|
TimeSeries: metrics.TimeSeries{
|
||||||
Metric: &metrics.Metric{},
|
Metric: &metrics.Metric{},
|
||||||
Tags: tagSet,
|
|
||||||
},
|
},
|
||||||
Value: value,
|
Value: value,
|
||||||
Time: time.Now(),
|
Time: time.Now()},
|
||||||
},
|
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
|
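The stats hunks above drop the SetTags helper, which parsed "key1:value1;key2:value2" label strings into a tag set. A sketch of that parsing on its own, returning an error instead of panicking:

package main

import (
	"fmt"
	"strings"
)

// parseLabels splits "key1:value1;key2:value2" into a map, trimming whitespace
// around keys and values the way the removed SetTags did.
func parseLabels(labels string) (map[string]string, error) {
	kv := make(map[string]string)
	for _, pair := range strings.Split(labels, ";") {
		items := strings.SplitN(pair, ":", 2)
		if len(items) != 2 {
			return nil, fmt.Errorf("invalid label %q, want key:value", pair)
		}
		kv[strings.TrimSpace(items[0])] = strings.TrimSpace(items[1])
	}
	return kv, nil
}

func main() {
	tags, err := parseLabels("env:dev;node:storage-1")
	fmt.Println(tags, err) // map[env:dev node:storage-1] <nil>
}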
@@ -1,6 +0,0 @@
-package version
-
-var (
-	// Version is the xk6 command-line utils version.
-	Version = "dev"
-)
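The removed version package pins Version to "dev". A sketch of how such a variable is normally overridden at build time with -ldflags; the package/variable path in the command is illustrative only:

package main

import "fmt"

// Version defaults to "dev"; release builds usually override it at link time:
//
//	go build -ldflags "-X main.Version=v1.2.3" .
//
// Without the flag the binary keeps reporting the default.
var Version = "dev"

func main() {
	fmt.Println("xk6-frostfs version:", Version)
}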
@ -1,222 +1,173 @@
-import {sleep} from 'k6';
-import {SharedArray} from 'k6/data';
-import exec from 'k6/execution';
-import logging from 'k6/x/frostfs/logging';
+import datagen from 'k6/x/frostfs/datagen';
 import native from 'k6/x/frostfs/native';
+import logging from 'k6/x/frostfs/logging';
 import registry from 'k6/x/frostfs/registry';
-import stats from 'k6/x/frostfs/stats';
-import {newGenerator} from './libs/datagen.js';
-import {parseEnv} from './libs/env-parser.js';
-import {textSummary} from './libs/k6-summary-0.0.2.js';
-import {uuidv4} from './libs/k6-utils-1.4.0.js';
+import { SharedArray } from 'k6/data';
+import { sleep } from 'k6';
+import { textSummary } from './libs/k6-summary-0.0.2.js';
+import { parseEnv } from './libs/env-parser.js';
+import { uuidv4 } from './libs/k6-utils-1.4.0.js';
 
 parseEnv();
 
-const obj_list = new SharedArray(
-    'obj_list',
-    function() { return JSON.parse(open(__ENV.PREGEN_JSON)).objects; });
+const obj_list = new SharedArray('obj_list', function () {
+    return JSON.parse(open(__ENV.PREGEN_JSON)).objects;
+});
 
-const container_list = new SharedArray(
-    'container_list',
-    function() { return JSON.parse(open(__ENV.PREGEN_JSON)).containers; });
+const container_list = new SharedArray('container_list', function () {
+    return JSON.parse(open(__ENV.PREGEN_JSON)).containers;
+});
 
 const read_size = JSON.parse(open(__ENV.PREGEN_JSON)).obj_size;
-const summary_json = __ENV.SUMMARY_JSON || '/tmp/summary.json';
+const summary_json = __ENV.SUMMARY_JSON || "/tmp/summary.json";
 
 // Select random gRPC endpoint for current VU
 const grpc_endpoints = __ENV.GRPC_ENDPOINTS.split(',');
-const grpc_endpoint =
-    grpc_endpoints[Math.floor(Math.random() * grpc_endpoints.length)];
-const grpc_client = native.connect(
-    grpc_endpoint, '', __ENV.DIAL_TIMEOUT ? parseInt(__ENV.DIAL_TIMEOUT) : 5,
-    __ENV.STREAM_TIMEOUT ? parseInt(__ENV.STREAM_TIMEOUT) : 60,
-    __ENV.PREPARE_LOCALLY ? __ENV.PREPARE_LOCALLY.toLowerCase() === 'true'
-                          : false);
-const log = logging.new().withField('endpoint', grpc_endpoint);
+const grpc_endpoint = grpc_endpoints[Math.floor(Math.random() * grpc_endpoints.length)];
+const grpc_client = native.connect(grpc_endpoint, '', __ENV.DIAL_TIMEOUT ? parseInt(__ENV.DIAL_TIMEOUT) : 5, __ENV.STREAM_TIMEOUT ? parseInt(__ENV.STREAM_TIMEOUT) : 60);
+const log = logging.new().withField("endpoint", grpc_endpoint);
 
 const registry_enabled = !!__ENV.REGISTRY_FILE;
-const obj_registry =
-    registry_enabled ? registry.open(__ENV.REGISTRY_FILE) : undefined;
+const obj_registry = registry_enabled ? registry.open(__ENV.REGISTRY_FILE) : undefined;
 
 const duration = __ENV.DURATION;
 
-if (!!__ENV.METRIC_TAGS) {
-    stats.setTags(__ENV.METRIC_TAGS)
+const delete_age = __ENV.DELETE_AGE ? parseInt(__ENV.DELETE_AGE) : undefined;
+let obj_to_delete_selector = undefined;
+if (registry_enabled && delete_age) {
+    obj_to_delete_selector = registry.getSelector(
+        __ENV.REGISTRY_FILE,
+        "obj_to_delete",
+        __ENV.SELECTION_SIZE ? parseInt(__ENV.SELECTION_SIZE) : 0,
+        {
+            status: "created",
+            age: delete_age,
+        }
+    );
 }
 
-const read_age = __ENV.READ_AGE ? parseInt(__ENV.READ_AGE) : 10;
-let obj_to_read_selector = undefined;
-if (registry_enabled) {
-    obj_to_read_selector = registry.getLoopedSelector(
-        __ENV.REGISTRY_FILE, 'obj_to_read',
-        __ENV.SELECTION_SIZE ? parseInt(__ENV.SELECTION_SIZE) : 0, {
-            status : 'created',
-            age : read_age,
-        })
-}
+const generator = datagen.generator(1024 * parseInt(__ENV.WRITE_OBJ_SIZE), __ENV.PAYLOAD_TYPE || "");
 
 const scenarios = {};
 
 const write_vu_count = parseInt(__ENV.WRITERS || '0');
-const write_grpc_chunk_size = 1024 * parseInt(__ENV.GRPC_CHUNK_SIZE || '0')
-const generator = newGenerator(write_vu_count > 0);
 if (write_vu_count > 0) {
     scenarios.write = {
-        executor : 'constant-vus',
-        vus : write_vu_count,
-        duration : `${duration}s`,
-        exec : 'obj_write',
-        gracefulStop : '5s',
+        executor: 'constant-vus',
+        vus: write_vu_count,
+        duration: `${duration}s`,
+        exec: 'obj_write',
+        gracefulStop: '5s',
     };
 }
 
-const delete_age = __ENV.DELETE_AGE ? parseInt(__ENV.DELETE_AGE) : undefined;
-let obj_to_delete_selector = undefined;
-let obj_to_delete_exit_on_null = undefined;
-if (registry_enabled && delete_age) {
-    obj_to_delete_exit_on_null = write_vu_count == 0;
-
-    let constructor = obj_to_delete_exit_on_null ? registry.getOneshotSelector
-                                                 : registry.getSelector;
-
-    obj_to_delete_selector =
-        constructor(__ENV.REGISTRY_FILE, 'obj_to_delete',
-                    __ENV.SELECTION_SIZE ? parseInt(__ENV.SELECTION_SIZE) : 0, {
-                        status : 'created',
-                        age : delete_age,
-                    });
-}
-
 const read_vu_count = parseInt(__ENV.READERS || '0');
 if (read_vu_count > 0) {
     scenarios.read = {
-        executor : 'constant-vus',
-        vus : read_vu_count,
-        duration : `${duration}s`,
-        exec : 'obj_read',
-        gracefulStop : '5s',
+        executor: 'constant-vus',
+        vus: read_vu_count,
+        duration: `${duration}s`,
+        exec: 'obj_read',
+        gracefulStop: '5s',
     };
 }
 
 const delete_vu_count = parseInt(__ENV.DELETERS || '0');
 if (delete_vu_count > 0) {
     if (!obj_to_delete_selector) {
-        throw new Error(
-            'Positive DELETE worker number without a proper object selector');
+        throw new Error('Positive DELETE worker number without a proper object selector');
     }
 
     scenarios.delete = {
-        executor : 'constant-vus',
-        vus : delete_vu_count,
-        duration : `${duration}s`,
-        exec : 'obj_delete',
-        gracefulStop : '5s',
+        executor: 'constant-vus',
+        vus: delete_vu_count,
+        duration: `${duration}s`,
+        exec: 'obj_delete',
+        gracefulStop: '5s',
     };
 }
 
 export const options = {
     scenarios,
-    setupTimeout : '5s',
+    setupTimeout: '5s',
 };
 
 export function setup() {
     const total_vu_count = write_vu_count + read_vu_count + delete_vu_count;
 
     console.log(`Pregenerated containers: ${container_list.length}`);
     console.log(`Pregenerated read object size: ${read_size}`);
     console.log(`Pregenerated total objects: ${obj_list.length}`);
     console.log(`Reading VUs: ${read_vu_count}`);
     console.log(`Writing VUs: ${write_vu_count}`);
     console.log(`Deleting VUs: ${delete_vu_count}`);
     console.log(`Total VUs: ${total_vu_count}`);
-
-    const start_timestamp = Date.now()
-    console.log(`Load started at: ${Date(start_timestamp).toString()}`)
 }
 
 export function teardown(data) {
     if (obj_registry) {
         obj_registry.close();
     }
-    const end_timestamp = Date.now()
-    console.log(`Load finished at: ${Date(end_timestamp).toString()}`)
 }
 
 export function handleSummary(data) {
     return {
-        'stdout' : textSummary(data, {indent : ' ', enableColors : false}),
-        [summary_json] : JSON.stringify(data),
+        'stdout': textSummary(data, { indent: ' ', enableColors: false }),
+        [summary_json]: JSON.stringify(data),
     };
 }
 
 export function obj_write() {
     if (__ENV.SLEEP_WRITE) {
         sleep(__ENV.SLEEP_WRITE);
     }
 
-    const headers = {unique_header : uuidv4()};
-    const container =
-        container_list[Math.floor(Math.random() * container_list.length)];
+    const headers = {
+        unique_header: uuidv4()
+    };
+    const container = container_list[Math.floor(Math.random() * container_list.length)];
 
-    const payload = generator.genPayload();
-    const resp =
-        grpc_client.put(container, headers, payload, write_grpc_chunk_size);
+    const { payload, hash } = generator.genPayload(registry_enabled);
+    const resp = grpc_client.put(container, headers, payload);
     if (!resp.success) {
-        log.withField('cid', container).error(resp.error);
+        log.withField("cid", container).error(resp.error);
         return;
     }
 
     if (obj_registry) {
-        obj_registry.addObject(container, resp.object_id, '', '', payload.hash());
+        obj_registry.addObject(container, resp.object_id, "", "", hash);
     }
 }
 
 export function obj_read() {
     if (__ENV.SLEEP_READ) {
         sleep(__ENV.SLEEP_READ);
     }
 
-    if (obj_to_read_selector) {
-        const obj = obj_to_read_selector.nextObject();
-        if (!obj) {
-            return;
-        }
-        const resp = grpc_client.get(obj.c_id, obj.o_id)
-        if (!resp.success) {
-            log.withFields({cid : obj.c_id, oid : obj.o_id}).error(resp.error);
-        }
-        return
-    }
-
     const obj = obj_list[Math.floor(Math.random() * obj_list.length)];
     const resp = grpc_client.get(obj.container, obj.object)
     if (!resp.success) {
-        log.withFields({cid : obj.container, oid : obj.object}).error(resp.error);
+        log.withFields({cid: obj.container, oid: obj.object}).error(resp.error);
     }
 }
 
 export function obj_delete() {
     if (__ENV.SLEEP_DELETE) {
         sleep(__ENV.SLEEP_DELETE);
     }
 
     const obj = obj_to_delete_selector.nextObject();
     if (!obj) {
-        if (obj_to_delete_exit_on_null) {
-            exec.test.abort("No more objects to select");
-        }
         return;
     }
 
     const resp = grpc_client.delete(obj.c_id, obj.o_id);
     if (!resp.success) {
         // Log errors except (2052 - object already deleted)
-        log.withFields({cid : obj.c_id, oid : obj.o_id}).error(resp.error);
+        log.withFields({cid: obj.c_id, oid: obj.o_id}).error(resp.error);
         return;
     }
 
     obj_registry.deleteObject(obj.id);
 }
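The main functional difference in this hunk is how write payloads are produced and hashed. The fragment below is a sketch only, stitched together from the calls visible on the two sides of this diff rather than taken from any single working build; it assumes container, headers, grpc_client and obj_registry are set up as in the scenario above.

// Sketch, not a drop-in file: both call shapes are copied from this hunk.
// Removed side: the generator is created lazily, the payload object exposes hash(),
// and put() takes an explicit gRPC chunk size.
function writeWithLazyGenerator() {
    const generator = newGenerator(write_vu_count > 0);
    if (!generator) return;
    const payload = generator.genPayload();
    const resp = grpc_client.put(container, headers, payload, write_grpc_chunk_size);
    if (resp.success && obj_registry) {
        obj_registry.addObject(container, resp.object_id, '', '', payload.hash());
    }
}

// Added side: the generator is created eagerly; genPayload(registry_enabled) returns
// the payload together with a precomputed hash.
function writeWithEagerGenerator() {
    const generator = datagen.generator(1024 * parseInt(__ENV.WRITE_OBJ_SIZE), __ENV.PAYLOAD_TYPE || "");
    const { payload, hash } = generator.genPayload(registry_enabled);
    const resp = grpc_client.put(container, headers, payload);
    if (resp.success && obj_registry) {
        obj_registry.addObject(container, resp.object_id, "", "", hash);
    }
}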
@ -1,70 +1,53 @@
-import {sleep} from 'k6';
-import {SharedArray} from 'k6/data';
-import logging from 'k6/x/frostfs/logging';
+import datagen from 'k6/x/frostfs/datagen';
 import native from 'k6/x/frostfs/native';
+import logging from 'k6/x/frostfs/logging';
 import registry from 'k6/x/frostfs/registry';
-import stats from 'k6/x/frostfs/stats';
-import {newGenerator} from './libs/datagen.js';
-import {parseEnv} from './libs/env-parser.js';
-import {textSummary} from './libs/k6-summary-0.0.2.js';
-import {uuidv4} from './libs/k6-utils-1.4.0.js';
+import { SharedArray } from 'k6/data';
+import { sleep } from 'k6';
+import { textSummary } from './libs/k6-summary-0.0.2.js';
+import { parseEnv } from './libs/env-parser.js';
+import { uuidv4 } from './libs/k6-utils-1.4.0.js';
 
 parseEnv();
 
-const obj_list = new SharedArray('obj_list', function() {
+const obj_list = new SharedArray('obj_list', function () {
     return JSON.parse(open(__ENV.PREGEN_JSON)).objects;
 });
 
-const container_list = new SharedArray('container_list', function() {
+const container_list = new SharedArray('container_list', function () {
     return JSON.parse(open(__ENV.PREGEN_JSON)).containers;
 });
 
 const read_size = JSON.parse(open(__ENV.PREGEN_JSON)).obj_size;
-const summary_json = __ENV.SUMMARY_JSON || '/tmp/summary.json';
+const summary_json = __ENV.SUMMARY_JSON || "/tmp/summary.json";
 
 // Select random gRPC endpoint for current VU
 const grpc_endpoints = __ENV.GRPC_ENDPOINTS.split(',');
-const grpc_endpoint =
-    grpc_endpoints[Math.floor(Math.random() * grpc_endpoints.length)];
-const grpc_client = native.connect(
-    grpc_endpoint, '', __ENV.DIAL_TIMEOUT ? parseInt(__ENV.DIAL_TIMEOUT) : 5,
-    __ENV.STREAM_TIMEOUT ? parseInt(__ENV.STREAM_TIMEOUT) : 60,
-    __ENV.PREPARE_LOCALLY ? __ENV.PREPARE_LOCALLY.toLowerCase() === 'true' :
-        false);
-const log = logging.new().withField('endpoint', grpc_endpoint);
+const grpc_endpoint = grpc_endpoints[Math.floor(Math.random() * grpc_endpoints.length)];
+const grpc_client = native.connect(grpc_endpoint, '', __ENV.DIAL_TIMEOUT ? parseInt(__ENV.DIAL_TIMEOUT) : 5, __ENV.STREAM_TIMEOUT ? parseInt(__ENV.STREAM_TIMEOUT) : 60);
+const log = logging.new().withField("endpoint", grpc_endpoint);
 
 const registry_enabled = !!__ENV.REGISTRY_FILE;
-const obj_registry =
-    registry_enabled ? registry.open(__ENV.REGISTRY_FILE) : undefined;
+const obj_registry = registry_enabled ? registry.open(__ENV.REGISTRY_FILE) : undefined;
 
 const duration = __ENV.DURATION;
 
-if (!!__ENV.METRIC_TAGS) {
-    stats.setTags(__ENV.METRIC_TAGS)
-}
-
 const delete_age = __ENV.DELETE_AGE ? parseInt(__ENV.DELETE_AGE) : undefined;
 let obj_to_delete_selector = undefined;
 if (registry_enabled && delete_age) {
     obj_to_delete_selector = registry.getSelector(
-        __ENV.REGISTRY_FILE, 'obj_to_delete',
-        __ENV.SELECTION_SIZE ? parseInt(__ENV.SELECTION_SIZE) : 0, {
-            status: 'created',
-            age: delete_age,
-        });
+        __ENV.REGISTRY_FILE,
+        "obj_to_delete",
+        __ENV.SELECTION_SIZE ? parseInt(__ENV.SELECTION_SIZE) : 0,
+        {
+            status: "created",
+            age: delete_age,
+        }
+    );
 }
 
-const read_age = __ENV.READ_AGE ? parseInt(__ENV.READ_AGE) : 10;
-let obj_to_read_selector = undefined;
-if (registry_enabled) {
-    obj_to_read_selector = registry.getLoopedSelector(
-        __ENV.REGISTRY_FILE, 'obj_to_read',
-        __ENV.SELECTION_SIZE ? parseInt(__ENV.SELECTION_SIZE) : 0, {
-            status: 'created',
-            age: read_age,
-        })
-}
+const generator = datagen.generator(1024 * parseInt(__ENV.WRITE_OBJ_SIZE));
 
 const scenarios = {};
 
@ -72,167 +55,144 @@ const time_unit = __ENV.TIME_UNIT || '1s';
 const pre_alloc_write_vus = parseInt(__ENV.PRE_ALLOC_WRITERS || '0');
 const max_write_vus = parseInt(__ENV.MAX_WRITERS || pre_alloc_write_vus);
 const write_rate = parseInt(__ENV.WRITE_RATE || '0');
-const write_grpc_chunk_size = 1024 * parseInt(__ENV.GRPC_CHUNK_SIZE || '0')
-const generator = newGenerator(write_rate > 0);
 if (write_rate > 0) {
     scenarios.write = {
         executor: 'constant-arrival-rate',
         duration: `${duration}s`,
         preAllocatedVUs: pre_alloc_write_vus,
         maxVUs: max_write_vus,
         rate: write_rate,
         timeUnit: time_unit,
         exec: 'obj_write',
         gracefulStop: '5s',
     };
 }
 
 const pre_alloc_read_vus = parseInt(__ENV.PRE_ALLOC_READERS || '0');
 const max_read_vus = parseInt(__ENV.MAX_READERS || pre_alloc_read_vus);
 const read_rate = parseInt(__ENV.READ_RATE || '0');
 if (read_rate > 0) {
     scenarios.read = {
         executor: 'constant-arrival-rate',
         duration: `${duration}s`,
         preAllocatedVUs: pre_alloc_write_vus,
         maxVUs: max_read_vus,
         rate: read_rate,
         timeUnit: time_unit,
         exec: 'obj_read',
         gracefulStop: '5s',
     };
 }
 
 const pre_alloc_delete_vus = parseInt(__ENV.PRE_ALLOC_DELETERS || '0');
 const max_delete_vus = parseInt(__ENV.MAX_DELETERS || pre_alloc_write_vus);
 const delete_rate = parseInt(__ENV.DELETE_RATE || '0');
 if (delete_rate > 0) {
     if (!obj_to_delete_selector) {
-        throw new Error(
-            'Positive DELETE worker number without a proper object selector');
+        throw new Error('Positive DELETE worker number without a proper object selector');
     }
 
     scenarios.delete = {
         executor: 'constant-arrival-rate',
         duration: `${duration}s`,
         preAllocatedVUs: pre_alloc_delete_vus,
         maxVUs: max_delete_vus,
         rate: delete_rate,
         timeUnit: time_unit,
         exec: 'obj_delete',
         gracefulStop: '5s',
     };
 }
 
 export const options = {
     scenarios,
     setupTimeout: '5s',
 };
 
 export function setup() {
-    const total_pre_allocated_vu_count =
-        pre_alloc_write_vus + pre_alloc_read_vus + pre_alloc_delete_vus;
+    const total_pre_allocated_vu_count = pre_alloc_write_vus + pre_alloc_read_vus + pre_alloc_delete_vus;
     const total_max_vu_count = max_read_vus + max_write_vus + max_delete_vus
 
     console.log(`Pregenerated containers: ${container_list.length}`);
     console.log(`Pregenerated read object size: ${read_size}`);
     console.log(`Pregenerated total objects: ${obj_list.length}`);
     console.log(`Pre allocated reading VUs: ${pre_alloc_read_vus}`);
     console.log(`Pre allocated writing VUs: ${pre_alloc_write_vus}`);
     console.log(`Pre allocated deleting VUs: ${pre_alloc_delete_vus}`);
     console.log(`Total pre allocated VUs: ${total_pre_allocated_vu_count}`);
     console.log(`Max reading VUs: ${max_read_vus}`);
     console.log(`Max writing VUs: ${max_write_vus}`);
     console.log(`Max deleting VUs: ${max_delete_vus}`);
     console.log(`Total max VUs: ${total_max_vu_count}`);
     console.log(`Time unit: ${time_unit}`);
     console.log(`Read rate: ${read_rate}`);
     console.log(`Writing rate: ${write_rate}`);
     console.log(`Delete rate: ${delete_rate}`);
-
-    const start_timestamp = Date.now()
-    console.log(`Load started at: ${Date(start_timestamp).toString()}`)
 }
 
 export function teardown(data) {
     if (obj_registry) {
         obj_registry.close();
     }
-    const end_timestamp = Date.now()
-    console.log(`Load finished at: ${Date(end_timestamp).toString()}`)
 }
 
 export function handleSummary(data) {
     return {
-        'stdout': textSummary(data, {indent: ' ', enableColors: false}),
+        'stdout': textSummary(data, { indent: ' ', enableColors: false }),
         [summary_json]: JSON.stringify(data),
     };
 }
 
 export function obj_write() {
     if (__ENV.SLEEP_WRITE) {
         sleep(__ENV.SLEEP_WRITE);
     }
 
-    const headers = {unique_header: uuidv4()};
-    const container =
-        container_list[Math.floor(Math.random() * container_list.length)];
+    const headers = {
+        unique_header: uuidv4()
+    };
+    const container = container_list[Math.floor(Math.random() * container_list.length)];
 
-    const payload = generator.genPayload();
-    const resp =
-        grpc_client.put(container, headers, payload, write_grpc_chunk_size);
+    const { payload, hash } = generator.genPayload(registry_enabled);
+    const resp = grpc_client.put(container, headers, payload);
     if (!resp.success) {
-        log.withField('cid', container).error(resp.error);
+        log.withField("cid", container).error(resp.error);
         return;
     }
 
     if (obj_registry) {
-        obj_registry.addObject(container, resp.object_id, '', '', payload.hash());
+        obj_registry.addObject(container, resp.object_id, "", "", hash);
     }
 }
 
 export function obj_read() {
     if (__ENV.SLEEP_READ) {
         sleep(__ENV.SLEEP_READ);
     }
 
-    if (obj_to_read_selector) {
-        const obj = obj_to_read_selector.nextObject();
-        if (!obj) {
-            return;
-        }
-        const resp = grpc_client.get(obj.c_id, obj.o_id)
-        if (!resp.success) {
-            log.withFields({cid: obj.c_id, oid: obj.o_id}).error(resp.error);
-        }
-        return
-    }
-
     const obj = obj_list[Math.floor(Math.random() * obj_list.length)];
     const resp = grpc_client.get(obj.container, obj.object)
     if (!resp.success) {
         log.withFields({cid: obj.container, oid: obj.object}).error(resp.error);
     }
 }
 
 export function obj_delete() {
     if (__ENV.SLEEP_DELETE) {
         sleep(__ENV.SLEEP_DELETE);
     }
 
     const obj = obj_to_delete_selector.nextObject();
     if (!obj) {
         return;
     }
 
     const resp = grpc_client.delete(obj.c_id, obj.o_id);
     if (!resp.success) {
         // Log errors except (2052 - object already deleted)
         log.withFields({cid: obj.c_id, oid: obj.o_id}).error(resp.error);
         return;
     }
 
     obj_registry.deleteObject(obj.id);
 }
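For orientation, the rate-driven variant above maps a handful of environment variables onto k6's constant-arrival-rate executor. The snippet below simply restates that mapping for the write scenario; it is a sketch that assumes the same variable names used in this hunk (PRE_ALLOC_WRITERS, MAX_WRITERS, WRITE_RATE, TIME_UNIT, DURATION) and is not an addition to either side of the diff.

const scenarios = {};
const duration = __ENV.DURATION;
const time_unit = __ENV.TIME_UNIT || '1s';
const pre_alloc_write_vus = parseInt(__ENV.PRE_ALLOC_WRITERS || '0');
const max_write_vus = parseInt(__ENV.MAX_WRITERS || pre_alloc_write_vus);
const write_rate = parseInt(__ENV.WRITE_RATE || '0');
if (write_rate > 0) {
    scenarios.write = {
        executor: 'constant-arrival-rate', // k6 starts `rate` iterations per `timeUnit`
        duration: `${duration}s`,
        preAllocatedVUs: pre_alloc_write_vus, // VUs created up front
        maxVUs: max_write_vus,                // upper bound if the rate cannot be sustained
        rate: write_rate,
        timeUnit: time_unit,
        exec: 'obj_write',
        gracefulStop: '5s',
    };
}
export const options = { scenarios, setupTimeout: '5s' };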
@ -1,143 +1,123 @@
-import {sleep} from 'k6';
-import {SharedArray} from 'k6/data';
-import http from 'k6/http';
+import datagen from 'k6/x/frostfs/datagen';
 import logging from 'k6/x/frostfs/logging';
 import registry from 'k6/x/frostfs/registry';
-import stats from 'k6/x/frostfs/stats';
-import {newGenerator} from './libs/datagen.js';
-import {parseEnv} from './libs/env-parser.js';
-import {textSummary} from './libs/k6-summary-0.0.2.js';
-import {uuidv4} from './libs/k6-utils-1.4.0.js';
+import http from 'k6/http';
+import { SharedArray } from 'k6/data';
+import { sleep } from 'k6';
+import { textSummary } from './libs/k6-summary-0.0.2.js';
+import { parseEnv } from './libs/env-parser.js';
+import { uuidv4 } from './libs/k6-utils-1.4.0.js';
 
 parseEnv();
 
-const obj_list = new SharedArray('obj_list', function() {
+const obj_list = new SharedArray('obj_list', function () {
     return JSON.parse(open(__ENV.PREGEN_JSON)).objects;
 });
 
-const container_list = new SharedArray('container_list', function() {
+const container_list = new SharedArray('container_list', function () {
     return JSON.parse(open(__ENV.PREGEN_JSON)).containers;
 });
 
 const read_size = JSON.parse(open(__ENV.PREGEN_JSON)).obj_size;
-const summary_json = __ENV.SUMMARY_JSON || '/tmp/summary.json';
+const summary_json = __ENV.SUMMARY_JSON || "/tmp/summary.json";
 
 // Select random HTTP endpoint for current VU
 const http_endpoints = __ENV.HTTP_ENDPOINTS.split(',');
-const http_endpoint =
-    http_endpoints[Math.floor(Math.random() * http_endpoints.length)];
-const log = logging.new().withField('endpoint', http_endpoint);
+const http_endpoint = http_endpoints[Math.floor(Math.random() * http_endpoints.length)];
+const log = logging.new().withField("endpoint", http_endpoint);
 
 const registry_enabled = !!__ENV.REGISTRY_FILE;
-const obj_registry =
-    registry_enabled ? registry.open(__ENV.REGISTRY_FILE) : undefined;
+const obj_registry = registry_enabled ? registry.open(__ENV.REGISTRY_FILE) : undefined;
 
 const duration = __ENV.DURATION;
 
-if (!!__ENV.METRIC_TAGS) {
-    stats.setTags(__ENV.METRIC_TAGS)
-}
+const generator = datagen.generator(1024 * parseInt(__ENV.WRITE_OBJ_SIZE), __ENV.PAYLOAD_TYPE || "");
 
 const scenarios = {};
 
 const write_vu_count = parseInt(__ENV.WRITERS || '0');
-const generator = newGenerator(write_vu_count > 0);
 if (write_vu_count > 0) {
     scenarios.write = {
         executor: 'constant-vus',
         vus: write_vu_count,
         duration: `${duration}s`,
         exec: 'obj_write',
         gracefulStop: '5s',
     }
 }
 
 const read_vu_count = parseInt(__ENV.READERS || '0');
 if (read_vu_count > 0) {
     scenarios.read = {
         executor: 'constant-vus',
         vus: read_vu_count,
         duration: `${duration}s`,
         exec: 'obj_read',
         gracefulStop: '5s',
     }
 }
 
 export const options = {
     scenarios,
     setupTimeout: '5s',
 };
 
 export function setup() {
     const total_vu_count = write_vu_count + read_vu_count;
 
     console.log(`Pregenerated containers: ${container_list.length}`);
     console.log(`Pregenerated read object size: ${read_size}`);
     console.log(`Pregenerated total objects: ${obj_list.length}`);
     console.log(`Reading VUs: ${read_vu_count}`);
     console.log(`Writing VUs: ${write_vu_count}`);
     console.log(`Total VUs: ${total_vu_count}`);
-
-    const start_timestamp = Date.now()
-    console.log(`Load started at: ${Date(start_timestamp).toString()}`)
 }
 
 export function teardown(data) {
     if (obj_registry) {
         obj_registry.close();
     }
-    const end_timestamp = Date.now()
-    console.log(`Load finished at: ${Date(end_timestamp).toString()}`)
 }
 
 export function handleSummary(data) {
     return {
-        'stdout': textSummary(data, {indent: ' ', enableColors: false}),
+        'stdout': textSummary(data, { indent: ' ', enableColors: false }),
         [summary_json]: JSON.stringify(data),
     };
 }
 
 export function obj_write() {
     if (__ENV.SLEEP_WRITE) {
         sleep(__ENV.SLEEP_WRITE);
     }
 
-    const container =
-        container_list[Math.floor(Math.random() * container_list.length)];
+    const container = container_list[Math.floor(Math.random() * container_list.length)];
 
-    const payload = generator.genPayload();
+    const { payload, hash } = generator.genPayload(registry_enabled);
     const data = {
         field: uuidv4(),
-        // Because we use `file` wrapping and it is not straightforward to use
-        // streams here,
-        // `-e STREAMING=1` has no effect for this scenario.
-        file: http.file(payload.bytes(), 'random.data'),
+        file: http.file(payload, "random.data"),
     };
 
     const resp = http.post(`http://${http_endpoint}/upload/${container}`, data);
     if (resp.status != 200) {
         log.withFields({status: resp.status, cid: container}).error(resp.error);
         return;
     }
     const object_id = JSON.parse(resp.body).object_id;
     if (obj_registry) {
-        obj_registry.addObject(container, object_id, '', '', payload.hash());
+        obj_registry.addObject(container, object_id, "", "", hash);
     }
 }
 
 export function obj_read() {
     if (__ENV.SLEEP_READ) {
         sleep(__ENV.SLEEP_READ);
     }
 
     const obj = obj_list[Math.floor(Math.random() * obj_list.length)];
-    const resp =
-        http.get(`http://${http_endpoint}/get/${obj.container}/${obj.object}`);
+    const resp = http.get(`http://${http_endpoint}/get/${obj.container}/${obj.object}`);
     if (resp.status != 200) {
-        log.withFields({status: resp.status, cid: obj.container, oid: obj.object})
-            .error(resp.error);
+        log.withFields({status: resp.status, cid: obj.container, oid: obj.object}).error(resp.error);
     }
 }
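Both sides of this hunk push objects through the HTTP gateway as a multipart upload and read them back by container and object ID. The round trip below is a condensed sketch using the added side's generator API, assuming http_endpoint, container_list, generator, registry_enabled and log are set up as earlier in the file; the /upload and /get URL shapes are taken from the diff itself.

export function http_round_trip_sketch() {
    const container = container_list[Math.floor(Math.random() * container_list.length)];
    const { payload, hash } = generator.genPayload(registry_enabled);

    // Upload: the payload is wrapped with http.file() and sent as form data.
    const data = { field: uuidv4(), file: http.file(payload, "random.data") };
    const put = http.post(`http://${http_endpoint}/upload/${container}`, data);
    if (put.status != 200) {
        return;
    }

    // Read back: the gateway returns the new object ID in the JSON body.
    const object_id = JSON.parse(put.body).object_id;
    const get = http.get(`http://${http_endpoint}/get/${container}/${object_id}`);
    if (get.status != 200) {
        log.withFields({status: get.status, cid: container, oid: object_id}).error(get.error);
    }
}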
@ -1,8 +0,0 @@
-import datagen from 'k6/x/frostfs/datagen';
-
-export function newGenerator(condition) {
-    if (condition) {
-        return datagen.generator(1024 * parseInt(__ENV.WRITE_OBJ_SIZE), __ENV.PAYLOAD_TYPE || "", !!__ENV.STREAMING);
-    }
-    return undefined;
-}
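For context on the file removed above: the scenarios call this helper as a guard, so read-only runs never need WRITE_OBJ_SIZE. A minimal usage sketch, matching the calls shown elsewhere in this comparison rather than adding anything new:

import {newGenerator} from './libs/datagen.js';

const write_vu_count = parseInt(__ENV.WRITERS || '0');
// undefined when no writers are configured, so WRITE_OBJ_SIZE may stay unset
const generator = newGenerator(write_vu_count > 0);

export function obj_write_sketch() {
    if (!generator) return;
    const payload = generator.genPayload();
    // payload.hash() is what the scenarios record in the object registry after a successful put
}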
@ -1,177 +1,158 @@
-import {SharedArray} from 'k6/data';
-import exec from 'k6/execution';
+import datagen from 'k6/x/frostfs/datagen';
 import local from 'k6/x/frostfs/local';
 import logging from 'k6/x/frostfs/logging';
 import registry from 'k6/x/frostfs/registry';
-import stats from 'k6/x/frostfs/stats';
-import {newGenerator} from './libs/datagen.js';
-import {parseEnv} from './libs/env-parser.js';
-import {textSummary} from './libs/k6-summary-0.0.2.js';
-import {uuidv4} from './libs/k6-utils-1.4.0.js';
+import { SharedArray } from 'k6/data';
+import { textSummary } from './libs/k6-summary-0.0.2.js';
+import { parseEnv } from './libs/env-parser.js';
+import { uuidv4 } from './libs/k6-utils-1.4.0.js';
 
 parseEnv();
 
-const obj_list = new SharedArray('obj_list', function() {
+const obj_list = new SharedArray('obj_list', function () {
     return JSON.parse(open(__ENV.PREGEN_JSON)).objects;
 });
 
-const container_list = new SharedArray('container_list', function() {
+const container_list = new SharedArray('container_list', function () {
     return JSON.parse(open(__ENV.PREGEN_JSON)).containers;
 });
 
 const read_size = JSON.parse(open(__ENV.PREGEN_JSON)).obj_size;
-const summary_json = __ENV.SUMMARY_JSON || '/tmp/summary.json';
+const summary_json = __ENV.SUMMARY_JSON || "/tmp/summary.json";
 
 const config_file = __ENV.CONFIG_FILE;
-const config_dir = __ENV.CONFIG_DIR;
 const debug_logger = (__ENV.DEBUG_LOGGER || 'false') == 'true';
-const max_total_size_gb =
-    __ENV.MAX_TOTAL_SIZE_GB ? parseInt(__ENV.MAX_TOTAL_SIZE_GB) : 0;
-const local_client =
-    local.connect(config_file, config_dir, '', debug_logger, max_total_size_gb);
-const log = logging.new().withFields(
-    {'config_file': config_file, 'config_dir': config_dir});
+const local_client = local.connect(config_file, '', debug_logger);
+const log = logging.new().withField("config", config_file);
 
 const registry_enabled = !!__ENV.REGISTRY_FILE;
-const obj_registry =
-    registry_enabled ? registry.open(__ENV.REGISTRY_FILE) : undefined;
+const obj_registry = registry_enabled ? registry.open(__ENV.REGISTRY_FILE) : undefined;
 
 const duration = __ENV.DURATION;
 
-if (!!__ENV.METRIC_TAGS) {
-    stats.setTags(__ENV.METRIC_TAGS)
-}
-
 const delete_age = __ENV.DELETE_AGE ? parseInt(__ENV.DELETE_AGE) : undefined;
 let obj_to_delete_selector = undefined;
 if (registry_enabled && delete_age) {
     obj_to_delete_selector = registry.getSelector(
-        __ENV.REGISTRY_FILE, 'obj_to_delete',
-        __ENV.SELECTION_SIZE ? parseInt(__ENV.SELECTION_SIZE) : 0, {
-            status: 'created',
-            age: delete_age,
-        });
+        __ENV.REGISTRY_FILE,
+        "obj_to_delete",
+        __ENV.SELECTION_SIZE ? parseInt(__ENV.SELECTION_SIZE) : 0,
+        {
+            status: "created",
+            age: delete_age,
+        }
+    );
 }
 
+const generator = datagen.generator(1024 * parseInt(__ENV.WRITE_OBJ_SIZE));
 
 const scenarios = {};
 
 const write_vu_count = parseInt(__ENV.WRITERS || '0');
-const generator = newGenerator(write_vu_count > 0);
 if (write_vu_count > 0) {
     scenarios.write = {
         executor: 'constant-vus',
         vus: write_vu_count,
         duration: `${duration}s`,
         exec: 'obj_write',
         gracefulStop: '5s',
     };
 }
 
 const read_vu_count = parseInt(__ENV.READERS || '0');
 if (read_vu_count > 0) {
     scenarios.read = {
         executor: 'constant-vus',
         vus: read_vu_count,
         duration: `${duration}s`,
         exec: 'obj_read',
         gracefulStop: '5s',
     };
 }
 
 const delete_vu_count = parseInt(__ENV.DELETERS || '0');
 if (delete_vu_count > 0) {
     if (!obj_to_delete_selector) {
-        throw new Error(
-            'Positive DELETE worker number without a proper object selector');
+        throw new Error('Positive DELETE worker number without a proper object selector');
     }
 
     scenarios.delete = {
         executor: 'constant-vus',
         vus: delete_vu_count,
         duration: `${duration}s`,
         exec: 'obj_delete',
         gracefulStop: '5s',
     };
 }
 
 export const options = {
     scenarios,
     setupTimeout: '5s',
 };
 
 export function setup() {
     const total_vu_count = write_vu_count + read_vu_count + delete_vu_count;
 
     console.log(`Pregenerated containers: ${container_list.length}`);
     console.log(`Pregenerated read object size: ${read_size}`);
     console.log(`Pregenerated total objects: ${obj_list.length}`);
     console.log(`Reading VUs: ${read_vu_count}`);
     console.log(`Writing VUs: ${write_vu_count}`);
     console.log(`Deleting VUs: ${delete_vu_count}`);
     console.log(`Total VUs: ${total_vu_count}`);
-
-    const start_timestamp = Date.now()
-    console.log(`Load started at: ${Date(start_timestamp).toString()}`)
 }
 
 export function teardown(data) {
     if (obj_registry) {
         obj_registry.close();
     }
-    const end_timestamp = Date.now()
-    console.log(`Load finished at: ${Date(end_timestamp).toString()}`)
 }
 
 export function handleSummary(data) {
     return {
-        'stdout': textSummary(data, {indent: ' ', enableColors: false}),
+        'stdout': textSummary(data, { indent: ' ', enableColors: false }),
         [summary_json]: JSON.stringify(data),
     };
 }
 
 export function obj_write() {
-    const headers = {unique_header: uuidv4()};
-    const container =
-        container_list[Math.floor(Math.random() * container_list.length)];
+    const headers = {
+        unique_header: uuidv4()
+    };
+    const container = container_list[Math.floor(Math.random() * container_list.length)];
 
-    const payload = generator.genPayload();
+    const { payload, hash } = generator.genPayload(registry_enabled);
     const resp = local_client.put(container, headers, payload);
     if (!resp.success) {
-        if (resp.abort) {
-            exec.test.abort(resp.error);
-        }
-        log.withField('cid', container).error(resp.error);
+        log.withField("cid", container).error(resp.error);
         return;
     }
 
     if (obj_registry) {
-        obj_registry.addObject(container, resp.object_id, '', '', payload.hash());
+        obj_registry.addObject(container, resp.object_id, "", "", hash);
     }
 }
 
 export function obj_read() {
     const obj = obj_list[Math.floor(Math.random() * obj_list.length)];
     const resp = local_client.get(obj.container, obj.object)
     if (!resp.success) {
         log.withFields({cid: obj.container, oid: obj.object}).error(resp.error);
     }
 }
 
 export function obj_delete() {
     const obj = obj_to_delete_selector.nextObject();
     if (!obj) {
         return;
     }
 
     const resp = local_client.delete(obj.c_id, obj.o_id);
     if (!resp.success) {
         // Log errors except (2052 - object already deleted)
         log.withFields({cid: obj.c_id, oid: obj.o_id}).error(resp.error);
         return;
     }
 
     obj_registry.deleteObject(obj.id);
 }
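The delete path in this scenario is registry-driven on both sides of the hunk: candidates are objects recorded with status "created" that are at least DELETE_AGE seconds old. The sketch below restates that flow, assuming registry_enabled, local_client and obj_registry as defined above; nothing here is new to the diff.

const delete_age = __ENV.DELETE_AGE ? parseInt(__ENV.DELETE_AGE) : undefined;
let obj_to_delete_selector = undefined;
if (registry_enabled && delete_age) {
    // The selector iterates registry entries matching the filter, SELECTION_SIZE at a time.
    obj_to_delete_selector = registry.getSelector(
        __ENV.REGISTRY_FILE, 'obj_to_delete',
        __ENV.SELECTION_SIZE ? parseInt(__ENV.SELECTION_SIZE) : 0,
        { status: 'created', age: delete_age });
}

export function obj_delete_sketch() {
    const obj = obj_to_delete_selector.nextObject();
    if (!obj) {
        return; // nothing old enough yet
    }
    const resp = local_client.delete(obj.c_id, obj.o_id);
    if (resp.success) {
        obj_registry.deleteObject(obj.id);
    }
}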
@ -1,53 +1,45 @@
 import uuid
 
-from helpers.cmd import execute_cmd, log
+from helpers.cmd import execute_cmd
 
 
-def create_bucket(endpoint, versioning, location, acl, no_verify_ssl):
+def create_bucket(endpoint, versioning, location):
     if location:
         location = f"--create-bucket-configuration 'LocationConstraint={location}'"
-    if acl:
-        acl = f"--acl {acl}"
 
     bucket_name = str(uuid.uuid4())
-    no_verify_ssl_str = "--no-verify-ssl" if no_verify_ssl else ""
-    cmd_line = f"aws {no_verify_ssl_str} s3api create-bucket --bucket {bucket_name} " \
-               f"--endpoint {endpoint} {location} {acl} "
-    cmd_line_ver = f"aws {no_verify_ssl_str} s3api put-bucket-versioning --bucket {bucket_name} " \
-               f"--versioning-configuration Status=Enabled --endpoint {endpoint} {acl} "
 
-    output, success = execute_cmd(cmd_line)
+    cmd_line = f"aws --no-verify-ssl s3api create-bucket --bucket {bucket_name} " \
+               f"--endpoint http://{endpoint} {location}"
+    cmd_line_ver = f"aws --no-verify-ssl s3api put-bucket-versioning --bucket {bucket_name} " \
+               f"--versioning-configuration Status=Enabled --endpoint http://{endpoint} "
 
-    if not success and "succeeded and you already own it" not in output:
-        log(f"{cmd_line}\n"
-            f"Bucket {bucket_name} has not been created:\n"
-            f"Error: {output}", endpoint)
+    out, success = execute_cmd(cmd_line)
+
+    if not success and "succeeded and you already own it" not in out:
+        print(f" > Bucket {bucket_name} has not been created:\n{out}")
         return False
 
+    print(f"cmd: {cmd_line}")
+
     if versioning == "True":
-        output, success = execute_cmd(cmd_line_ver)
+        out, success = execute_cmd(cmd_line_ver)
         if not success:
-            log(f"{cmd_line_ver}\n"
-                f"Bucket versioning has not been applied for bucket {bucket_name}\n"
-                f"Error: {output}", endpoint)
+            print(f" > Bucket versioning has not been applied for bucket {bucket_name}:\n{out}")
         else:
-            log(f"Bucket versioning has been applied for bucket {bucket_name}", endpoint)
-
-    log(f"Created bucket: {bucket_name}", endpoint)
+            print(f" > Bucket versioning has been applied.")
 
     return bucket_name
 
 
-def upload_object(bucket, payload_filepath, endpoint, no_verify_ssl):
+def upload_object(bucket, payload_filepath, endpoint):
     object_name = str(uuid.uuid4())
-    no_verify_ssl_str = "--no-verify-ssl" if no_verify_ssl else ""
-    cmd_line = f"aws {no_verify_ssl_str} s3api put-object --bucket {bucket} --key {object_name} " \
-               f"--body {payload_filepath} --endpoint {endpoint}"
-    output, success = execute_cmd(cmd_line)
+    cmd_line = f"aws --no-verify-ssl s3api put-object --bucket {bucket} --key {object_name} " \
+               f"--body {payload_filepath} --endpoint http://{endpoint}"
+    out, success = execute_cmd(cmd_line)
 
     if not success:
-        log(f"{cmd_line}\n"
-            f"Object {object_name} has not been uploaded\n"
-            f"Error: {output}", endpoint)
+        print(f" > Object {object_name} has not been uploaded.")
         return False
-
-    return bucket, endpoint, object_name
+    else:
+        return object_name
@ -1,12 +1,9 @@
 import os
 import shlex
 import sys
-from datetime import datetime
 from subprocess import check_output, CalledProcessError, STDOUT
 
-def log(message, endpoint):
-    time = datetime.utcnow()
-    print(f"{time} at {endpoint}: {message}")
-
 def execute_cmd(cmd_line):
     cmd_args = shlex.split(cmd_line)
 
@ -1,163 +1,81 @@
 import re
-from helpers.cmd import execute_cmd, log
 
-def create_container(endpoint, policy, wallet_path, config, acl, local=False, depth=0):
-    if depth > 20:
-        raise ValueError(f"unable to create container: too many unsuccessful attempts")
-
-    if wallet_path:
-        wallet_file = f"--wallet {wallet_path}"
-    if config:
-        wallet_config = f"--config {config}"
-    if acl:
-        acl_param = f"--basic-acl {acl}"
-    cmd_line = f"frostfs-cli --rpc-endpoint {endpoint} container create {wallet_file} {wallet_config} " \
-        f" --policy '{policy}' {acl_param} --await"
+from helpers.cmd import execute_cmd
+
+
+def create_container(endpoint, policy, wallet_file, wallet_config):
+    cmd_line = f"frostfs-cli --rpc-endpoint {endpoint} container create --wallet {wallet_file} --config {wallet_config} " \
+        f" --policy '{policy}' --basic-acl public-read-write --await"
 
     output, success = execute_cmd(cmd_line)
 
     if not success:
-        log(f"{cmd_line}\n"
-            f"Container has not been created\n"
-            f"{output}", endpoint)
+        print(f" > Container has not been created:\n{output}")
         return False
-
-    try:
-        fst_str = output.split('\n')[0]
-    except Exception:
-        log(f"{cmd_line}\n"
-            f"Incorrect output\n"
-            f"Output: {output or '<empty>'}", endpoint)
-        return False
-    splitted = fst_str.split(": ")
-    if len(splitted) != 2:
-        raise ValueError(f"no CID was parsed from command output:\t{fst_str}")
-    cid = splitted[1]
-
-    log(f"Created container {cid}", endpoint)
-
-    if not local:
-        return cid
-
-    cmd_line = f"frostfs-cli netmap nodeinfo --rpc-endpoint {endpoint} {wallet_file} {wallet_config}"
-    output, success = execute_cmd(cmd_line)
-
-    if not success:
-        log(f"{cmd_line}\n"
-            f"Failed to get nodeinfo\n"
-            f"{output}", endpoint)
-        return False
-
-    try:
-        fst_str = output.split('\n')[0]
-    except Exception:
-        log(f"{cmd_line}\n"
-            f"Incorrect output\n"
-            f"Output: {output or '<empty>'}", endpoint)
-        return False
-    splitted = fst_str.split(": ")
-    if len(splitted) != 2 or len(splitted[1]) == 0:
-        raise ValueError(f"no node key was parsed from command output:\t{fst_str}")
-
-    node_key = splitted[1]
-
-    cmd_line = f"frostfs-cli container nodes --rpc-endpoint {endpoint} {wallet_file} {wallet_config} --cid {cid}"
-    output, success = execute_cmd(cmd_line)
-
-    if not success:
-        log(f"{cmd_line}\n"
-            f"Failed to get container nodes\n"
-            f"{output}", endpoint)
-        return False
-
-    for output_str in output.split('\n'):
-        output_str = output_str.lstrip().rstrip()
-        if not output_str.startswith("Node "):
-            continue
-        splitted = output_str.split(": ")
-        if len(splitted) != 2 or len(splitted[1]) == 0:
-            continue
+    else:
         try:
-            k = splitted[1].split(" ")[0]
+            fst_str = output.split('\n')[0]
         except Exception:
-            log(f"{cmd_line}\n"
-                f"Incorrect output\n"
-                f"Output: {output or '<empty>'}", endpoint)
-            continue
-        if k == node_key:
-            return cid
-
-    log(f"Created container {cid} is not stored on {endpoint}, creating another one...", endpoint)
-    return create_container(endpoint, policy, wallet_path, config, acl, local, depth + 1)
+            print(f"Got empty output: {output}")
+            return False
+        splitted = fst_str.split(": ")
+        if len(splitted) != 2:
+            raise ValueError(f"no CID was parsed from command output: \t{fst_str}")
+
+        print(f"Created container: {splitted[1]}")
+
+        return splitted[1]
 
 
 def upload_object(container, payload_filepath, endpoint, wallet_file, wallet_config):
     object_name = ""
-    if wallet_file:
-        wallet_file = "--wallet " + wallet_file
-    if wallet_config:
-        wallet_config = "--config " + wallet_config
-    cmd_line = f"frostfs-cli --rpc-endpoint {endpoint} object put --file {payload_filepath} {wallet_file} {wallet_config} " \
+    cmd_line = f"frostfs-cli --rpc-endpoint {endpoint} object put --file {payload_filepath} --wallet {wallet_file} --config {wallet_config} " \
         f"--cid {container} --no-progress"
     output, success = execute_cmd(cmd_line)
 
     if not success:
-        log(f"{cmd_line}\n"
-            f"Object {object_name} has not been uploaded\n"
-            f"Error: {output}", endpoint)
+        print(f" > Object {object_name} has not been uploaded:\n{output}")
         return False
-
-    try:
-        # taking second string from command output
-        snd_str = output.split('\n')[1]
-    except Exception:
-        log(f"{cmd_line}\n"
-            f"Incorrect output\n"
-            f"Output: {output or '<empty>'}", endpoint)
-        return False
-    splitted = snd_str.split(": ")
-    if len(splitted) != 2:
-        raise Exception(f"no OID was parsed from command output: \t{snd_str}")
-    return container, endpoint, splitted[1]
+    else:
+        try:
+            # taking second string from command output
+            snd_str = output.split('\n')[1]
+        except Exception:
+            print(f"Got empty input: {output}")
+            return False
+        splitted = snd_str.split(": ")
+        if len(splitted) != 2:
+            raise Exception(f"no OID was parsed from command output: \t{snd_str}")
+        return splitted[1]
 
 
 def get_object(cid, oid, endpoint, out_filepath, wallet_file, wallet_config):
-    if wallet_file:
-        wallet_file = "--wallet " + wallet_file
-    if wallet_config:
-        wallet_config = "--config " + wallet_config
-    cmd_line = f"frostfs-cli object get -r {endpoint} --cid {cid} --oid {oid} {wallet_file} {wallet_config} " \
+    cmd_line = f"frostfs-cli object get -r {endpoint} --cid {cid} --oid {oid} --wallet {wallet_file} --config {wallet_config} " \
         f"--file {out_filepath}"
 
     output, success = execute_cmd(cmd_line)
 
     if not success:
-        log(f"{cmd_line}\n"
-            f"Failed to get object {oid} from container {cid}\n"
-            f"Error: {output}", endpoint)
+        print(f" > Failed to get object {output} from container {cid} \r\n"
+              f" > Error: {output}")
         return False
 
     return True
 
 
 def search_object_by_id(cid, oid, endpoint, wallet_file, wallet_config, ttl=2):
-    if wallet_file:
-        wallet_file = "--wallet " + wallet_file
-    if wallet_config:
-        wallet_config = "--config " + wallet_config
-    cmd_line = f"frostfs-cli object search --ttl {ttl} -r {endpoint} --cid {cid} --oid {oid} {wallet_file} {wallet_config} "
+    cmd_line = f"frostfs-cli object search --ttl {ttl} -r {endpoint} --cid {cid} --oid {oid} --wallet {wallet_file} --config {wallet_config} "
 
     output, success = execute_cmd(cmd_line)
 
     if not success:
-        log(f"{cmd_line}\n"
-            f"Failed to search object {oid} for container {cid}\n"
-            f"Error: {output}", endpoint)
+        print(f" > Failed to search object {oid} for container {cid} \r\n"
+              f" > Error: {output}")
         return False
 
     re_rst = re.search(r'Found (\d+) objects', output)
 
     if not re_rst:
-        raise Exception("Failed to parse search results")
+        raise Exception("Failed to parce search results")
 
     return re_rst.group(1)
 
@ -1,14 +1,14 @@
#!/usr/bin/python3

import argparse
from itertools import cycle
import json
import sys
import tempfile
import time

from argparse import Namespace
from concurrent.futures import ProcessPoolExecutor

from helpers.cmd import random_payload
from helpers.frostfs_cli import create_container, upload_object

@ -28,21 +28,17 @@ parser.add_argument(
    help="Container placement policy",
    default="REP 2 IN X CBF 2 SELECT 2 FROM * AS X"
)
parser.add_argument('--endpoint', help='Nodes addresses separated by comma.')
parser.add_argument('--update', help='Save existed containers')
parser.add_argument('--ignore-errors', help='Ignore preset errors', action='store_true')
parser.add_argument('--workers', help='Count of workers in preset. Max = 50, Default = 50', default=50)
parser.add_argument('--sleep', help='Time to sleep between containers creation and objects upload (in seconds), '
                                    'Default = 8', default=8)
parser.add_argument('--local', help='Create containers that store data on provided endpoints. Warning: additional empty containers may be created.', action='store_true')
parser.add_argument('--acl', help='Container ACL. Default is public-read-write.', default='public-read-write')

args: Namespace = parser.parse_args()
print(args)


def main():
    containers = []
    objects_list = []

    endpoints = args.endpoint.split(',')

@ -52,73 +48,63 @@ def main():
    workers = int(args.workers)
    objects_per_container = int(args.preload_obj)

    ignore_errors = args.ignore_errors
    if args.update:
        # Open file
        with open(args.out) as f:
            data_json = json.load(f)
            containers = data_json['containers']
            containers_count = len(containers)
    else:
        containers_count = int(args.containers)
        print(f"Create containers: {containers_count}")
        with ProcessPoolExecutor(max_workers=min(MAX_WORKERS, workers)) as executor:
            containers_runs = [executor.submit(create_container, endpoint, args.policy, wallet, wallet_config, args.acl, args.local)
                               for _, endpoint in
                               zip(range(containers_count), cycle(endpoints))]

            for run in containers_runs:
                container_id = run.result()
                if container_id:
                    containers.append(container_id)

        print("Create containers: Completed")

    print(f" > Containers: {containers}")
    if containers_count > 0 and len(containers) != containers_count:
        print(f"Containers mismatch in preset: expected {containers_count}, created {len(containers)}")
        if not ignore_errors:
            sys.exit(ERROR_WRONG_CONTAINERS_COUNT)

    if args.sleep != 0:
        print(f"Sleep for {args.sleep} seconds")
        time.sleep(args.sleep)

    print(f"Upload objects to each container: {args.preload_obj} ")
    payload_file = tempfile.NamedTemporaryFile()
    random_payload(payload_file, args.size)
    print(" > Create random payload: Completed")

    total_objects = objects_per_container * containers_count
    with ProcessPoolExecutor(max_workers=min(MAX_WORKERS, workers)) as executor:
        objects_runs = [executor.submit(upload_object, container, payload_file.name,
                                        endpoint, wallet, wallet_config)
                        for _, container, endpoint in
                        zip(range(total_objects), cycle(containers), cycle(endpoints))]

        for run in objects_runs:
            result = run.result()
            if result:
                container_id = result[0]
                endpoint = result[1]
                object_id = result[2]
                objects_list.append({'container': container_id, 'object': object_id})
                print(f" > Uploaded object {object_id} for container {container_id} via endpoint {endpoint}.")

    print("Upload objects to each container: Completed")

    if total_objects > 0 and len(objects_list) != total_objects:
        print(f"Objects mismatch in preset: expected {total_objects}, created {len(objects_list)}")
        if not ignore_errors:
            sys.exit(ERROR_WRONG_OBJECTS_COUNT)

    data = {'containers': containers, 'objects': objects_list, 'obj_size': args.size + " Kb"}

    with open(args.out, 'w+') as f:
        json.dump(data, f, ensure_ascii=False, indent=2)

    print("Result:")
    print(f" > Total Containers has been created: {len(containers)}.")
    print(f" > Total Objects has been created: {len(objects_list)}.")
@ -1,11 +1,9 @@
#!/usr/bin/python3

import argparse
from itertools import cycle
import json
import sys
import tempfile
import time
from concurrent.futures import ProcessPoolExecutor

from helpers.cmd import random_payload

@ -17,17 +15,13 @@ parser.add_argument('--size', help='Upload objects size in kb.')
parser.add_argument('--buckets', help='Number of buckets to create.')
parser.add_argument('--out', help='JSON file with output.')
parser.add_argument('--preload_obj', help='Number of pre-loaded objects.')
parser.add_argument('--endpoint', help='S3 Gateways addresses separated by comma.')
parser.add_argument('--update', help='True/False, False by default. Save existed buckets from target file (--out). '
                                     'New buckets will not be created.')
parser.add_argument('--location', help='AWS location. Will be empty, if has not be declared.', default="")
parser.add_argument('--versioning', help='True/False, False by default.')
parser.add_argument('--ignore-errors', help='Ignore preset errors', action='store_true')
parser.add_argument('--no-verify-ssl', help='Ignore SSL verifications', action='store_true')
parser.add_argument('--workers', help='Count of workers in preset. Max = 50, Default = 50', default=50)
parser.add_argument('--sleep', help='Time to sleep between buckets creation and objects upload (in seconds), '
                                    'Default = 8', default=8)
parser.add_argument('--acl', help='Bucket ACL. Default is private. Expected values are: private, public-read or public-read-write.', default="private")

args = parser.parse_args()
print(args)

@ -37,12 +31,9 @@ ERROR_WRONG_OBJECTS_COUNT = 2
MAX_WORKERS = 50

def main():
    buckets = []
    objects_list = []
    ignore_errors = args.ignore_errors
    no_verify_ssl = args.no_verify_ssl

    endpoints = args.endpoint.split(',')

    workers = int(args.workers)
    objects_per_bucket = int(args.preload_obj)

@ -51,68 +42,60 @@ def main():
        # Open file
        with open(args.out) as f:
            data_json = json.load(f)
            buckets = data_json['buckets']
            buckets_count = len(buckets)
            # Get CID list
    else:
        buckets_count = int(args.buckets)
        print(f"Create buckets: {buckets_count}")

        with ProcessPoolExecutor(max_workers=min(MAX_WORKERS, workers)) as executor:
            buckets_runs = [executor.submit(create_bucket, endpoint, args.versioning, args.location, args.acl, no_verify_ssl)
                            for _, endpoint in
                            zip(range(buckets_count), cycle(endpoints))]

            for run in buckets_runs:
                bucket_name = run.result()
                if bucket_name:
                    buckets.append(bucket_name)

        print("Create buckets: Completed")

    print(f" > Buckets: {buckets}")
    if buckets_count > 0 and len(buckets) != buckets_count:
        print(f"Buckets mismatch in preset: expected {buckets_count}, created {len(buckets)}")
        if not ignore_errors:
            sys.exit(ERROR_WRONG_CONTAINERS_COUNT)

    if args.sleep != 0:
        print(f"Sleep for {args.sleep} seconds")
        time.sleep(args.sleep)

    print(f"Upload objects to each bucket: {objects_per_bucket} ")
    payload_file = tempfile.NamedTemporaryFile()
    random_payload(payload_file, args.size)
    print(" > Create random payload: Completed")

    total_objects = objects_per_bucket * buckets_count

    with ProcessPoolExecutor(max_workers=min(MAX_WORKERS, workers)) as executor:
        objects_runs = [executor.submit(upload_object, bucket, payload_file.name, endpoint, no_verify_ssl)
                        for _, bucket, endpoint in
                        zip(range(total_objects), cycle(buckets), cycle(endpoints))]

        for run in objects_runs:
            result = run.result()
            if result:
                bucket = result[0]
                endpoint = result[1]
                object_id = result[2]
                objects_list.append({'bucket': bucket, 'object': object_id})
                print(f" > Uploaded object {object_id} for bucket {bucket} via endpoint {endpoint}.")

    if total_objects > 0 and len(objects_list) != total_objects:
        print(f"Objects mismatch in preset: expected {total_objects}, created {len(objects_list)}")
        if not ignore_errors:
            sys.exit(ERROR_WRONG_OBJECTS_COUNT)

    data = {'buckets': buckets, 'objects': objects_list, 'obj_size': args.size + " Kb"}

    with open(args.out, 'w+') as f:
        json.dump(data, f, ensure_ascii=False, indent=2)

    print("Result:")
    print(f" > Total Buckets has been created: {len(buckets)}.")
    print(f" > Total Objects has been created: {len(objects_list)}.")
@ -2,8 +2,7 @@
import argparse
import json
import http.client
import ssl

parser = argparse.ArgumentParser()
parser.add_argument('--endpoint', help='Endpoint of the S3 gateway')

@ -17,13 +16,10 @@ def main():
    preset = json.loads(preset_text)

    conn = http.client.HTTPSConnection(args.endpoint, context = ssl._create_unverified_context())
    containers = []
    for bucket in preset.get('buckets'):
        conn.request("HEAD", f'/{bucket}')
        response = conn.getresponse()
        containers.append(response.getheader('X-Container-Id'))
        response.read()

    preset['containers'] = containers
    with open(args.preset_file, 'w+') as f:
@ -19,8 +19,6 @@ Scenarios `grpc.js`, `local.js`, `http.js` and `s3.js` support the following options:

* `SLEEP_READ` - time interval (in seconds) between reading VU iterations.
* `SELECTION_SIZE` - size of batch to select for deletion (default: 1000).
* `PAYLOAD_TYPE` - type of an object payload ("random" or "text", default: "random").
* `STREAMING` - if set, the payload is generated on the fly and is not read into memory fully.
* `METRIC_TAGS` - custom metrics tags (format `tag1:value1;tag2:value2`); see the example below.

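For illustration, here is one way the common options above can be combined in a single run of the S3 scenario (a hedged sketch: the endpoint addresses, file names, sizes, and tag values are placeholders, not taken from the repository docs):

```shell
$ ./k6 run -e DURATION=60 -e WRITERS=8 -e READERS=8 -e WRITE_OBJ_SIZE=1024 \
    -e SLEEP_WRITE=1 -e SLEEP_READ=1 \
    -e PAYLOAD_TYPE=text -e STREAMING=1 \
    -e METRIC_TAGS="instance:loader1;run:smoke" \
    -e REGISTRY_FILE=registry.bolt -e PREGEN_JSON=./s3.json \
    -e S3_ENDPOINTS=host1:8084,host2:8084 \
    scenarios/s3.js
```

The same `-e NAME=value` form is how every option in this document is passed to `k6`.
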
Additionally, the profiling extension can be enabled to generate CPU and memory profiles which can be inspected with `go tool pprof file.prof`:
```shell

@ -71,15 +69,13 @@ $ ./scenarios/preset/preset_grpc.py --size 1024 --containers 1 --out grpc.json -
2. Execute scenario with options:

```shell
$ ./k6 run -e DURATION=60 -e WRITE_OBJ_SIZE=8192 -e READERS=20 -e WRITERS=20 -e DELETERS=30 -e DELETE_AGE=10 -e REGISTRY_FILE=registry.bolt -e CONFIG_FILE=/path/to/config.yaml -e CONFIG_DIR=/path/to/dir/ -e PREGEN_JSON=./grpc.json scenarios/local.js
```

Options (in addition to the common options):
* `CONFIG_FILE` - path to the local configuration file used for the storage node. Only the storage configuration section is used.
* `CONFIG_DIR` - path to the folder with local configuration files used for the storage node.
* `DELETERS` - number of VUs performing delete operations (using deleters requires that options `DELETE_AGE` and `REGISTRY_FILE` are specified as well).
* `DELETE_AGE` - age of object in seconds before which it can not be deleted. This parameter can be used to control how many objects we have in the system under load.
* `MAX_TOTAL_SIZE_GB` - if specified, max payload size in GB of the storage engine. If the storage engine is already full, no new objects will be saved. An example run follows this list.

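As an illustrative variation of the command above (the cap value and paths are placeholders), the storage engine can be limited so the run stops adding data once the cap is reached:

```shell
$ ./k6 run -e DURATION=60 -e WRITE_OBJ_SIZE=8192 -e WRITERS=20 -e READERS=20 \
    -e MAX_TOTAL_SIZE_GB=32 -e CONFIG_FILE=/path/to/config.yaml -e CONFIG_DIR=/path/to/dir/ \
    -e PREGEN_JSON=./grpc.json scenarios/local.js
```
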
## HTTP

@ -139,31 +135,6 @@ Options (in addition to the common options):
* `SLEEP_DELETE` - time interval (in seconds) between deleting VU iterations.
* `OBJ_NAME` - if specified, this name will be used for all write operations instead of random generation.

## S3 Multipart

Perform multipart uploads: large objects are broken up into parts so they can be transferred in parallel.

```shell
$ ./k6 run -e DURATION=600 \
  -e WRITERS=400 -e WRITERS_MULTIPART=10 \
  -e WRITE_OBJ_SIZE=524288 -e WRITE_OBJ_PART_SIZE=10240 \
  -e S3_ENDPOINTS=10.78.70.142:8084,10.78.70.143:8084,10.78.70.144:8084,10.78.70.145:8084 \
  -e PREGEN_JSON=/home/service/s3_4kb.json \
  scenarios/s3_multipart.js
```

Options:
* `DURATION` - duration of scenario in seconds.
* `REGISTRY_FILE` - if set, all produced objects will be stored in database for subsequent verification. Database file name will be set to the value of `REGISTRY_FILE`.
* `PREGEN_JSON` - path to json file with pre-generated containers.
* `SLEEP_WRITE` - time interval (in seconds) between writing VU iterations.
* `PAYLOAD_TYPE` - type of an object payload ("random" or "text", default: "random").
* `S3_ENDPOINTS` - endpoints of S3 gateways in format `host:port`. To specify multiple endpoints separate them by comma.
* `WRITERS` - number of VUs performing upload payload operation.
* `WRITERS_MULTIPART` - number of goroutines that will upload parts in parallel.
* `WRITE_OBJ_SIZE` - object size in kb for write (PUT) operations.
* `WRITE_OBJ_PART_SIZE` - part size in kb for multipart upload operations (must be greater than or equal to 5 MB). A worked sizing example follows this list.

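As a rough worked example using the invocation above: with `WRITE_OBJ_SIZE=524288` and `WRITE_OBJ_PART_SIZE=10240` (both in kb), each object is split into roughly 524288 / 10240 ≈ 51.2, i.e. 52 parts, with the last part smaller than the rest.
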
## S3 Local

1. Follow steps 1. and 2. from the normal S3 scenario in order to obtain credentials and a preset file with the information about the buckets and objects that were pre-created.

@ -179,50 +150,13 @@ After this, the `pregen.json` file will contain a `containers` list field the same way as for the gRPC scenario.

3. Execute the scenario with the desired options. For example:
```shell
$ ./k6 run -e DURATION=60 -e WRITE_OBJ_SIZE=8192 -e READERS=20 -e WRITERS=20 -e CONFIG_FILE=/path/to/node/config.yml -e CONFIG_DIR=/path/to/dir/ -e PREGEN_JSON=pregen.json scenarios/s3local.js
```

Note that the `s3local` scenario currently does not support deleters.

Options (in addition to the common options):
* `OBJ_NAME` - if specified, this name will be used for all write operations instead of random generation.
* `MAX_TOTAL_SIZE_GB` - if specified, max payload size in GB of the storage engine. If the storage engine is already full, no new objects will be saved.

## Export metrics

To export metrics to Prometheus (Grafana and Victoria Metrics also support the Prometheus format), you need to run `k6` with the option `-o experimental-prometheus-rw` and
an environment variable `K6_PROMETHEUS_RW_SERVER_URL` whose value corresponds to the URL for the remote write endpoint.
To specify percentiles for trend metrics, use the environment variable `K6_PROMETHEUS_RW_TREND_STATS`.
See [k6 docs](https://k6.io/docs/results-output/real-time/prometheus-remote-write/) for a list of all possible options.
To distinguish metrics from different loaders, use the `METRIC_TAGS` option. These tags do not apply to built-in `k6` metrics.

Example:
```bash
K6_PROMETHEUS_RW_SERVER_URL=http://host:8428/api/v1/write \
K6_PROMETHEUS_RW_TREND_STATS="p(95),p(99),min,max" \
./k6 run ... -o experimental-prometheus-rw -e METRIC_TAGS="instance:server1;run:run1" scenario.js
```

## Grafana annotations

There is no option to export Grafana annotations, but it can be easily done with `curl` and Grafana's annotations API.
Example:
```shell
curl --request POST \
  --url https://user:password@grafana.host/api/annotations \
  --header 'Content-Type: application/json' \
  --data '{
  "dashboardUID": "YsVWNpMIk",
  "time": 1706533045014,
  "timeEnd": 1706533085100,
  "tags": [
    "tag1",
    "tag2"
  ],
  "text": "Test annotation"
}'
```
See [Grafana docs](https://grafana.com/docs/grafana/latest/developers/http_api/annotations/) for details.

## Verify

262 scenarios/s3.js

@ -1,220 +1,170 @@
import {sleep} from 'k6';
import {SharedArray} from 'k6/data';
import exec from 'k6/execution';
import logging from 'k6/x/frostfs/logging';
import registry from 'k6/x/frostfs/registry';
import s3 from 'k6/x/frostfs/s3';
import stats from 'k6/x/frostfs/stats';

import {newGenerator} from './libs/datagen.js';
import {parseEnv} from './libs/env-parser.js';
import {textSummary} from './libs/k6-summary-0.0.2.js';
import {uuidv4} from './libs/k6-utils-1.4.0.js';

parseEnv();

const obj_list = new SharedArray(
    'obj_list',
    function() { return JSON.parse(open(__ENV.PREGEN_JSON)).objects; });

const bucket_list = new SharedArray(
    'bucket_list',
    function() { return JSON.parse(open(__ENV.PREGEN_JSON)).buckets; });

const read_size = JSON.parse(open(__ENV.PREGEN_JSON)).obj_size;
const summary_json = __ENV.SUMMARY_JSON || '/tmp/summary.json';

const no_verify_ssl = __ENV.NO_VERIFY_SSL || 'true';
const connection_args = {
  no_verify_ssl : no_verify_ssl
}
// Select random S3 endpoint for current VU
const s3_endpoints = __ENV.S3_ENDPOINTS.split(',');
const s3_endpoint =
    s3_endpoints[Math.floor(Math.random() * s3_endpoints.length)];
const s3_client = s3.connect(s3_endpoint, connection_args);
const log = logging.new().withField('endpoint', s3_endpoint);

const registry_enabled = !!__ENV.REGISTRY_FILE;
const obj_registry =
    registry_enabled ? registry.open(__ENV.REGISTRY_FILE) : undefined;

const duration = __ENV.DURATION;

if (!!__ENV.METRIC_TAGS) {
  stats.setTags(__ENV.METRIC_TAGS)
}

const read_age = __ENV.READ_AGE ? parseInt(__ENV.READ_AGE) : 10;
let obj_to_read_selector = undefined;
if (registry_enabled) {
  obj_to_read_selector = registry.getLoopedSelector(
      __ENV.REGISTRY_FILE, 'obj_to_read',
      __ENV.SELECTION_SIZE ? parseInt(__ENV.SELECTION_SIZE) : 0, {
        status : 'created',
        age : read_age,
      })
}

const scenarios = {};

const write_vu_count = parseInt(__ENV.WRITERS || '0');
const generator = newGenerator(write_vu_count > 0);
if (write_vu_count > 0) {
  scenarios.write = {
    executor : 'constant-vus',
    vus : write_vu_count,
    duration : `${duration}s`,
    exec : 'obj_write',
    gracefulStop : '5s',
  };
}

const delete_age = __ENV.DELETE_AGE ? parseInt(__ENV.DELETE_AGE) : undefined;
let obj_to_delete_selector = undefined;
let obj_to_delete_exit_on_null = undefined;
if (registry_enabled && delete_age) {
  obj_to_delete_exit_on_null = write_vu_count == 0;

  let constructor = obj_to_delete_exit_on_null ? registry.getOneshotSelector
                                               : registry.getSelector;

  obj_to_delete_selector =
      constructor(__ENV.REGISTRY_FILE, 'obj_to_delete',
                  __ENV.SELECTION_SIZE ? parseInt(__ENV.SELECTION_SIZE) : 0, {
                    status : 'created',
                    age : delete_age,
                  });
}

const read_vu_count = parseInt(__ENV.READERS || '0');
if (read_vu_count > 0) {
  scenarios.read = {
    executor : 'constant-vus',
    vus : read_vu_count,
    duration : `${duration}s`,
    exec : 'obj_read',
    gracefulStop : '5s',
  };
}

const delete_vu_count = parseInt(__ENV.DELETERS || '0');
if (delete_vu_count > 0) {
  if (!obj_to_delete_selector) {
    throw 'Positive DELETE worker number without a proper object selector';
  }

  scenarios.delete = {
    executor : 'constant-vus',
    vus : delete_vu_count,
    duration : `${duration}s`,
    exec : 'obj_delete',
    gracefulStop : '5s',
  };
}

export const options = {
  scenarios,
  setupTimeout : '5s',
};

export function setup() {
  const total_vu_count = write_vu_count + read_vu_count + delete_vu_count;

  console.log(`Pregenerated buckets: ${bucket_list.length}`);
  console.log(`Pregenerated read object size: ${read_size}`);
  console.log(`Pregenerated total objects: ${obj_list.length}`);
  console.log(`Reading VUs: ${read_vu_count}`);
  console.log(`Writing VUs: ${write_vu_count}`);
  console.log(`Deleting VUs: ${delete_vu_count}`);
  console.log(`Total VUs: ${total_vu_count}`);

  const start_timestamp = Date.now()
  console.log(
      `Load started at: ${Date(start_timestamp).toString()}`)
}

export function teardown(data) {
  if (obj_registry) {
    obj_registry.close();
  }
  const end_timestamp = Date.now()
  console.log(
      `Load finished at: ${Date(end_timestamp).toString()}`)
}

export function handleSummary(data) {
  return {
    'stdout' : textSummary(data, {indent : ' ', enableColors : false}),
    [summary_json] : JSON.stringify(data),
  };
}

export function obj_write() {
  if (__ENV.SLEEP_WRITE) {
    sleep(__ENV.SLEEP_WRITE);
  }

  const key = __ENV.OBJ_NAME || uuidv4();
  const bucket = bucket_list[Math.floor(Math.random() * bucket_list.length)];

  const payload = generator.genPayload();
  const resp = s3_client.put(bucket, key, payload);
  if (!resp.success) {
    log.withFields({bucket : bucket, key : key}).error(resp.error);
    return;
  }

  if (obj_registry) {
    obj_registry.addObject('', '', bucket, key, payload.hash());
  }
}

export function obj_read() {
  if (__ENV.SLEEP_READ) {
    sleep(__ENV.SLEEP_READ);
  }

  if (obj_to_read_selector) {
    const obj = obj_to_read_selector.nextObject();
    if (!obj) {
      return;
    }
    const resp = s3_client.get(obj.s3_bucket, obj.s3_key)
    if (!resp.success) {
      log.withFields({bucket : obj.s3_bucket, key : obj.s3_key})
          .error(resp.error);
    }
    return
  }

  const obj = obj_list[Math.floor(Math.random() * obj_list.length)];

  const resp = s3_client.get(obj.bucket, obj.object);
  if (!resp.success) {
    log.withFields({bucket : obj.bucket, key : obj.object}).error(resp.error);
  }
}

export function obj_delete() {
  if (__ENV.SLEEP_DELETE) {
    sleep(__ENV.SLEEP_DELETE);
  }

  const obj = obj_to_delete_selector.nextObject();
  if (!obj) {
    if (obj_to_delete_exit_on_null) {
      exec.test.abort("No more objects to select");
    }
    return;
  }

  const resp = s3_client.delete(obj.s3_bucket, obj.s3_key);
  if (!resp.success) {
    log.withFields({bucket : obj.s3_bucket, key : obj.s3_key, op : 'DELETE'})
        .error(resp.error);
    return;
  }

  obj_registry.deleteObject(obj.id);
}
@ -1,70 +1,52 @@
|
||||||
import {sleep} from 'k6';
|
import datagen from 'k6/x/frostfs/datagen';
|
||||||
import {SharedArray} from 'k6/data';
|
|
||||||
import logging from 'k6/x/frostfs/logging';
|
import logging from 'k6/x/frostfs/logging';
|
||||||
import registry from 'k6/x/frostfs/registry';
|
import registry from 'k6/x/frostfs/registry';
|
||||||
import s3 from 'k6/x/frostfs/s3';
|
import s3 from 'k6/x/frostfs/s3';
|
||||||
import stats from 'k6/x/frostfs/stats';
|
import { SharedArray } from 'k6/data';
|
||||||
|
import { sleep } from 'k6';
|
||||||
import {newGenerator} from './libs/datagen.js';
|
import { textSummary } from './libs/k6-summary-0.0.2.js';
|
||||||
import {parseEnv} from './libs/env-parser.js';
|
import { parseEnv } from './libs/env-parser.js';
|
||||||
import {textSummary} from './libs/k6-summary-0.0.2.js';
|
import { uuidv4 } from './libs/k6-utils-1.4.0.js';
|
||||||
import {uuidv4} from './libs/k6-utils-1.4.0.js';
|
|
||||||
|
|
||||||
parseEnv();
|
parseEnv();
|
||||||
|
|
||||||
const obj_list = new SharedArray('obj_list', function() {
|
const obj_list = new SharedArray('obj_list', function () {
|
||||||
return JSON.parse(open(__ENV.PREGEN_JSON)).objects;
|
return JSON.parse(open(__ENV.PREGEN_JSON)).objects;
|
||||||
});
|
});
|
||||||
|
|
||||||
const bucket_list = new SharedArray('bucket_list', function() {
|
const bucket_list = new SharedArray('bucket_list', function () {
|
||||||
return JSON.parse(open(__ENV.PREGEN_JSON)).buckets;
|
return JSON.parse(open(__ENV.PREGEN_JSON)).buckets;
|
||||||
});
|
});
|
||||||
|
|
||||||
const read_size = JSON.parse(open(__ENV.PREGEN_JSON)).obj_size;
|
const read_size = JSON.parse(open(__ENV.PREGEN_JSON)).obj_size;
|
||||||
const summary_json = __ENV.SUMMARY_JSON || '/tmp/summary.json';
|
const summary_json = __ENV.SUMMARY_JSON || "/tmp/summary.json";
|
||||||
|
|
||||||
// Select random S3 endpoint for current VU
|
// Select random S3 endpoint for current VU
|
||||||
const s3_endpoints = __ENV.S3_ENDPOINTS.split(',');
|
const s3_endpoints = __ENV.S3_ENDPOINTS.split(',');
|
||||||
const s3_endpoint =
|
const s3_endpoint = s3_endpoints[Math.floor(Math.random() * s3_endpoints.length)];
|
||||||
s3_endpoints[Math.floor(Math.random() * s3_endpoints.length)];
|
const s3_client = s3.connect(`http://${s3_endpoint}`);
|
||||||
const no_verify_ssl = __ENV.NO_VERIFY_SSL || 'true';
|
const log = logging.new().withField("endpoint", s3_endpoint);
|
||||||
const connection_args = {
|
|
||||||
no_verify_ssl: no_verify_ssl
|
|
||||||
};
|
|
||||||
const s3_client = s3.connect(s3_endpoint, connection_args);
|
|
||||||
const log = logging.new().withField('endpoint', s3_endpoint);
|
|
||||||
|
|
||||||
const registry_enabled = !!__ENV.REGISTRY_FILE;
|
const registry_enabled = !!__ENV.REGISTRY_FILE;
|
||||||
const obj_registry =
|
const obj_registry = registry_enabled ? registry.open(__ENV.REGISTRY_FILE) : undefined;
|
||||||
registry_enabled ? registry.open(__ENV.REGISTRY_FILE) : undefined;
|
|
||||||
|
|
||||||
const duration = __ENV.DURATION;
|
const duration = __ENV.DURATION;
|
||||||
|
|
||||||
if (!!__ENV.METRIC_TAGS) {
|
|
||||||
stats.setTags(__ENV.METRIC_TAGS)
|
|
||||||
}
|
|
||||||
|
|
||||||
const delete_age = __ENV.DELETE_AGE ? parseInt(__ENV.DELETE_AGE) : undefined;
|
const delete_age = __ENV.DELETE_AGE ? parseInt(__ENV.DELETE_AGE) : undefined;
|
||||||
let obj_to_delete_selector = undefined;
|
let obj_to_delete_selector = undefined;
|
||||||
if (registry_enabled && delete_age) {
|
if (registry_enabled && delete_age) {
|
||||||
obj_to_delete_selector = registry.getSelector(
|
obj_to_delete_selector = registry.getSelector(
|
||||||
__ENV.REGISTRY_FILE, 'obj_to_delete',
|
__ENV.REGISTRY_FILE,
|
||||||
__ENV.SELECTION_SIZE ? parseInt(__ENV.SELECTION_SIZE) : 0, {
|
"obj_to_delete",
|
||||||
status: 'created',
|
__ENV.SELECTION_SIZE ? parseInt(__ENV.SELECTION_SIZE) : 0,
|
||||||
age: delete_age,
|
{
|
||||||
});
|
status: "created",
|
||||||
|
age: delete_age,
|
||||||
|
}
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
const read_age = __ENV.READ_AGE ? parseInt(__ENV.READ_AGE) : 10;
|
const generator = datagen.generator(1024 * parseInt(__ENV.WRITE_OBJ_SIZE));
|
||||||
let obj_to_read_selector = undefined;
|
|
||||||
if (registry_enabled) {
|
|
||||||
obj_to_read_selector = registry.getLoopedSelector(
|
|
||||||
__ENV.REGISTRY_FILE, 'obj_to_read',
|
|
||||||
__ENV.SELECTION_SIZE ? parseInt(__ENV.SELECTION_SIZE) : 0, {
|
|
||||||
status: 'created',
|
|
||||||
age: read_age,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
const scenarios = {};
|
const scenarios = {};
|
||||||
|
|
||||||
|
@ -72,18 +54,17 @@ const time_unit = __ENV.TIME_UNIT || '1s';
|
||||||
const pre_alloc_write_vus = parseInt(__ENV.PRE_ALLOC_WRITERS || '0');
|
const pre_alloc_write_vus = parseInt(__ENV.PRE_ALLOC_WRITERS || '0');
|
||||||
const max_write_vus = parseInt(__ENV.MAX_WRITERS || pre_alloc_write_vus);
|
const max_write_vus = parseInt(__ENV.MAX_WRITERS || pre_alloc_write_vus);
|
||||||
const write_rate = parseInt(__ENV.WRITE_RATE || '0');
|
const write_rate = parseInt(__ENV.WRITE_RATE || '0');
|
||||||
const generator = newGenerator(write_rate > 0);
|
|
||||||
if (write_rate > 0) {
|
if (write_rate > 0) {
|
||||||
scenarios.write = {
|
scenarios.write = {
|
||||||
executor: 'constant-arrival-rate',
|
executor: 'constant-arrival-rate',
|
||||||
duration: `${duration}s`,
|
duration: `${duration}s`,
|
||||||
preAllocatedVUs: pre_alloc_write_vus,
|
preAllocatedVUs: pre_alloc_write_vus,
|
||||||
maxVUs: max_write_vus,
|
maxVUs: max_write_vus,
|
||||||
rate: write_rate,
|
rate: write_rate,
|
||||||
timeUnit: time_unit,
|
timeUnit: time_unit,
|
||||||
exec: 'obj_write',
|
exec: 'obj_write',
|
||||||
gracefulStop: '5s',
|
gracefulStop: '5s',
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@ -91,16 +72,16 @@ const pre_alloc_read_vus = parseInt(__ENV.PRE_ALLOC_READERS || '0');
|
||||||
const max_read_vus = parseInt(__ENV.MAX_READERS || pre_alloc_read_vus);
|
const max_read_vus = parseInt(__ENV.MAX_READERS || pre_alloc_read_vus);
|
||||||
const read_rate = parseInt(__ENV.READ_RATE || '0');
|
const read_rate = parseInt(__ENV.READ_RATE || '0');
|
||||||
if (read_rate > 0) {
|
if (read_rate > 0) {
|
||||||
scenarios.read = {
|
scenarios.read = {
|
||||||
executor: 'constant-arrival-rate',
|
executor: 'constant-arrival-rate',
|
||||||
duration: `${duration}s`,
|
duration: `${duration}s`,
|
||||||
preAllocatedVUs: pre_alloc_write_vus,
|
preAllocatedVUs: pre_alloc_write_vus,
|
||||||
maxVUs: max_read_vus,
|
maxVUs: max_read_vus,
|
||||||
rate: read_rate,
|
rate: read_rate,
|
||||||
timeUnit: time_unit,
|
timeUnit: time_unit,
|
||||||
exec: 'obj_read',
|
exec: 'obj_read',
|
||||||
gracefulStop: '5s',
|
gracefulStop: '5s',
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@ -108,132 +89,109 @@ const pre_alloc_delete_vus = parseInt(__ENV.PRE_ALLOC_DELETERS || '0');
|
||||||
const max_delete_vus = parseInt(__ENV.MAX_DELETERS || pre_alloc_write_vus);
|
const max_delete_vus = parseInt(__ENV.MAX_DELETERS || pre_alloc_write_vus);
|
||||||
const delete_rate = parseInt(__ENV.DELETE_RATE || '0');
|
const delete_rate = parseInt(__ENV.DELETE_RATE || '0');
|
||||||
if (delete_rate > 0) {
|
if (delete_rate > 0) {
|
||||||
if (!obj_to_delete_selector) {
|
if (!obj_to_delete_selector) {
|
||||||
throw new Error(
|
throw new Error('Positive DELETE worker number without a proper object selector');
|
||||||
'Positive DELETE worker number without a proper object selector');
|
}
|
||||||
}
|
|
||||||
|
|
||||||
scenarios.delete = {
|
scenarios.delete = {
|
||||||
executor: 'constant-arrival-rate',
|
executor: 'constant-arrival-rate',
|
||||||
duration: `${duration}s`,
|
duration: `${duration}s`,
|
||||||
preAllocatedVUs: pre_alloc_delete_vus,
|
preAllocatedVUs: pre_alloc_delete_vus,
|
||||||
maxVUs: max_delete_vus,
|
maxVUs: max_delete_vus,
|
||||||
rate: delete_rate,
|
rate: delete_rate,
|
||||||
timeUnit: time_unit,
|
timeUnit: time_unit,
|
||||||
exec: 'obj_delete',
|
exec: 'obj_delete',
|
||||||
gracefulStop: '5s',
|
gracefulStop: '5s',
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
export const options = {
|
export const options = {
|
||||||
scenarios,
|
scenarios,
|
||||||
setupTimeout: '5s',
|
setupTimeout: '5s',
|
||||||
};
|
};
|
||||||
|
|
||||||
export function setup() {
|
export function setup() {
|
||||||
const total_pre_allocated_vu_count =
|
const total_pre_allocated_vu_count = pre_alloc_write_vus + pre_alloc_read_vus + pre_alloc_delete_vus;
|
||||||
pre_alloc_write_vus + pre_alloc_read_vus + pre_alloc_delete_vus;
|
const total_max_vu_count = max_read_vus + max_write_vus + max_delete_vus
|
||||||
const total_max_vu_count = max_read_vus + max_write_vus + max_delete_vus
|
|
||||||
|
|
||||||
console.log(`Pregenerated buckets: ${bucket_list.length}`);
|
console.log(`Pregenerated buckets: ${bucket_list.length}`);
|
||||||
console.log(`Pregenerated read object size: ${read_size}`);
|
console.log(`Pregenerated read object size: ${read_size}`);
|
||||||
console.log(`Pregenerated total objects: ${obj_list.length}`);
|
console.log(`Pregenerated total objects: ${obj_list.length}`);
|
||||||
console.log(`Pre allocated reading VUs: ${pre_alloc_read_vus}`);
|
console.log(`Pre allocated reading VUs: ${pre_alloc_read_vus}`);
|
||||||
console.log(`Pre allocated writing VUs: ${pre_alloc_write_vus}`);
|
console.log(`Pre allocated writing VUs: ${pre_alloc_write_vus}`);
|
||||||
console.log(`Pre allocated deleting VUs: ${pre_alloc_delete_vus}`);
|
console.log(`Pre allocated deleting VUs: ${pre_alloc_delete_vus}`);
|
||||||
console.log(`Total pre allocated VUs: ${total_pre_allocated_vu_count}`);
|
console.log(`Total pre allocated VUs: ${total_pre_allocated_vu_count}`);
|
||||||
console.log(`Max reading VUs: ${max_read_vus}`);
|
console.log(`Max reading VUs: ${max_read_vus}`);
|
||||||
console.log(`Max writing VUs: ${max_write_vus}`);
|
console.log(`Max writing VUs: ${max_write_vus}`);
|
||||||
console.log(`Max deleting VUs: ${max_delete_vus}`);
|
console.log(`Max deleting VUs: ${max_delete_vus}`);
|
||||||
console.log(`Total max VUs: ${total_max_vu_count}`);
|
console.log(`Total max VUs: ${total_max_vu_count}`);
|
||||||
console.log(`Time unit: ${time_unit}`);
|
console.log(`Time unit: ${time_unit}`);
|
||||||
console.log(`Read rate: ${read_rate}`);
|
console.log(`Read rate: ${read_rate}`);
|
||||||
console.log(`Writing rate: ${write_rate}`);
|
console.log(`Writing rate: ${write_rate}`);
|
||||||
console.log(`Delete rate: ${delete_rate}`);
|
console.log(`Delete rate: ${delete_rate}`);
|
||||||
|
|
||||||
const start_timestamp = Date.now()
|
|
||||||
console.log(
|
|
||||||
`Load started at: ${Date(start_timestamp).toString()}`)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export function teardown(data) {
|
export function teardown(data) {
|
||||||
if (obj_registry) {
|
if (obj_registry) {
|
||||||
obj_registry.close();
|
obj_registry.close();
|
||||||
}
|
}
|
||||||
const end_timestamp = Date.now()
|
|
||||||
console.log(
|
|
||||||
-      `Load finished at: ${Date(end_timestamp).toString()}`)
 }

 export function handleSummary(data) {
   return {
-    'stdout': textSummary(data, {indent: ' ', enableColors: false}),
+    'stdout': textSummary(data, { indent: ' ', enableColors: false }),
     [summary_json]: JSON.stringify(data),
   };
 }

 export function obj_write() {
   if (__ENV.SLEEP_WRITE) {
     sleep(__ENV.SLEEP_WRITE);
   }

   const key = __ENV.OBJ_NAME || uuidv4();
   const bucket = bucket_list[Math.floor(Math.random() * bucket_list.length)];

-  const payload = generator.genPayload();
+  const { payload, hash } = generator.genPayload(registry_enabled);
   const resp = s3_client.put(bucket, key, payload);
   if (!resp.success) {
     log.withFields({bucket: bucket, key: key}).error(resp.error);
     return;
   }

   if (obj_registry) {
-    obj_registry.addObject('', '', bucket, key, payload.hash());
+    obj_registry.addObject("", "", bucket, key, hash);
   }
 }

 export function obj_read() {
   if (__ENV.SLEEP_READ) {
     sleep(__ENV.SLEEP_READ);
   }

-  if (obj_to_read_selector) {
-    const obj = obj_to_read_selector.nextObject();
-    if (!obj) {
-      return;
-    }
-    const resp = s3_client.get(obj.s3_bucket, obj.s3_key)
-    if (!resp.success) {
-      log.withFields({bucket: obj.s3_bucket, key: obj.s3_key})
-          .error(resp.error);
-    }
-    return
-  }
-
   const obj = obj_list[Math.floor(Math.random() * obj_list.length)];

   const resp = s3_client.get(obj.bucket, obj.object);
   if (!resp.success) {
     log.withFields({bucket: obj.bucket, key: obj.object}).error(resp.error);
   }
 }

 export function obj_delete() {
   if (__ENV.SLEEP_DELETE) {
     sleep(__ENV.SLEEP_DELETE);
   }

   const obj = obj_to_delete_selector.nextObject();
   if (!obj) {
     return;
   }

   const resp = s3_client.delete(obj.s3_bucket, obj.s3_key);
   if (!resp.success) {
-    log.withFields({bucket: obj.s3_bucket, key: obj.s3_key, op: 'DELETE'})
-        .error(resp.error);
+    log.withFields({bucket: obj.s3_bucket, key: obj.s3_key, op: "DELETE"}).error(resp.error);
     return;
   }

   obj_registry.deleteObject(obj.id);
 }
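The obj_delete() and obj_read() paths above assume that entries returned by the registry selectors carry an id plus S3 coordinates. A minimal sketch of the shape implied by those calls; the field names come from the hunk, every value is a placeholder:

// Shape implied by obj_delete()/obj_read() in the hunk above; values are illustrative only.
const registry_entry = {
  id: 42,                  // consumed by obj_registry.deleteObject(obj.id)
  s3_bucket: 'bucket-0',   // consumed by s3_client.delete(obj.s3_bucket, obj.s3_key)
  s3_key: 'object-key-0',
};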
@ -1,119 +0,0 @@
import {sleep} from 'k6';
import {SharedArray} from 'k6/data';
import logging from 'k6/x/frostfs/logging';
import registry from 'k6/x/frostfs/registry';
import s3 from 'k6/x/frostfs/s3';
import stats from 'k6/x/frostfs/stats';

import {newGenerator} from './libs/datagen.js';
import {parseEnv} from './libs/env-parser.js';
import {textSummary} from './libs/k6-summary-0.0.2.js';
import {uuidv4} from './libs/k6-utils-1.4.0.js';

parseEnv();

const bucket_list = new SharedArray('bucket_list', function() {
  return JSON.parse(open(__ENV.PREGEN_JSON)).buckets;
});

const summary_json = __ENV.SUMMARY_JSON || '/tmp/summary.json';

// Select random S3 endpoint for current VU
const s3_endpoints = __ENV.S3_ENDPOINTS.split(',');
const s3_endpoint =
    s3_endpoints[Math.floor(Math.random() * s3_endpoints.length)];
const no_verify_ssl = __ENV.NO_VERIFY_SSL || 'true';
const connection_args = {
  no_verify_ssl: no_verify_ssl
};
const s3_client = s3.connect(s3_endpoint, connection_args);
const log = logging.new().withField('endpoint', s3_endpoint);

const registry_enabled = !!__ENV.REGISTRY_FILE;
const obj_registry =
    registry_enabled ? registry.open(__ENV.REGISTRY_FILE) : undefined;

const duration = __ENV.DURATION;

if (!!__ENV.METRIC_TAGS) {
  stats.setTags(__ENV.METRIC_TAGS)
}

const scenarios = {};

const write_vu_count = parseInt(__ENV.WRITERS || '0');
if (write_vu_count < 1) {
  throw 'number of VUs (env WRITERS) performing write operations should be greater than 0';
}

const write_multipart_vu_count = parseInt(__ENV.WRITERS_MULTIPART || '0');
if (write_multipart_vu_count < 1) {
  throw 'number of parts (env WRITERS_MULTIPART) to upload in parallel should be greater than 0';
}

const generator =
    newGenerator(write_vu_count > 0 || write_multipart_vu_count > 0);
if (write_vu_count > 0) {
  scenarios.write_multipart = {
    executor: 'constant-vus',
    vus: write_vu_count,
    duration: `${duration}s`,
    exec: 'obj_write_multipart',
    gracefulStop: '5s',
  };
}

export const options = {
  scenarios,
  setupTimeout: '5s',
};

export function setup() {
  const total_vu_count = write_vu_count * write_multipart_vu_count;

  console.log(`Pregenerated buckets: ${bucket_list.length}`);
  console.log(`Writing VUs: ${write_vu_count}`);
  console.log(`Writing multipart VUs: ${write_multipart_vu_count}`);
  console.log(`Total VUs: ${total_vu_count}`);
}

export function teardown(data) {
  if (obj_registry) {
    obj_registry.close();
  }
}

export function handleSummary(data) {
  return {
    'stdout': textSummary(data, {indent: ' ', enableColors: false}),
    [summary_json]: JSON.stringify(data),
  };
}

const write_multipart_part_size =
    1024 * parseInt(__ENV.WRITE_OBJ_PART_SIZE || '0')
if (write_multipart_part_size < 5 * 1024 * 1024) {
  throw 'part size (env WRITE_OBJ_PART_SIZE * 1024) must be greater than (5 MB)';
}

export function obj_write_multipart() {
  if (__ENV.SLEEP_WRITE) {
    sleep(__ENV.SLEEP_WRITE);
  }

  const key = __ENV.OBJ_NAME || uuidv4();
  const bucket = bucket_list[Math.floor(Math.random() * bucket_list.length)];

  const payload = generator.genPayload();
  const resp = s3_client.multipart(
      bucket, key, write_multipart_part_size, write_multipart_vu_count,
      payload);
  if (!resp.success) {
    log.withFields({bucket: bucket, key: key}).error(resp.error);
    return;
  }

  if (obj_registry) {
    obj_registry.addObject('', '', bucket, key, payload.hash());
  }
}
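One practical note on the multipart scenario removed above: WRITE_OBJ_PART_SIZE is interpreted in KiB, so the smallest value that passes the 5 MB guard is 5120. A minimal illustration of that arithmetic; the concrete value is only an example:

// Illustrative only: mirrors the part-size guard from the removed scenario above.
// __ENV.WRITE_OBJ_PART_SIZE is given in KiB; 5120 KiB equals exactly 5 MiB.
const part_size_bytes = 1024 * parseInt('5120'); // 5242880 bytes
console.log(part_size_bytes >= 5 * 1024 * 1024); // true, so the guard does not throw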
@ -1,172 +1,127 @@
-import {SharedArray} from 'k6/data';
-import exec from 'k6/execution';
+import datagen from 'k6/x/frostfs/datagen';
 import logging from 'k6/x/frostfs/logging';
 import registry from 'k6/x/frostfs/registry';
 import s3local from 'k6/x/frostfs/s3local';
-import stats from 'k6/x/frostfs/stats';
-
-import {newGenerator} from './libs/datagen.js';
-import {parseEnv} from './libs/env-parser.js';
-import {textSummary} from './libs/k6-summary-0.0.2.js';
-import {uuidv4} from './libs/k6-utils-1.4.0.js';
+import { SharedArray } from 'k6/data';
+import { textSummary } from './libs/k6-summary-0.0.2.js';
+import { parseEnv } from './libs/env-parser.js';
+import { uuidv4 } from './libs/k6-utils-1.4.0.js';

 parseEnv();

-const obj_list = new SharedArray('obj_list', function() {
+const obj_list = new SharedArray('obj_list', function () {
   return JSON.parse(open(__ENV.PREGEN_JSON)).objects;
 });

-const container_list = new SharedArray('container_list', function() {
+const container_list = new SharedArray('container_list', function () {
   return JSON.parse(open(__ENV.PREGEN_JSON)).containers;
 });

-const bucket_list = new SharedArray('bucket_list', function() {
+const bucket_list = new SharedArray('bucket_list', function () {
   return JSON.parse(open(__ENV.PREGEN_JSON)).buckets;
 });

 function bucket_mapping() {
   if (container_list.length != bucket_list.length) {
     throw 'The number of containers and buckets in the preset file must be the same.';
   }
   let mapping = {};
   for (let i = 0; i < container_list.length; ++i) {
     mapping[bucket_list[i]] = container_list[i];
   }
   return mapping;
 }

 const read_size = JSON.parse(open(__ENV.PREGEN_JSON)).obj_size;
-const summary_json = __ENV.SUMMARY_JSON || '/tmp/summary.json';
+const summary_json = __ENV.SUMMARY_JSON || "/tmp/summary.json";

 const config_file = __ENV.CONFIG_FILE;
-const config_dir = __ENV.CONFIG_DIR;
-const max_total_size_gb =
-    __ENV.MAX_TOTAL_SIZE_GB ? parseInt(__ENV.MAX_TOTAL_SIZE_GB) : 0;
-const s3_client = s3local.connect(
-    config_file, config_dir, {
-      'debug_logger': __ENV.DEBUG_LOGGER || 'false',
-    },
-    bucket_mapping(), max_total_size_gb);
-const log = logging.new().withFields(
-    {'config_file': config_file, 'config_dir': config_dir});
-
-if (!!__ENV.METRIC_TAGS) {
-  stats.setTags(__ENV.METRIC_TAGS)
-}
+const s3_client = s3local.connect(config_file, {
+  'debug_logger': __ENV.DEBUG_LOGGER || 'false',
+}, bucket_mapping());
+const log = logging.new().withField("config", config_file);

 const registry_enabled = !!__ENV.REGISTRY_FILE;
-const obj_registry =
-    registry_enabled ? registry.open(__ENV.REGISTRY_FILE) : undefined;
-
-let obj_to_read_selector = undefined;
-if (registry_enabled) {
-  obj_to_read_selector = registry.getLoopedSelector(
-      __ENV.REGISTRY_FILE, 'obj_to_read',
-      __ENV.SELECTION_SIZE ? parseInt(__ENV.SELECTION_SIZE) : 0, {
-        status: 'created',
-      })
-}
+const obj_registry = registry_enabled ? registry.open(__ENV.REGISTRY_FILE) : undefined;

 const duration = __ENV.DURATION;

+const generator = datagen.generator(1024 * parseInt(__ENV.WRITE_OBJ_SIZE));

 const scenarios = {};

 const write_vu_count = parseInt(__ENV.WRITERS || '0');
-const generator = newGenerator(write_vu_count > 0);
 if (write_vu_count > 0) {
   scenarios.write = {
     executor: 'constant-vus',
     vus: write_vu_count,
     duration: `${duration}s`,
     exec: 'obj_write',
     gracefulStop: '5s',
   };
 }

 const read_vu_count = parseInt(__ENV.READERS || '0');
 if (read_vu_count > 0) {
   scenarios.read = {
     executor: 'constant-vus',
     vus: read_vu_count,
     duration: `${duration}s`,
     exec: 'obj_read',
     gracefulStop: '5s',
   };
 }

 export const options = {
   scenarios,
   setupTimeout: '5s',
 };

 export function setup() {
   const total_vu_count = write_vu_count + read_vu_count;

   console.log(`Pregenerated buckets: ${bucket_list.length}`);
   console.log(`Pregenerated read object size: ${read_size}`);
   console.log(`Pregenerated total objects: ${obj_list.length}`);
   console.log(`Reading VUs: ${read_vu_count}`);
   console.log(`Writing VUs: ${write_vu_count}`);
   console.log(`Total VUs: ${total_vu_count}`);
-
-  const start_timestamp = Date.now()
-  console.log(
-      `Load started at: ${Date(start_timestamp).toString()}`)
 }

 export function teardown(data) {
   if (obj_registry) {
     obj_registry.close();
   }
-  const end_timestamp = Date.now()
-  console.log(
-      `Load finished at: ${Date(end_timestamp).toString()}`)
 }

 export function handleSummary(data) {
   return {
-    'stdout': textSummary(data, {indent: ' ', enableColors: false}),
+    'stdout': textSummary(data, { indent: ' ', enableColors: false }),
     [summary_json]: JSON.stringify(data),
   };
 }

 export function obj_write() {
   const key = __ENV.OBJ_NAME || uuidv4();
   const bucket = bucket_list[Math.floor(Math.random() * bucket_list.length)];

-  const payload = generator.genPayload();
+  const { payload, hash } = generator.genPayload(registry_enabled);
   const resp = s3_client.put(bucket, key, payload);
   if (!resp.success) {
-    if (resp.abort) {
-      exec.test.abort(resp.error);
-    }
     log.withFields({bucket: bucket, key: key}).error(resp.error);
     return;
   }

   if (obj_registry) {
-    obj_registry.addObject('', '', bucket, key, payload.hash());
+    obj_registry.addObject("", "", bucket, key, hash);
   }
 }

 export function obj_read() {
-  if (obj_to_read_selector) {
-    const obj = obj_to_read_selector.nextObject();
-    if (!obj) {
-      return;
-    }
-    const resp = s3_client.get(obj.s3_bucket, obj.s3_key)
-    if (!resp.success) {
-      log.withFields({bucket: obj.s3_bucket, key: obj.s3_key})
-          .error(resp.error);
-    }
-    return
-  }
-
   const obj = obj_list[Math.floor(Math.random() * obj_list.length)];

   const resp = s3_client.get(obj.bucket, obj.object);
   if (!resp.success) {
     log.withFields({bucket: obj.bucket, key: obj.object}).error(resp.error);
   }
 }
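Both versions of the s3local scenario above read the same preset file; a sketch of the fields the code actually touches, where only the field names are taken from the hunk and every value is a placeholder:

// Hypothetical contents of the file behind __ENV.PREGEN_JSON; values are illustrative only.
const preset = {
  buckets: ['bucket-0', 'bucket-1'],
  containers: ['container-id-0', 'container-id-1'],
  objects: [{ bucket: 'bucket-0', object: 'object-key-0' }],
  obj_size: 1024,
};
// bucket_mapping() pairs buckets[i] with containers[i], which is why the two lists
// must have the same length.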
@ -1,21 +1,19 @@
-import {sleep} from 'k6';
-import {Counter} from 'k6/metrics';
-import logging from 'k6/x/frostfs/logging';
 import native from 'k6/x/frostfs/native';
 import registry from 'k6/x/frostfs/registry';
 import s3 from 'k6/x/frostfs/s3';
-import stats from 'k6/x/frostfs/stats';
-
-import {parseEnv} from './libs/env-parser.js';
-import {textSummary} from './libs/k6-summary-0.0.2.js';
+import logging from 'k6/x/frostfs/logging';
+import { sleep } from 'k6';
+import { Counter } from 'k6/metrics';
+import { textSummary } from './libs/k6-summary-0.0.2.js';
+import { parseEnv } from './libs/env-parser.js';

 parseEnv();

 const obj_registry = registry.open(__ENV.REGISTRY_FILE);

 // Time limit (in seconds) for the run
-const time_limit = __ENV.TIME_LIMIT || '60';
-const summary_json = __ENV.SUMMARY_JSON || '/tmp/summary.json';
+const time_limit = __ENV.TIME_LIMIT || "60";
+const summary_json = __ENV.SUMMARY_JSON || "/tmp/summary.json";

 // Number of objects in each status. These counters are cumulative in a
 // sense that they reflect total number of objects in the registry, not just
@ -23,148 +21,135 @@ const summary_json = __ENV.SUMMARY_JSON || '/tmp/summary.json';
 // This allows to run this scenario multiple times and collect overall
 // statistics in the final run.
 const obj_counters = {
   verified: new Counter('verified_obj'),
   skipped: new Counter('skipped_obj'),
   invalid: new Counter('invalid_obj'),
 };

 let log = logging.new();

-if (!!__ENV.METRIC_TAGS) {
-  stats.setTags(__ENV.METRIC_TAGS)
-}
-
 // Connect to random gRPC endpoint
 let grpc_client = undefined;
 if (__ENV.GRPC_ENDPOINTS) {
   const grpcEndpoints = __ENV.GRPC_ENDPOINTS.split(',');
-  const grpcEndpoint =
-      grpcEndpoints[Math.floor(Math.random() * grpcEndpoints.length)];
-  log = log.withField('endpoint', grpcEndpoint);
-  grpc_client = native.connect(
-      grpcEndpoint, '', __ENV.DIAL_TIMEOUT ? parseInt(__ENV.DIAL_TIMEOUT) : 0,
-      __ENV.STREAM_TIMEOUT ? parseInt(__ENV.STREAM_TIMEOUT) : 0,
-      __ENV.PREPARE_LOCALLY ? __ENV.PREPARE_LOCALLY.toLowerCase() === 'true' :
-          false,
-      '');
+  const grpcEndpoint = grpcEndpoints[Math.floor(Math.random() * grpcEndpoints.length)];
+  log = log.withField("endpoint", grpcEndpoint);
+  grpc_client = native.connect(grpcEndpoint, '', __ENV.DIAL_TIMEOUT ? parseInt(__ENV.DIAL_TIMEOUT) : 0, __ENV.STREAM_TIMEOUT ? parseInt(__ENV.STREAM_TIMEOUT) : 0);
 }

 // Connect to random S3 endpoint
 let s3_client = undefined;
 if (__ENV.S3_ENDPOINTS) {
-  const no_verify_ssl = __ENV.NO_VERIFY_SSL || 'true';
-  const connection_args = {no_verify_ssl: no_verify_ssl};
   const s3_endpoints = __ENV.S3_ENDPOINTS.split(',');
-  const s3_endpoint =
-      s3_endpoints[Math.floor(Math.random() * s3_endpoints.length)];
-  log = log.withField('endpoint', s3_endpoint);
-  s3_client = s3.connect(s3_endpoint, connection_args);
+  const s3_endpoint = s3_endpoints[Math.floor(Math.random() * s3_endpoints.length)];
+  log = log.withField("endpoint", s3_endpoint);
+  s3_client = s3.connect(`http://${s3_endpoint}`);
 }

-// We will attempt to verify every object in "created" status. The scenario will
-// execute as many iterations as there are objects. Each object will have 3
-// retries to be verified
+// We will attempt to verify every object in "created" status. The scenario will execute
+// as many iterations as there are objects. Each object will have 3 retries to be verified
 const obj_to_verify_selector = registry.getSelector(
-    __ENV.REGISTRY_FILE, 'obj_to_verify',
-    __ENV.SELECTION_SIZE ? parseInt(__ENV.SELECTION_SIZE) : 0, {
-      status: 'created',
-    });
+  __ENV.REGISTRY_FILE,
+  "obj_to_verify",
+  __ENV.SELECTION_SIZE ? parseInt(__ENV.SELECTION_SIZE) : 0,
+  {
+    status: "created",
+  }
+);
 const obj_to_verify_count = obj_to_verify_selector.count();
-// Execute at least one iteration (executor shared-iterations can't run 0
-// iterations)
+// Execute at least one iteration (executor shared-iterations can't run 0 iterations)
 const iterations = Math.max(1, obj_to_verify_count);
-// Executor shared-iterations requires number of iterations to be larger than
-// number of VUs
+// Executor shared-iterations requires number of iterations to be larger than number of VUs
 const vus = Math.min(__ENV.CLIENTS, iterations);

 const scenarios = {
   verify: {
     executor: 'shared-iterations',
     vus,
     iterations,
     maxDuration: `${time_limit}s`,
     exec: 'obj_verify',
     gracefulStop: '5s',
   }
 };

 export const options = {
   scenarios,
   setupTimeout: '5s',
 };

 export function setup() {
   // Populate counters with initial values
   for (const [status, counter] of Object.entries(obj_counters)) {
     const obj_selector = registry.getSelector(
-        __ENV.REGISTRY_FILE, status,
-        __ENV.SELECTION_SIZE ? parseInt(__ENV.SELECTION_SIZE) : 0, {status});
+      __ENV.REGISTRY_FILE,
+      status,
+      __ENV.SELECTION_SIZE ? parseInt(__ENV.SELECTION_SIZE) : 0,
+      { status });
     counter.add(obj_selector.count());
   }
 }

 export function handleSummary(data) {
   return {
-    'stdout': textSummary(data, {indent: ' ', enableColors: false}),
+    'stdout': textSummary(data, { indent: ' ', enableColors: false }),
     [summary_json]: JSON.stringify(data),
   };
 }

 export function obj_verify() {
   if (obj_to_verify_count == 0) {
-    log.info('Nothing to verify');
+    log.info("Nothing to verify");
     return;
   }

   if (__ENV.SLEEP) {
     sleep(__ENV.SLEEP);
   }

   const obj = obj_to_verify_selector.nextObject();
   if (!obj) {
-    log.info('All objects have been verified');
+    log.info("All objects have been verified");
     return;
   }

   const obj_status = verify_object_with_retries(obj, 3);
   obj_counters[obj_status].add(1);
   obj_registry.setObjectStatus(obj.id, obj.status, obj_status);
 }

 function verify_object_with_retries(obj, attempts) {
   for (let i = 0; i < attempts; i++) {
     let result;
     // Different name is required.
     // ReferenceError: Cannot access a variable before initialization.
     let lg = log;
     if (obj.c_id && obj.o_id) {
       lg = lg.withFields({cid: obj.c_id, oid: obj.o_id});
       result = grpc_client.verifyHash(obj.c_id, obj.o_id, obj.payload_hash);
     } else if (obj.s3_bucket && obj.s3_key) {
       lg = lg.withFields({bucket: obj.s3_bucket, key: obj.s3_key});
-      result =
-          s3_client.verifyHash(obj.s3_bucket, obj.s3_key, obj.payload_hash);
+      result = s3_client.verifyHash(obj.s3_bucket, obj.s3_key, obj.payload_hash);
     } else {
       lg.withFields({
         cid: obj.c_id,
         oid: obj.o_id,
         bucket: obj.s3_bucket,
         key: obj.s3_key
       }).warn(`Object cannot be verified with supported protocols`);
-      return 'skipped';
+      return "skipped";
     }

     if (result.success) {
-      return 'verified';
-    } else if (result.error == 'hash mismatch') {
-      return 'invalid';
+      return "verified";
+    } else if (result.error == "hash mismatch") {
+      return "invalid";
     }

-    // Unless we explicitly saw that there was a hash mismatch, then we will
-    // retry after a delay
+    // Unless we explicitly saw that there was a hash mismatch, then we will retry after a delay
     lg.error(`Verify error: ${result.error}. Object will be re-tried`);
     sleep(__ENV.SLEEP);
   }

-  return 'invalid';
+  return "invalid";
 }
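The verify scenario above is configured entirely through environment variables; a sketch of the knobs it reads, where the default "60" is the one visible in the hunk and every other value is a placeholder:

// Environment variables read by the verify scenario above; values are illustrative only.
const env_sketch = {
  REGISTRY_FILE: 'registry.bolt',          // placeholder path to the object registry
  TIME_LIMIT: '60',                        // seconds, default shown in the hunk
  CLIENTS: 8,                              // placeholder number of verifying VUs
  SELECTION_SIZE: 0,                       // 0 means no explicit selection limit
  SLEEP: 1,                                // optional delay before and between retries
  GRPC_ENDPOINTS: 'node1:8080,node2:8080', // placeholder gRPC endpoints
  S3_ENDPOINTS: 'node1:8084',              // placeholder S3 gateway endpoints
  DIAL_TIMEOUT: 5,                         // placeholder, passed to native.connect
  STREAM_TIMEOUT: 60,                      // placeholder, passed to native.connect
};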