Add s3 tests

Signed-off-by: a.lipay <a.lipay@yadro.com>
a.lipay authored 2022-11-14 01:17:50 +03:00; committed by Alipayy
parent bdbcee4e81
commit 6047ad2fb5
4 changed files with 266 additions and 3 deletions


@@ -64,7 +64,9 @@ class K6:
     @property
     def k6_dir(self) -> str:
         if not self._k6_dir:
-            self._k6_dir = self.shell.exec("sudo find . -name 'k6'").stdout.strip("\n")
+            self._k6_dir = self.shell.exec(
+                r"sudo find . -name 'k6' -exec dirname {} \; -quit"
+            ).stdout.strip("\n")
         return self._k6_dir
 
     @allure.step("Prepare containers and objects")
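Note on this hunk: `find . -name 'k6'` prints the path of the k6 binary itself (and every match if there are several), so `_k6_dir` did not actually hold a directory. With `-exec dirname {} \; -quit`, find prints the directory of the first match and exits. A minimal local sketch of the difference, using plain subprocess instead of the testlib shell (the paths in the comments are invented):

    import subprocess

    def run(cmd: str) -> str:
        # Stand-in for shell.exec(...).stdout; the real code runs over SSH with sudo.
        return subprocess.run(cmd, shell=True, capture_output=True, text=True).stdout.strip("\n")

    print(run("find . -name 'k6'"))                             # e.g. "./xk6/k6" (binary path, possibly several lines)
    print(run(r"find . -name 'k6' -exec dirname {} \; -quit"))  # e.g. "./xk6" (directory of the first match only)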


@@ -1,11 +1,17 @@
 import concurrent.futures
+import re
 from dataclasses import asdict
 
 import allure
 from common import STORAGE_NODE_SERVICE_NAME_REGEX
 from k6 import K6, LoadParams, LoadResults
+from neofs_testlib.cli.neofs_authmate import NeofsAuthmate
+from neofs_testlib.cli.neogo import NeoGo
 from neofs_testlib.hosting import Hosting
-from neofs_testlib.shell import SSHShell
+from neofs_testlib.shell import CommandOptions, SSHShell
+from neofs_testlib.shell.interfaces import InteractiveInput
 
+NEOFS_AUTHMATE_PATH = "neofs-s3-authmate"
+
 
 @allure.title("Get services endpoints")
@@ -16,6 +22,54 @@ def get_services_endpoints(
     return [service_config.attributes[endpoint_attribute] for service_config in service_configs]
 
 
+@allure.title("Init s3 client")
+def init_s3_client(load_nodes: list, login: str, pkey: str, hosting: Hosting):
+    service_configs = hosting.find_service_configs(STORAGE_NODE_SERVICE_NAME_REGEX)
+    host = hosting.get_host_by_service(service_configs[0].name)
+    wallet_path = service_configs[0].attributes["wallet_path"]
+    neogo_cli_config = host.get_cli_config("neo-go")
+    neogo_wallet = NeoGo(shell=host.get_shell(), neo_go_exec_path=neogo_cli_config.exec_path).wallet
+    dump_keys_output = neogo_wallet.dump_keys(wallet_config=wallet_path).stdout
+    public_key = str(re.search(r":\n(?P<public_key>.*)", dump_keys_output).group("public_key"))
+    node_endpoint = service_configs[0].attributes["rpc_endpoint"]
+    # prompt_pattern doesn't work at the moment
+    for load_node in load_nodes:
+        ssh_client = SSHShell(host=load_node, login=login, private_key_path=pkey)
+        path = ssh_client.exec(r"sudo find . -name 'k6' -exec dirname {} \; -quit").stdout.strip(
+            "\n"
+        )
+        neofs_authmate_exec = NeofsAuthmate(ssh_client, NEOFS_AUTHMATE_PATH)
+        issue_secret_output = neofs_authmate_exec.secret.issue(
+            wallet=f"{path}/scenarios/files/wallet.json",
+            peer=node_endpoint,
+            bearer_rules=f"{path}/scenarios/files/rules.json",
+            gate_public_key=public_key,
+            container_placement_policy="REP 1 IN X CBF 1 SELECT 1 FROM * AS X",
+            container_policy=f"{path}/scenarios/files/policy.json",
+            wallet_password="",
+        ).stdout
+        aws_access_key_id = str(
+            re.search(r"access_key_id.*:\s.(?P<aws_access_key_id>\w*)", issue_secret_output).group(
+                "aws_access_key_id"
+            )
+        )
+        aws_secret_access_key = str(
+            re.search(
+                r"secret_access_key.*:\s.(?P<aws_secret_access_key>\w*)", issue_secret_output
+            ).group("aws_secret_access_key")
+        )
+        # prompt_pattern doesn't work at the moment
+        configure_input = [
+            InteractiveInput(prompt_pattern=r"AWS Access Key ID.*", input=aws_access_key_id),
+            InteractiveInput(
+                prompt_pattern=r"AWS Secret Access Key.*", input=aws_secret_access_key
+            ),
+            InteractiveInput(prompt_pattern=r".*", input=""),
+            InteractiveInput(prompt_pattern=r".*", input=""),
+        ]
+        ssh_client.exec("aws configure", CommandOptions(interactive_inputs=configure_input))
+
+
 @allure.title("Clear cache and data from storage nodes")
 def clear_cache_and_data(hosting: Hosting):
     service_configs = hosting.find_service_configs(STORAGE_NODE_SERVICE_NAME_REGEX)
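The two `re.search` calls in this hunk extract the freshly issued S3 credentials from authmate's console output. A standalone sketch of that parsing against a hypothetical output snippet (the key values are placeholders, and the real `issue-secret` output may carry additional fields):

    import re

    # Hypothetical neofs-s3-authmate issue-secret output; values are placeholders.
    issue_secret_output = """
    {
      "access_key_id": "C8eoVmVPxsxgKcBKnXBEVdA5EsYHyh44zZHRwEXAMPLE",
      "secret_access_key": "9d7a3066e17452ef8c9a81240dcab3e274d3ceb1EXAMPLE"
    }
    """

    aws_access_key_id = re.search(
        r"access_key_id.*:\s.(?P<aws_access_key_id>\w*)", issue_secret_output
    ).group("aws_access_key_id")
    aws_secret_access_key = re.search(
        r"secret_access_key.*:\s.(?P<aws_secret_access_key>\w*)", issue_secret_output
    ).group("aws_secret_access_key")

    assert aws_access_key_id.startswith("C8eo")
    assert aws_secret_access_key.startswith("9d7a")

The trailing pair of catch-all `InteractiveInput(prompt_pattern=r".*", input="")` entries answers the remaining `aws configure` prompts (default region name and output format) with empty strings; since, per the in-code comment, prompt_pattern matching does not work yet, the four inputs are presumably just fed to the prompts in order.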


@@ -7,12 +7,14 @@ from common import (
     LOAD_NODE_SSH_PRIVATE_KEY_PATH,
     LOAD_NODE_SSH_USER,
     LOAD_NODES,
+    S3_GATE_SERVICE_NAME_REGEX,
     STORAGE_NODE_SERVICE_NAME_REGEX,
 )
 from k6 import LoadParams
 from load import (
     clear_cache_and_data,
     get_services_endpoints,
+    init_s3_client,
     multi_node_k6_run,
     prepare_k6_instances,
 )
@@ -450,3 +452,208 @@ class TestLoad:
         )
         with allure.step("Run load"):
             multi_node_k6_run(k6_load_instances)
+
+
+@pytest.mark.load
+@pytest.mark.s3
+class TestS3Load:
+    @pytest.fixture(autouse=True)
+    def clear_cache_and_data(self, hosting: Hosting):
+        clear_cache_and_data(hosting=hosting)
+
+    @pytest.fixture(scope="session", autouse=True)
+    def init_s3_client(self, hosting: Hosting):
+        init_s3_client(
+            load_nodes=LOAD_NODES,
+            login=LOAD_NODE_SSH_USER,
+            pkey=LOAD_NODE_SSH_PRIVATE_KEY_PATH,
+            hosting=hosting,
+        )
+
+    @pytest.mark.parametrize(
+        "obj_size, out_file, writers",
+        [
+            (4, "4kb_300.json", 400),
+            (16, "16kb_250.json", 350),
+            (64, "64kb_250.json", 350),
+            (128, "128kb_250.json", 300),
+            (512, "512kb_200.json", 250),
+            (1000, "1mb_200.json", 250),
+            (8000, "8mb_150.json", 200),
+            (32000, "32mb_150.json", 200),
+            (128000, "128mb_100.json", 150),
+            (512000, "512mb_50.json", 50),
+        ],
+    )
+    @pytest.mark.parametrize(
+        "load_time", [LoadTime.EXPECTED_MAXIMUM.value, LoadTime.PMI_EXPECTATION.value]
+    )
+    @pytest.mark.benchmark
+    @pytest.mark.s3
+    def test_s3_benchmark_write(
+        self,
+        obj_size,
+        out_file,
+        writers,
+        load_time,
+        hosting: Hosting,
+    ):
+        allure.dynamic.title(
+            f"Single gate benchmark write test - "
+            f"writers = {writers}, "
+            f"obj_size = {obj_size}, "
+            f"load_time = {load_time}"
+        )
+        with allure.step("Get endpoints"):
+            endpoints_list = get_services_endpoints(
+                hosting=hosting,
+                service_name_regex=S3_GATE_SERVICE_NAME_REGEX,
+                endpoint_attribute="endpoint",
+            )
+            endpoints = ",".join(endpoints_list[:1])
+        load_params = LoadParams(
+            endpoint=endpoints,
+            obj_size=obj_size,
+            containers_count=CONTAINERS_COUNT,
+            out_file=out_file,
+            obj_count=OBJ_COUNT,
+            writers=writers,
+            readers=0,
+            deleters=0,
+            load_time=load_time,
+            load_type="s3",
+        )
+        k6_load_instances = prepare_k6_instances(
+            load_nodes=LOAD_NODES,
+            login=LOAD_NODE_SSH_USER,
+            pkey=LOAD_NODE_SSH_PRIVATE_KEY_PATH,
+            load_params=load_params,
+        )
+        with allure.step("Run load"):
+            multi_node_k6_run(k6_load_instances)
+
+    @pytest.mark.parametrize(
+        "obj_size, out_file, writers, readers",
+        [
+            (4, "4kb_350.json", 210, 90),
+            (16, "16kb_300.json", 210, 90),
+            (64, "64kb_300.json", 210, 90),
+            (128, "128kb_300.json", 210, 90),
+            (512, "512kb_300.json", 210, 90),
+            (1000, "1mb_300.json", 210, 90),
+            (8000, "8mb_250.json", 175, 75),
+            (32000, "32mb_200.json", 140, 60),
+            (128000, "128mb_100.json", 70, 30),
+            (512000, "512mb_50.json", 35, 15),
+        ],
+    )
+    @pytest.mark.parametrize(
+        "load_time", [LoadTime.EXPECTED_MAXIMUM.value, LoadTime.PMI_EXPECTATION.value]
+    )
+    @pytest.mark.benchmark
+    @pytest.mark.s3
+    def test_s3_benchmark_write_read_70_30(
+        self,
+        obj_size,
+        out_file,
+        writers,
+        readers,
+        load_time,
+        hosting: Hosting,
+    ):
+        allure.dynamic.title(
+            f"Single gate benchmark write + read (70%/30%) test - "
+            f"writers = {writers}, "
+            f"readers = {readers}, "
+            f"obj_size = {obj_size}, "
+            f"load_time = {load_time}"
+        )
+        with allure.step("Get endpoints"):
+            endpoints_list = get_services_endpoints(
+                hosting=hosting,
+                service_name_regex=S3_GATE_SERVICE_NAME_REGEX,
+                endpoint_attribute="endpoint",
+            )
+            endpoints = ",".join(endpoints_list[:1])
+        load_params = LoadParams(
+            endpoint=endpoints,
+            obj_size=obj_size,
+            containers_count=CONTAINERS_COUNT,
+            out_file=out_file,
+            obj_count=500,
+            writers=writers,
+            readers=readers,
+            deleters=0,
+            load_time=load_time,
+            load_type="s3",
+        )
+        k6_load_instances = prepare_k6_instances(
+            load_nodes=LOAD_NODES,
+            login=LOAD_NODE_SSH_USER,
+            pkey=LOAD_NODE_SSH_PRIVATE_KEY_PATH,
+            load_params=load_params,
+        )
+        with allure.step("Run load"):
+            multi_node_k6_run(k6_load_instances)
+
+    @pytest.mark.parametrize(
+        "obj_size, out_file, readers",
+        [
+            (4, "4kb_400.json", 400),
+            (16, "16kb_400.json", 400),
+            (64, "64kb_350.json", 350),
+            (128, "128kb_300.json", 300),
+            (512, "512kb_300.json", 300),
+            (1000, "1mb_300.json", 300),
+            (8000, "8mb_300.json", 300),
+            (32000, "32mb_200.json", 200),
+            (128000, "128mb_150.json", 150),
+            (512000, "512mb_50.json", 50),
+        ],
+    )
+    @pytest.mark.parametrize(
+        "load_time", [LoadTime.EXPECTED_MAXIMUM.value, LoadTime.PMI_EXPECTATION.value]
+    )
+    @pytest.mark.benchmark
+    @pytest.mark.s3
+    def test_s3_benchmark_read(
+        self,
+        obj_size,
+        out_file,
+        readers,
+        load_time,
+        hosting: Hosting,
+    ):
+        allure.dynamic.title(
+            f"Single gate benchmark read test - "
+            f"readers = {readers}, "
+            f"obj_size = {obj_size}, "
+            f"load_time = {load_time}"
+        )
+        with allure.step("Get endpoints"):
+            endpoints_list = get_services_endpoints(
+                hosting=hosting,
+                service_name_regex=S3_GATE_SERVICE_NAME_REGEX,
+                endpoint_attribute="endpoint",
+            )
+            endpoints = ",".join(endpoints_list[:1])
+        load_params = LoadParams(
+            endpoint=endpoints,
+            obj_size=obj_size,
+            containers_count=1,
+            out_file=out_file,
+            obj_count=500,
+            writers=0,
+            readers=readers,
+            deleters=0,
+            load_time=load_time,
+            load_type="s3",
+        )
+        k6_load_instances = prepare_k6_instances(
+            load_nodes=LOAD_NODES,
+            login=LOAD_NODE_SSH_USER,
+            pkey=LOAD_NODE_SSH_PRIVATE_KEY_PATH,
+            load_params=load_params,
+        )
+        with allure.step("Run load"):
+            multi_node_k6_run(k6_load_instances)
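All three tests benchmark a single gate, as the titles say: `",".join(endpoints_list[:1])` keeps only the first S3 gate endpoint, while the same join over the full list would produce the comma-separated multi-gate form the endpoint parameter appears to accept. For illustration (hostnames are invented):

    endpoints_list = ["s3gate01.example.org:8080", "s3gate02.example.org:8080"]  # hypothetical
    single_gate = ",".join(endpoints_list[:1])  # "s3gate01.example.org:8080"
    all_gates = ",".join(endpoints_list)        # "s3gate01.example.org:8080,s3gate02.example.org:8080"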


@@ -35,7 +35,7 @@ neo-mamba==0.10.0
 neo3crypto==0.2.1
 neo3vm==0.9.0
 neo3vm-stubs==0.9.0
-neofs-testlib==0.4.0
+neofs-testlib==0.5.0
 netaddr==0.8.0
 orjson==3.6.8
 packaging==21.3