Add background load fixture

Signed-off-by: Vladimir Avdeev <v.avdeev@yadro.com>
support/v0.36
Vladimir Avdeev 2022-11-28 22:27:37 +03:00 committed by Vladimir Avdeev
parent 9b0ac8579b
commit d9e881001e
5 changed files with 142 additions and 28 deletions

View File

@@ -2,6 +2,7 @@ import re
from contextlib import contextmanager
from dataclasses import dataclass
from time import sleep
+from typing import Optional
import allure
from neofs_testlib.shell import Shell
@@ -23,16 +24,18 @@ LOAD_RESULTS_PATTERNS = {
@dataclass
class LoadParams:
-    obj_size: int
-    containers_count: int
-    out_file: str
-    obj_count: int
-    writers: int
-    readers: int
-    deleters: int
-    load_time: int
    load_type: str
    endpoint: str
+    writers: Optional[int] = None
+    readers: Optional[int] = None
+    deleters: Optional[int] = None
+    clients: Optional[int] = None
+    containers_count: Optional[int] = None
+    out_file: Optional[str] = None
+    load_time: Optional[int] = None
+    obj_count: Optional[int] = None
+    obj_size: Optional[int] = None
+    registry_file: Optional[str] = None
@dataclass
@@ -97,19 +100,36 @@ class K6:
else:
raise AssertionError("Wrong K6 load type")
@allure.step("Generate K6 command")
def _generate_env_variables(self, load_params: LoadParams, k6_dir: str) -> str:
env_vars = {
"DURATION": load_params.load_time or None,
"WRITE_OBJ_SIZE": load_params.obj_size or None,
"WRITERS": load_params.writers or 0,
"READERS": load_params.readers or 0,
"DELETERS": load_params.deleters or 0,
"REGISTRY_FILE": load_params.registry_file or None,
"CLIENTS": load_params.clients or None,
f"{self.load_params.load_type.upper()}_ENDPOINTS": self.load_params.endpoint,
"PREGEN_JSON": f"{self.k6_dir}/{self.load_params.load_type}_{self.load_params.out_file}"
if load_params.out_file
else None,
}
allure.attach(
"\n".join(f"{param}: {value}" for param, value in env_vars.items()),
"K6 ENV variables",
allure.attachment_type.TEXT,
)
return " ".join(
[f"-e {param}={value}" for param, value in env_vars.items() if value is not None]
)
@allure.step("Start K6 on initiator")
def start(self) -> None:
self._k6_dir = self.k6_dir
command = (
f"{self.k6_dir}/k6 run "
f"-e DURATION={self.load_params.load_time} "
f"-e WRITE_OBJ_SIZE={self.load_params.obj_size} "
f"-e WRITERS={self.load_params.writers} -e READERS={self.load_params.readers} "
f"-e DELETERS={self.load_params.deleters} "
f"-e {self.load_params.load_type.upper()}_ENDPOINTS={self.load_params.endpoint} "
f"-e PREGEN_JSON={self.k6_dir}/"
f"{self.load_params.load_type}_{self.load_params.out_file} "
f"{self.k6_dir}/k6 run {self._generate_env_variables(self.load_params, self.k6_dir)} "
f"{self.k6_dir}/scenarios/{self.load_params.load_type}.js"
)
self._k6_process = RemoteProcess.create(command, self.shell)
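For context, here is a minimal standalone sketch of what the rewritten command assembly boils down to for a gRPC write load. All endpoints, paths and values below are invented for illustration and are not taken from the diff:

env_vars = {
    "DURATION": 600,
    "WRITE_OBJ_SIZE": 1024,
    "WRITERS": 10,
    "READERS": 0,
    "DELETERS": 0,
    "REGISTRY_FILE": "/tmp/registry.bolt",
    "CLIENTS": None,  # None entries are filtered out below, so the flag is omitted entirely
    "GRPC_ENDPOINTS": "10.78.70.1:8080,10.78.70.2:8080",
    "PREGEN_JSON": "/opt/k6/grpc_prepare.json",
}
args = " ".join(f"-e {param}={value}" for param, value in env_vars.items() if value is not None)
print(f"/opt/k6/k6 run {args} /opt/k6/scenarios/grpc.js")
# -> /opt/k6/k6 run -e DURATION=600 -e WRITE_OBJ_SIZE=1024 ... -e PREGEN_JSON=/opt/k6/grpc_prepare.json /opt/k6/scenarios/grpc.js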

View File

@@ -86,15 +86,18 @@ def prepare_objects(k6_instance: K6):
@allure.title("Prepare K6 instances and objects")
-def prepare_k6_instances(load_nodes: list, login: str, pkey: str, load_params: LoadParams) -> list:
+def prepare_k6_instances(
+    load_nodes: list, login: str, pkey: str, load_params: LoadParams, prepare: bool = True
+) -> list:
k6_load_objects = []
for load_node in load_nodes:
ssh_client = SSHShell(host=load_node, login=login, private_key_path=pkey)
k6_load_object = K6(load_params, ssh_client)
k6_load_objects.append(k6_load_object)
for k6_load_object in k6_load_objects:
with allure.step("Prepare objects"):
prepare_objects(k6_load_object)
if prepare:
with allure.step("Prepare objects"):
prepare_objects(k6_load_object)
return k6_load_objects
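A hypothetical pair of call sites (not part of this commit) showing how the new prepare flag is meant to be used; load_params and verify_params are assumed to be LoadParams instances defined elsewhere, and the host, login and key path are invented:

write_instances = prepare_k6_instances(
    load_nodes=["10.78.70.10"], login="root", pkey="/root/.ssh/id_rsa", load_params=load_params
)  # prepare=True by default: containers and objects are generated before the load starts
verify_instances = prepare_k6_instances(
    load_nodes=["10.78.70.10"], login="root", pkey="/root/.ssh/id_rsa",
    load_params=verify_params, prepare=False
)  # the verify pass reads what was already written, so object preparation is skipped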

View File

@@ -9,8 +9,24 @@ import allure
import pytest
import yaml
from binary_version_helper import get_local_binaries_versions, get_remote_binaries_versions
-from common import ASSETS_DIR, FREE_STORAGE, HOSTING_CONFIG_FILE, NEOFS_NETMAP_DICT, WALLET_PASS
+from common import (
+    ASSETS_DIR,
+    BACKGROUND_LOAD_MAX_TIME,
+    BACKGROUND_OBJ_SIZE,
+    BACKGROUND_READERS_COUNT,
+    BACKGROUND_WRITERS_COUNT,
+    FREE_STORAGE,
+    HOSTING_CONFIG_FILE,
+    LOAD_NODE_SSH_PRIVATE_KEY_PATH,
+    LOAD_NODE_SSH_USER,
+    LOAD_NODES,
+    NEOFS_NETMAP_DICT,
+    STORAGE_NODE_SERVICE_NAME_REGEX,
+    WALLET_PASS,
+)
from env_properties import save_env_properties
from k6 import LoadParams
from load import get_services_endpoints, prepare_k6_instances
from neofs_testlib.hosting import Hosting
from neofs_testlib.reporter import AllureHandler, get_reporter
from neofs_testlib.shell import LocalShell, Shell
@@ -138,6 +154,71 @@ def run_health_check(collect_logs, hosting: Hosting):
raise AssertionError(f"Nodes {failed_nodes} are not healthy")
@pytest.fixture(scope="session")
def background_grpc_load(client_shell, prepare_wallet_and_deposit):
registry_file = os.path.join("/tmp/", f"{str(uuid.uuid4())}.bolt")
prepare_file = os.path.join("/tmp/", f"{str(uuid.uuid4())}.json")
allure.dynamic.title(
f"Start background load with parameters: "
f"writers = {BACKGROUND_WRITERS_COUNT}, "
f"obj_size = {BACKGROUND_OBJ_SIZE}, "
f"load_time = {BACKGROUND_LOAD_MAX_TIME}"
f"prepare_json = {prepare_file}"
)
with allure.step("Get endpoints"):
endpoints_list = get_services_endpoints(
hosting=hosting,
service_name_regex=STORAGE_NODE_SERVICE_NAME_REGEX,
endpoint_attribute="rpc_endpoint",
)
endpoints = ",".join(endpoints_list)
load_params = LoadParams(
endpoint=endpoints,
obj_size=BACKGROUND_OBJ_SIZE,
registry_file=registry_file,
containers_count=1,
obj_count=0,
out_file=prepare_file,
readers=0,
writers=BACKGROUND_WRITERS_COUNT,
deleters=0,
load_time=BACKGROUND_LOAD_MAX_TIME,
load_type="grpc",
)
k6_load_instances = prepare_k6_instances(
load_nodes=LOAD_NODES,
login=LOAD_NODE_SSH_USER,
pkey=LOAD_NODE_SSH_PRIVATE_KEY_PATH,
load_params=load_params,
)
with allure.step("Run background load"):
for k6_load_instance in k6_load_instances:
k6_load_instance.start()
yield
with allure.step("Stop background load"):
for k6_load_instance in k6_load_instances:
k6_load_instance.stop()
with allure.step("Verify background load data"):
verify_params = LoadParams(
endpoint=endpoints,
clients=BACKGROUND_READERS_COUNT,
registry_file=registry_file,
load_time=BACKGROUND_LOAD_MAX_TIME,
load_type="verify",
)
k6_verify_instances = prepare_k6_instances(
load_nodes=LOAD_NODES,
login=LOAD_NODE_SSH_USER,
pkey=LOAD_NODE_SSH_PRIVATE_KEY_PATH,
load_params=verify_params,
prepare=False,
)
with allure.step("Run verify background load data"):
for k6_verify_instance in k6_verify_instances:
k6_verify_instance.start()
k6_verify_instance.wait_until_finished(BACKGROUND_LOAD_MAX_TIME)
@pytest.fixture(scope="session")
@allure.title("Prepare wallet and deposit")
def prepare_wallet_and_deposit(client_shell, prepare_tmp_dir):
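A hypothetical usage sketch (not part of this commit): a test module opts into the background load by requesting the session-scoped fixture, which keeps the writers running until teardown and then verifies the written objects. The test name and body are invented:

import pytest

@pytest.mark.usefixtures("background_grpc_load")
def test_survives_under_background_load():
    # background gRPC writers started by the fixture keep running while this test executes;
    # on session teardown the fixture stops them and runs the verify scenario
    ...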

View File

@@ -89,6 +89,18 @@ def return_nodes_after_test_run(client_shell: Shell, hosting: Hosting):
return_nodes(client_shell, hosting)
@allure.step("Tick epoch with retries")
def tick_epoch_with_retries(shell: Shell, attempts: int = 3, timeout: int = 3):
for __attempt in range(attempts):
try:
tick_epoch(shell=shell)
except RuntimeError:
sleep(timeout)
continue
return
raise
@allure.step("Return node to cluster")
def return_nodes(shell: Shell, hosting: Hosting, alive_node: Optional[str] = None) -> None:
for node in list(check_nodes):
@@ -107,13 +119,7 @@ def return_nodes(shell: Shell, hosting: Hosting, alive_node: Optional[str] = Non
check_nodes.remove(node)
sleep(parse_time(MORPH_BLOCK_TIME))
-        for __attempt in range(3):
-            try:
-                tick_epoch(shell=shell)
-                break
-            except RuntimeError:
-                sleep(3)
+        tick_epoch_with_retries(shell=shell, attempts=3)
check_node_in_map(node, shell=shell, alive_node=alive_node)
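For illustration, the extracted helper can also be called with non-default settings; this one-liner is a hypothetical example, not code from the diff, and assumes shell is an already-created Shell instance:

tick_epoch_with_retries(shell=shell, attempts=5, timeout=10)  # up to 5 tries, 10 s pause between attempts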

View File

@ -36,8 +36,12 @@ WALLET_PASS = os.getenv("WALLET_PASS", "")
# Load node parameters
LOAD_NODES = os.getenv("LOAD_NODES", "").split(",")
LOAD_NODE_SSH_USER = os.getenv("LOAD_NODE_SSH_USER")
LOAD_NODE_SSH_USER = os.getenv("LOAD_NODE_SSH_USER", "root")
LOAD_NODE_SSH_PRIVATE_KEY_PATH = os.getenv("LOAD_NODE_SSH_PRIVATE_KEY_PATH")
+BACKGROUND_WRITERS_COUNT = int(os.getenv("BACKGROUND_WRITERS_COUNT", 10))
+BACKGROUND_READERS_COUNT = int(os.getenv("BACKGROUND_READERS_COUNT", 10))
+BACKGROUND_OBJ_SIZE = int(os.getenv("BACKGROUND_OBJ_SIZE", 1024))
+BACKGROUND_LOAD_MAX_TIME = int(os.getenv("BACKGROUND_LOAD_MAX_TIME", 600))
# Configuration of storage nodes
# TODO: we should use hosting instead of all these variables
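A small standalone sketch (invented values) of how the new background-load settings resolve from the environment, assuming they are read as integers:

import os

os.environ["BACKGROUND_WRITERS_COUNT"] = "20"               # e.g. overridden in CI
writers = int(os.getenv("BACKGROUND_WRITERS_COUNT", 10))    # -> 20
readers = int(os.getenv("BACKGROUND_READERS_COUNT", 10))    # -> 10, the default
obj_size = int(os.getenv("BACKGROUND_OBJ_SIZE", 1024))      # -> 1024 bytes, the default
max_time = int(os.getenv("BACKGROUND_LOAD_MAX_TIME", 600))  # -> 600 seconds, the default
print(writers, readers, obj_size, max_time)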