import logging
import os
import re
import shutil
import uuid
from datetime import datetime

import allure
import pytest
import yaml
from binary_version_helper import get_local_binaries_versions, get_remote_binaries_versions
from common import (
    ASSETS_DIR,
    BACKGROUND_LOAD_MAX_TIME,
    BACKGROUND_OBJ_SIZE,
    BACKGROUND_READERS_COUNT,
    BACKGROUND_WRITERS_COUNT,
    FREE_STORAGE,
    HOSTING_CONFIG_FILE,
    LOAD_NODE_SSH_PRIVATE_KEY_PATH,
    LOAD_NODE_SSH_USER,
    LOAD_NODES,
    NEOFS_NETMAP_DICT,
    STORAGE_NODE_SERVICE_NAME_REGEX,
    WALLET_PASS,
)
from env_properties import save_env_properties
from k6 import LoadParams
from load import get_services_endpoints, prepare_k6_instances
from neofs_testlib.hosting import Hosting
from neofs_testlib.reporter import AllureHandler, get_reporter
from neofs_testlib.shell import LocalShell, Shell
from neofs_testlib.utils.wallet import init_wallet
from payment_neogo import deposit_gas, transfer_gas
from pytest import FixtureRequest
from python_keywords.node_management import node_healthcheck
from wallet import WalletFactory

logger = logging.getLogger("NeoLogger")


def pytest_collection_modifyitems(items):
    # Make node management tests run last, based on the @pytest.mark.node_mgmt marker
    def priority(item: pytest.Item) -> int:
        is_node_mgmt_test = item.get_closest_marker("node_mgmt")
        return 0 if not is_node_mgmt_test else 1

    items.sort(key=priority)
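
# Illustrative example (not part of this conftest): a test opts into the late
# "node management" ordering bucket via the marker checked above. The test name
# below is hypothetical.
#
#     @pytest.mark.node_mgmt
#     def test_storage_node_restart():
#         ...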


@pytest.fixture(scope="session")
def configure_testlib():
    get_reporter().register_handler(AllureHandler())
    yield


@pytest.fixture(scope="session")
def client_shell(configure_testlib) -> Shell:
    yield LocalShell()


@pytest.fixture(scope="session")
def hosting(configure_testlib) -> Hosting:
    with open(HOSTING_CONFIG_FILE, "r") as file:
        hosting_config = yaml.full_load(file)

    hosting_instance = Hosting()
    hosting_instance.configure(hosting_config)
    yield hosting_instance


@pytest.fixture(scope="session")
def require_multiple_hosts(hosting: Hosting):
    """Designates tests that require an environment with multiple hosts.

    These tests will be skipped on an environment that has only one host.
    """
    if len(hosting.hosts) <= 1:
        pytest.skip("Test only works with multiple hosts")
    yield
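
# Illustrative example (not part of this conftest): a test that only makes sense on a
# multi-host environment requests this fixture and is skipped otherwise. The test name
# below is hypothetical.
#
#     def test_data_survives_host_outage(require_multiple_hosts):
#         ...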


@pytest.fixture(scope="session")
def wallet_factory(prepare_tmp_dir: str, client_shell: Shell) -> WalletFactory:
    return WalletFactory(prepare_tmp_dir, client_shell)


@pytest.fixture(scope="session", autouse=True)
@allure.title("Check binary versions")
def check_binary_versions(request, hosting: Hosting, client_shell: Shell):
    local_versions = get_local_binaries_versions(client_shell)
    remote_versions = get_remote_binaries_versions(hosting)

    all_versions = {**local_versions, **remote_versions}
    save_env_properties(request.config, all_versions)


@pytest.fixture(scope="session")
@allure.title("Prepare tmp directory")
def prepare_tmp_dir():
    with allure.step("Prepare tmp directory"):
        full_path = os.path.join(os.getcwd(), ASSETS_DIR)
        shutil.rmtree(full_path, ignore_errors=True)
        os.mkdir(full_path)

    yield full_path

    with allure.step("Remove tmp directory"):
        shutil.rmtree(full_path)


@pytest.fixture(scope="function", autouse=True)
@allure.title("Analyze logs")
def analyze_logs(prepare_tmp_dir: str, hosting: Hosting, request: FixtureRequest):
    start_time = datetime.utcnow()
    yield
    end_time = datetime.utcnow()

    # Skip tests where we expect failures in logs
    if request.node.get_closest_marker("no_log_analyze"):
        with allure.step("Skip log analysis due to no_log_analyze mark"):
            return

    # The test name may exceed the OS NAME_MAX limit (255 bytes), so the test start
    # datetime is used for the logs directory name instead
    start_time_str = start_time.strftime("%Y_%m_%d_%H_%M_%S_%f")
    logs_dir = os.path.join(prepare_tmp_dir, f"logs_{start_time_str}")
    dump_logs(hosting, logs_dir, start_time, end_time)
    check_logs(logs_dir)
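
# Illustrative example (not part of this conftest): a test that deliberately provokes
# failures (e.g. crashes a service) can opt out of log analysis with the marker
# checked above. The test name below is hypothetical.
#
#     @pytest.mark.no_log_analyze
#     def test_behaviour_on_service_crash():
#         ...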


@pytest.fixture(scope="session", autouse=True)
@allure.title("Collect logs")
def collect_logs(prepare_tmp_dir, hosting: Hosting):
    start_time = datetime.utcnow()
    yield
    end_time = datetime.utcnow()

    # Dump logs to temp directory (because they might be too large to keep in RAM)
    logs_dir = os.path.join(prepare_tmp_dir, "logs")
    dump_logs(hosting, logs_dir, start_time, end_time)
    attach_logs(logs_dir)


@pytest.fixture(scope="session", autouse=True)
@allure.title("Run health check for all storage nodes")
def run_health_check(collect_logs, hosting: Hosting):
    failed_nodes = []
    for node_name in NEOFS_NETMAP_DICT.keys():
        health_check = node_healthcheck(hosting, node_name)
        if health_check.health_status != "READY" or health_check.network_status != "ONLINE":
            failed_nodes.append(node_name)

    if failed_nodes:
        raise AssertionError(f"Nodes {failed_nodes} are not healthy")


@pytest.fixture(scope="session")
def background_grpc_load(client_shell, hosting: Hosting, prepare_wallet_and_deposit):
    registry_file = os.path.join("/tmp/", f"{str(uuid.uuid4())}.bolt")
    prepare_file = os.path.join("/tmp/", f"{str(uuid.uuid4())}.json")
    allure.dynamic.title(
        f"Start background load with parameters: "
        f"writers = {BACKGROUND_WRITERS_COUNT}, "
        f"obj_size = {BACKGROUND_OBJ_SIZE}, "
        f"load_time = {BACKGROUND_LOAD_MAX_TIME}, "
        f"prepare_json = {prepare_file}"
    )
    with allure.step("Get endpoints"):
        endpoints_list = get_services_endpoints(
            hosting=hosting,
            service_name_regex=STORAGE_NODE_SERVICE_NAME_REGEX,
            endpoint_attribute="rpc_endpoint",
        )
        endpoints = ",".join(endpoints_list)
    load_params = LoadParams(
        endpoint=endpoints,
        obj_size=BACKGROUND_OBJ_SIZE,
        registry_file=registry_file,
        containers_count=1,
        obj_count=0,
        out_file=prepare_file,
        readers=0,
        writers=BACKGROUND_WRITERS_COUNT,
        deleters=0,
        load_time=BACKGROUND_LOAD_MAX_TIME,
        load_type="grpc",
    )
    k6_load_instances = prepare_k6_instances(
        load_nodes=LOAD_NODES,
        login=LOAD_NODE_SSH_USER,
        pkey=LOAD_NODE_SSH_PRIVATE_KEY_PATH,
        load_params=load_params,
    )
    with allure.step("Run background load"):
        for k6_load_instance in k6_load_instances:
            k6_load_instance.start()

    yield

    with allure.step("Stop background load"):
        for k6_load_instance in k6_load_instances:
            k6_load_instance.stop()
    with allure.step("Verify background load data"):
        verify_params = LoadParams(
            endpoint=endpoints,
            clients=BACKGROUND_READERS_COUNT,
            registry_file=registry_file,
            load_time=BACKGROUND_LOAD_MAX_TIME,
            load_type="verify",
        )
        k6_verify_instances = prepare_k6_instances(
            load_nodes=LOAD_NODES,
            login=LOAD_NODE_SSH_USER,
            pkey=LOAD_NODE_SSH_PRIVATE_KEY_PATH,
            load_params=verify_params,
            prepare=False,
        )
    with allure.step("Run verify background load data"):
        for k6_verify_instance in k6_verify_instances:
            k6_verify_instance.start()
            k6_verify_instance.wait_until_finished(BACKGROUND_LOAD_MAX_TIME)
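
# Illustrative example (not part of this conftest): a test that should run while the
# cluster is under write load simply requests the fixture above. The test name below
# is hypothetical.
#
#     def test_operations_under_background_load(background_grpc_load, prepare_wallet_and_deposit):
#         ...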


@pytest.fixture(scope="session")
@allure.title("Prepare wallet and deposit")
def prepare_wallet_and_deposit(client_shell, prepare_tmp_dir):
    wallet_path = os.path.join(os.getcwd(), ASSETS_DIR, f"{str(uuid.uuid4())}.json")
    init_wallet(wallet_path, WALLET_PASS)
    allure.attach.file(wallet_path, os.path.basename(wallet_path), allure.attachment_type.JSON)

    if not FREE_STORAGE:
        deposit = 30
        transfer_gas(
            shell=client_shell,
            amount=deposit + 1,
            wallet_to_path=wallet_path,
            wallet_to_password=WALLET_PASS,
        )
        deposit_gas(
            shell=client_shell,
            amount=deposit,
            wallet_from_path=wallet_path,
            wallet_from_password=WALLET_PASS,
        )

    return wallet_path
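
# Illustrative example (not part of this conftest): a test receives the path to a
# wallet (funded unless FREE_STORAGE is set) by requesting the fixture above. The
# test name below is hypothetical.
#
#     def test_put_object(prepare_wallet_and_deposit):
#         wallet_path = prepare_wallet_and_deposit
#         ...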


def check_logs(logs_dir: str):
    problem_pattern = r"\Wpanic\W|\Woom\W"

    log_file_paths = []
    for directory_path, _, file_names in os.walk(logs_dir):
        log_file_paths += [
            os.path.join(directory_path, file_name)
            for file_name in file_names
            if re.match(r"\.(txt|log)", os.path.splitext(file_name)[-1], flags=re.IGNORECASE)
        ]

    logs_with_problem = []
    for file_path in log_file_paths:
        with open(file_path, "r") as log_file:
            if re.search(problem_pattern, log_file.read(), flags=re.IGNORECASE):
                attach_logs(logs_dir)
                logs_with_problem.append(file_path)
    if logs_with_problem:
        pytest.fail(f"System logs {', '.join(logs_with_problem)} contain critical errors")


def dump_logs(hosting: Hosting, logs_dir: str, since: datetime, until: datetime) -> None:
    # Dump logs to temp directory (because they might be too large to keep in RAM)
    os.makedirs(logs_dir)

    for host in hosting.hosts:
        host.dump_logs(logs_dir, since=since, until=until)


def attach_logs(logs_dir: str) -> None:
    # Zip all files and attach to Allure because it is more convenient to download a single
    # zip with all logs rather than mess with individual log files per service or node
    logs_zip_file_path = shutil.make_archive(logs_dir, "zip", logs_dir)
    allure.attach.file(logs_zip_file_path, name="logs.zip", extension="zip")