forked from TrueCloudLab/frostfs-testcases
Updates for testcases
Signed-off-by: Andrey Berezin <a.berezin@yadro.com>
This commit is contained in:
parent
cff0e0f23e
commit
c997e23194
4 changed files with 65 additions and 57 deletions
|
@ -1,6 +1,5 @@
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
import re
|
|
||||||
import shutil
|
import shutil
|
||||||
import uuid
|
import uuid
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
|
@ -44,11 +43,20 @@ from pytest_tests.steps.load import get_services_endpoints, prepare_k6_instances
|
||||||
logger = logging.getLogger("NeoLogger")
|
logger = logging.getLogger("NeoLogger")
|
||||||
|
|
||||||
|
|
||||||
|
# Add the logs check test to every run, even when it does not match the -m selectors
def pytest_configure(config: "pytest.Config"):
    """pytest hook (do not rename): force-select the logs_after_session test.

    Wraps any user-supplied ``-m`` expression in
    ``logs_after_session or (<expr>)`` so the end-of-session log check always
    runs regardless of mark filtering.
    """
    markers = config.option.markexpr
    # Truthiness check instead of `!= ""`: also guards against a None markexpr,
    # and an empty expression means "no filter", so nothing needs wrapping.
    if markers:
        config.option.markexpr = f"logs_after_session or ({markers})"
|
||||||
|
|
||||||
|
|
||||||
|
# pytest hook. Do not rename
def pytest_collection_modifyitems(items):
    """pytest hook: reorder collected tests.

    node_mgmt (network) tests run after regular tests, and the
    logs_after_session check sorts last of all.
    """

    # Make network tests last based on @pytest.mark.node_mgmt and logs_test to be latest
    def priority(item: "pytest.Item") -> int:
        # Weights: plain test = 0, node_mgmt = 1, logs check = 100, so the
        # logs check always outranks everything else in the sort.
        is_node_mgmt_test = 1 if item.get_closest_marker("node_mgmt") else 0
        is_logs_check_test = 100 if item.get_closest_marker("logs_after_session") else 0
        return is_node_mgmt_test + is_logs_check_test

    # Pass the key function directly; the lambda wrapper was redundant.
    items.sort(key=priority)
|
||||||
|
|
||||||
|
@ -145,23 +153,16 @@ def temp_directory():
|
||||||
shutil.rmtree(full_path)
|
shutil.rmtree(full_path)
|
||||||
|
|
||||||
|
|
||||||
|
@allure.step("[Autouse/Session] Test session start time")
@pytest.fixture(scope="session", autouse=True)
def session_start_time():
    """Session-scoped fixture: the UTC moment the test session began.

    Consumed by the end-of-session log check as the lower time bound.
    NOTE(review): returns a naive UTC datetime (datetime.utcnow()); callers
    compare it against other naive UTC stamps — keep them consistent.
    """
    return datetime.utcnow()
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(scope="session", autouse=True)
|
@pytest.fixture(scope="session", autouse=True)
|
||||||
@allure.title("Run health check for all storage nodes")
|
@allure.title("Run health check for all storage nodes")
|
||||||
def run_health_check(collect_logs, cluster: Cluster):
|
def run_health_check(session_start_time, cluster: Cluster):
|
||||||
failed_nodes = []
|
failed_nodes = []
|
||||||
for node in cluster.storage_nodes:
|
for node in cluster.storage_nodes:
|
||||||
health_check = storage_node_healthcheck(node)
|
health_check = storage_node_healthcheck(node)
|
||||||
|
@ -263,44 +264,3 @@ def default_wallet(client_shell: Shell, temp_directory: str, cluster: Cluster):
|
||||||
)
|
)
|
||||||
|
|
||||||
return wallet_path
|
return wallet_path
|
||||||
|
|
||||||
|
|
||||||
@allure.title("Check logs for OOM and PANIC entries in {logs_dir}")
|
|
||||||
def check_logs(logs_dir: str):
|
|
||||||
problem_pattern = r"\Wpanic\W|\Woom\W|\Wtoo many open files\W"
|
|
||||||
|
|
||||||
log_file_paths = []
|
|
||||||
for directory_path, _, file_names in os.walk(logs_dir):
|
|
||||||
log_file_paths += [
|
|
||||||
os.path.join(directory_path, file_name)
|
|
||||||
for file_name in file_names
|
|
||||||
if re.match(r"\.(txt|log)", os.path.splitext(file_name)[-1], flags=re.IGNORECASE)
|
|
||||||
]
|
|
||||||
|
|
||||||
logs_with_problem = []
|
|
||||||
for file_path in log_file_paths:
|
|
||||||
with allure.step(f"Check log file {file_path}"):
|
|
||||||
with open(file_path, "r") as log_file:
|
|
||||||
if re.search(problem_pattern, log_file.read(), flags=re.IGNORECASE):
|
|
||||||
logs_with_problem.append(file_path)
|
|
||||||
if logs_with_problem:
|
|
||||||
raise pytest.fail(f"System logs {', '.join(logs_with_problem)} contain critical errors")
|
|
||||||
|
|
||||||
|
|
||||||
def dump_logs(hosting: Hosting, logs_dir: str, since: datetime, until: datetime) -> None:
    """Collect service logs from every host into logs_dir (best effort).

    Logs are dumped to disk because they might be too large to keep in RAM.
    """
    os.makedirs(logs_dir)

    for current_host in hosting.hosts:
        with allure.step(f"Dump logs from host {current_host.config.address}"):
            try:
                current_host.dump_logs(logs_dir, since=since, until=until)
            except Exception as error:
                # Best effort: one unreachable host must not abort collection.
                logger.warning(f"Exception during logs collection: {error}")
|
|
||||||
|
|
||||||
|
|
||||||
def attach_logs(logs_dir: str) -> None:
    """Zip logs_dir and attach the archive to the Allure report."""
    # A single zip with all logs is more convenient to download than
    # individual log files per service or node.
    archive_path = shutil.make_archive(logs_dir, "zip", logs_dir)
    allure.attach.file(archive_path, name="logs.zip", extension="zip")
|
|
||||||
|
|
|
@ -20,8 +20,8 @@ logger = logging.getLogger("NeoLogger")
|
||||||
stopped_nodes: list[StorageNode] = []
|
stopped_nodes: list[StorageNode] = []
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(scope="function", autouse=True)
|
|
||||||
@allure.step("Return all stopped hosts")
|
@allure.step("Return all stopped hosts")
|
||||||
|
@pytest.fixture(scope="function", autouse=True)
|
||||||
def after_run_return_all_stopped_hosts(cluster: Cluster):
|
def after_run_return_all_stopped_hosts(cluster: Cluster):
|
||||||
yield
|
yield
|
||||||
return_stopped_hosts(cluster)
|
return_stopped_hosts(cluster)
|
||||||
|
|
|
@ -51,6 +51,7 @@ class Test_http_object(ClusterTestBase):
|
||||||
Expected result:
|
Expected result:
|
||||||
Hashes must be the same.
|
Hashes must be the same.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
with allure.step("Create public container"):
|
with allure.step("Create public container"):
|
||||||
cid = create_container(
|
cid = create_container(
|
||||||
self.wallet,
|
self.wallet,
|
||||||
|
|
47
pytest_tests/testsuites/special/test_logs.py
Normal file
47
pytest_tests/testsuites/special/test_logs.py
Normal file
|
@ -0,0 +1,47 @@
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
import allure
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from pytest_tests.steps.cluster_test_base import ClusterTestBase
|
||||||
|
|
||||||
|
|
||||||
|
class TestLogs(ClusterTestBase):
    @pytest.mark.logs_after_session
    def test_logs_after_session(self, temp_directory: str, session_start_time: datetime):
        """
        This test is automatically added to any test run to check cluster logs
        for critical errors (see pytest_configure in conftest).
        """
        end_time = datetime.utcnow()
        logs_dir = os.path.join(temp_directory, "logs")
        os.makedirs(logs_dir)
        # Entries that indicate a node-level failure.
        issues_regex = r"\Wpanic\W|\Woom\W|\Wtoo many open files\W"

        hosts_with_problems = []
        for host in self.cluster.hosts:
            with allure.step(f"Check logs on {host.config.address}"):
                if host.is_message_in_logs(issues_regex, session_start_time, end_time):
                    hosts_with_problems.append(host.config.address)
                    # Dump only the matching entries for the Allure attachment.
                    host.dump_logs(
                        logs_dir,
                        since=session_start_time,
                        until=end_time,
                        filter_regex=issues_regex,
                    )

        if hosts_with_problems:
            self._attach_logs(logs_dir)

        # Fixed message: was "hosts contains contain critical errors".
        assert (
            not hosts_with_problems
        ), f"The following hosts contain critical errors in system logs: {', '.join(hosts_with_problems)}"

    def _attach_logs(self, logs_dir: str) -> None:
        # Zip all files and attach to Allure because it is more convenient to download a single
        # zip with all logs rather than mess with individual logs files per service or node
        logs_zip_file_path = shutil.make_archive(logs_dir, "zip", logs_dir)
        allure.attach.file(logs_zip_file_path, name="logs.zip", extension="zip")
|
Loading…
Reference in a new issue