From c997e231942682d751692b54348dfb70064183fb Mon Sep 17 00:00:00 2001
From: Andrey Berezin
Date: Tue, 14 Mar 2023 12:21:40 +0300
Subject: [PATCH] Updates for testcases

Signed-off-by: Andrey Berezin
---
 pytest_tests/testsuites/conftest.py          | 72 +++++--------------
 .../failovers/test_failover_storage.py       |  2 +-
 .../services/http_gate/test_http_object.py   |  1 +
 pytest_tests/testsuites/special/test_logs.py | 47 ++++++++++++
 4 files changed, 65 insertions(+), 57 deletions(-)
 create mode 100644 pytest_tests/testsuites/special/test_logs.py

diff --git a/pytest_tests/testsuites/conftest.py b/pytest_tests/testsuites/conftest.py
index cf372f02..2ff85f41 100644
--- a/pytest_tests/testsuites/conftest.py
+++ b/pytest_tests/testsuites/conftest.py
@@ -1,6 +1,5 @@
 import logging
 import os
-import re
 import shutil
 import uuid
 from datetime import datetime
@@ -44,11 +43,20 @@ from pytest_tests.steps.load import get_services_endpoints, prepare_k6_instances
 
 logger = logging.getLogger("NeoLogger")
 
 
+# Add the logs check test even if it does not match the mark selectors
+def pytest_configure(config: pytest.Config):
+    markers = config.option.markexpr
+    if markers != "":
+        config.option.markexpr = f"logs_after_session or ({markers})"
+
+
+# pytest hook. Do not rename
 def pytest_collection_modifyitems(items):
-    # Make network tests last based on @pytest.mark.node_mgmt
+    # Make network tests (@pytest.mark.node_mgmt) run last and the logs check test run after all of them
     def priority(item: pytest.Item) -> int:
-        is_node_mgmt_test = item.get_closest_marker("node_mgmt")
-        return 0 if not is_node_mgmt_test else 1
+        is_node_mgmt_test = 1 if item.get_closest_marker("node_mgmt") else 0
+        is_logs_check_test = 100 if item.get_closest_marker("logs_after_session") else 0
+        return is_node_mgmt_test + is_logs_check_test
 
     items.sort(key=lambda item: priority(item))
@@ -145,23 +153,16 @@ def temp_directory():
     shutil.rmtree(full_path)
 
 
+@allure.step("[Autouse/Session] Test session start time")
 @pytest.fixture(scope="session", autouse=True)
-@allure.title("Collect logs")
-def collect_logs(temp_directory, hosting: Hosting):
+def session_start_time():
     start_time = datetime.utcnow()
-    yield
-    end_time = datetime.utcnow()
-
-    # Dump logs to temp directory (because they might be too large to keep in RAM)
-    logs_dir = os.path.join(temp_directory, "logs")
-    dump_logs(hosting, logs_dir, start_time, end_time)
-    attach_logs(logs_dir)
-    check_logs(logs_dir)
+    return start_time
 
 
 @pytest.fixture(scope="session", autouse=True)
 @allure.title("Run health check for all storage nodes")
-def run_health_check(collect_logs, cluster: Cluster):
+def run_health_check(session_start_time, cluster: Cluster):
     failed_nodes = []
     for node in cluster.storage_nodes:
         health_check = storage_node_healthcheck(node)
@@ -263,44 +264,3 @@ def default_wallet(client_shell: Shell, temp_directory: str, cluster: Cluster):
     )
 
     return wallet_path
-
-
-@allure.title("Check logs for OOM and PANIC entries in {logs_dir}")
-def check_logs(logs_dir: str):
-    problem_pattern = r"\Wpanic\W|\Woom\W|\Wtoo many open files\W"
-
-    log_file_paths = []
-    for directory_path, _, file_names in os.walk(logs_dir):
-        log_file_paths += [
-            os.path.join(directory_path, file_name)
-            for file_name in file_names
-            if re.match(r"\.(txt|log)", os.path.splitext(file_name)[-1], flags=re.IGNORECASE)
-        ]
-
-    logs_with_problem = []
-    for file_path in log_file_paths:
-        with allure.step(f"Check log file {file_path}"):
-            with open(file_path, "r") as log_file:
-                if re.search(problem_pattern, log_file.read(), flags=re.IGNORECASE):
-                    logs_with_problem.append(file_path)
-    if logs_with_problem:
-        raise pytest.fail(f"System logs {', '.join(logs_with_problem)} contain critical errors")
-
-
-def dump_logs(hosting: Hosting, logs_dir: str, since: datetime, until: datetime) -> None:
-    # Dump logs to temp directory (because they might be too large to keep in RAM)
-    os.makedirs(logs_dir)
-
-    for host in hosting.hosts:
-        with allure.step(f"Dump logs from host {host.config.address}"):
-            try:
-                host.dump_logs(logs_dir, since=since, until=until)
-            except Exception as ex:
-                logger.warning(f"Exception during logs collection: {ex}")
-
-
-def attach_logs(logs_dir: str) -> None:
-    # Zip all files and attach to Allure because it is more convenient to download a single
-    # zip with all logs rather than mess with individual logs files per service or node
-    logs_zip_file_path = shutil.make_archive(logs_dir, "zip", logs_dir)
-    allure.attach.file(logs_zip_file_path, name="logs.zip", extension="zip")
diff --git a/pytest_tests/testsuites/failovers/test_failover_storage.py b/pytest_tests/testsuites/failovers/test_failover_storage.py
index d906a6d7..167d1b9d 100644
--- a/pytest_tests/testsuites/failovers/test_failover_storage.py
+++ b/pytest_tests/testsuites/failovers/test_failover_storage.py
@@ -20,8 +20,8 @@ logger = logging.getLogger("NeoLogger")
 stopped_nodes: list[StorageNode] = []
 
 
-@pytest.fixture(scope="function", autouse=True)
 @allure.step("Return all stopped hosts")
+@pytest.fixture(scope="function", autouse=True)
 def after_run_return_all_stopped_hosts(cluster: Cluster):
     yield
     return_stopped_hosts(cluster)
diff --git a/pytest_tests/testsuites/services/http_gate/test_http_object.py b/pytest_tests/testsuites/services/http_gate/test_http_object.py
index 80eddf2c..a3f2bea2 100644
--- a/pytest_tests/testsuites/services/http_gate/test_http_object.py
+++ b/pytest_tests/testsuites/services/http_gate/test_http_object.py
@@ -51,6 +51,7 @@ class Test_http_object(ClusterTestBase):
 
         Expected result: Hashes must be the same.
         """
+
         with allure.step("Create public container"):
             cid = create_container(
                 self.wallet,
diff --git a/pytest_tests/testsuites/special/test_logs.py b/pytest_tests/testsuites/special/test_logs.py
new file mode 100644
index 00000000..b2f33d97
--- /dev/null
+++ b/pytest_tests/testsuites/special/test_logs.py
@@ -0,0 +1,47 @@
+import os
+import shutil
+from datetime import datetime
+
+import allure
+import pytest
+
+from pytest_tests.steps.cluster_test_base import ClusterTestBase
+
+
+class TestLogs(ClusterTestBase):
+    @pytest.mark.logs_after_session
+    def test_logs_after_session(self, temp_directory: str, session_start_time: datetime):
+        """
+        This test is automatically added to every test run to check the cluster logs for critical errors.
+
+        """
+
+        end_time = datetime.utcnow()
+        logs_dir = os.path.join(temp_directory, "logs")
+        os.makedirs(logs_dir)
+        issues_regex = r"\Wpanic\W|\Woom\W|\Wtoo many open files\W"
+
+        hosts_with_problems = []
+        for host in self.cluster.hosts:
+            with allure.step(f"Check logs on {host.config.address}"):
+                if host.is_message_in_logs(issues_regex, session_start_time, end_time):
+                    hosts_with_problems.append(host.config.address)
+                    host.dump_logs(
+                        logs_dir,
+                        since=session_start_time,
+                        until=end_time,
+                        filter_regex=issues_regex,
+                    )
+
+        if hosts_with_problems:
+            self._attach_logs(logs_dir)
+
+        assert (
+            not hosts_with_problems
+        ), f"The following hosts contain critical errors in system logs: {', '.join(hosts_with_problems)}"
+
+    def _attach_logs(self, logs_dir: str) -> None:
+        # Zip all files and attach to Allure because it is more convenient to download a single
+        # zip with all logs rather than mess with individual log files per service or node
+        logs_zip_file_path = shutil.make_archive(logs_dir, "zip", logs_dir)
+        allure.attach.file(logs_zip_file_path, name="logs.zip", extension="zip")
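
Side note, not part of the patch: the pytest_configure hook added to conftest.py rewrites
config.option.markexpr so that a marker filter passed with -m cannot deselect the logs check
test. A minimal standalone sketch of that composition (the helper name is made up purely for
illustration):

    # Mirrors the rewrite done by the pytest_configure hook: an empty selector is left
    # alone (everything runs, including the logs check); any other selector gets the
    # logs_after_session marker OR-ed in.
    def rewrite_markexpr(markexpr: str) -> str:
        if markexpr == "":
            return markexpr
        return f"logs_after_session or ({markexpr})"

    assert rewrite_markexpr("") == ""
    assert rewrite_markexpr("sanity") == "logs_after_session or (sanity)"
    assert rewrite_markexpr("smoke and not long") == "logs_after_session or (smoke and not long)"

Combined with the priority sort in pytest_collection_modifyitems, this keeps
test_logs_after_session selected and scheduled after every other collected test.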
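
Side note, not part of the patch: issues_regex in test_logs_after_session relies on \W, which
matches a single non-word character, so each keyword is flagged only when it appears as a
delimited token inside a log line, and matching is case-insensitive. A small standalone sketch
with made-up log lines:

    import re

    # Same pattern as issues_regex in TestLogs.test_logs_after_session
    ISSUES_REGEX = r"\Wpanic\W|\Woom\W|\Wtoo many open files\W"

    samples = [
        'level=error msg="runtime panic: invalid memory address"',  # flagged: "panic" is delimited on both sides
        "the service panicked and restarted",  # not flagged: "panicked" is a different token
        "kernel: Out of memory: invoked oom-killer",  # flagged: "oom" sits between a space and a dash
        "accept tcp [::]:8080: too many open files (EMFILE)",  # flagged: the whole phrase is delimited
    ]

    for line in samples:
        flagged = bool(re.search(ISSUES_REGEX, line, flags=re.IGNORECASE))
        print(f"{flagged!s:<5} {line}")

Because \W must consume an actual character, a keyword at the very start or end of the searched
text is not matched; full log lines normally provide the surrounding delimiters.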