frostfs-testcases/pytest_tests/testsuites/special/test_frostfs_logs.py
import os
import shutil
import time
from datetime import datetime

import allure
import pytest
from frostfs_testlib import reporter
from frostfs_testlib.hosting import Host
from frostfs_testlib.testing.cluster_test_base import Cluster
from frostfs_testlib.testing.parallel import parallel


def pytest_generate_tests(metafunc: pytest.Metafunc):
    metafunc.fixturenames.append("repo")
    metafunc.fixturenames.append("markers")
    metafunc.parametrize(
        "repo, markers",
        [("frostfs-testcases", metafunc.config.option.markexpr)],
    )
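
# Note (illustrative, not part of the original file): "repo" and "markers" are injected as
# parametrized fixtures, so the generated test id carries the repo name and the `-m` mark
# expression used for the session. For example, a run started with a hypothetical
# `pytest -m "sanity"` would produce an id like
# test_logs_after_session[frostfs-testcases-sanity].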


class TestLogs:
    @allure.title("Check logs from frostfs-testcases with marks '{request.config.option.markexpr}'")
    @pytest.mark.logs_after_session
    @pytest.mark.no_healthcheck
    def test_logs_after_session(
        self, temp_directory: str, cluster: Cluster, session_start_time: datetime, request: pytest.FixtureRequest
    ):
        """
        This test is automatically added to any test run to check the cluster logs for critical errors.
        """
        end_time = datetime.utcnow()
        logs_dir = os.path.join(temp_directory, "logs")
        os.makedirs(logs_dir)

        # Using \b here because 'oom' and 'panic' can sometimes be found in OID or CID
        issues_regex = r"\bpanic\b|\boom\b|too many|insufficient funds|insufficient amount of gas|wallet passwd|secret \bkey\b|access \bkey\b|cannot assign requested address"
        exclude_filter = r"too many requests"
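        # Illustrative (hypothetical) log lines, not taken from a real node, showing the intent:
        #   reported: "panic: runtime error: invalid memory address"  -> matches \bpanic\b
        #   reported: "oom-killer invoked for frostfs-node"           -> matches \boom\b
        #   skipped:  "429 too many requests"                         -> matches "too many" but dropped by exclude_filter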
        time.sleep(2)

        futures = parallel(
            self._collect_logs_on_host,
            cluster.hosts,
            logs_dir,
            issues_regex,
            session_start_time,
            end_time,
            exclude_filter,
        )
        hosts_with_problems = [
            future.result() for future in futures if not future.exception() and future.result() is not None
        ]
        if hosts_with_problems:
            self._attach_logs(logs_dir)

        assert (
            not hosts_with_problems
        ), f"The following hosts contain critical errors in system logs: {', '.join(hosts_with_problems)}"

    def _collect_logs_on_host(
        self, host: Host, logs_dir: str, regex: str, since: datetime, until: datetime, exclude_filter: str
    ):
        with reporter.step(f"Get logs from {host.config.address}"):
            logs = host.get_filtered_logs(filter_regex=regex, since=since, until=until, exclude_filter=exclude_filter)

            if not logs:
                return None

            with open(os.path.join(logs_dir, f"{host.config.address}.log"), "w") as file:
                file.write(logs)

            return host.config.address

    def _attach_logs(self, logs_dir: str) -> None:
        # Zip all files and attach to Allure because it is more convenient to download a single
        # zip with all logs rather than mess with individual logs files per service or node
        logs_zip_file_path = shutil.make_archive(logs_dir, "zip", logs_dir)
        reporter.attach(logs_zip_file_path, "logs.zip")
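

# Illustrative sketch (an assumption, not part of the original test suite): roughly the kind of
# filtering host.get_filtered_logs() is expected to perform, applied here to a plain-text log
# dump on disk. The file path and log contents are hypothetical; the regexes mirror the test above.
def _example_scan_local_log(path: str) -> list[str]:
    import re

    issues = re.compile(r"\bpanic\b|\boom\b|too many|insufficient funds")
    exclude = re.compile(r"too many requests")
    with open(path, "r") as log_file:
        # Keep only lines that hit an issue pattern and are not covered by the exclude filter
        return [line.rstrip() for line in log_file if issues.search(line) and not exclude.search(line)]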