import os
import shutil
from datetime import datetime

import allure
import pytest
from frostfs_testlib import reporter
from frostfs_testlib.hosting import Host
from frostfs_testlib.testing.cluster_test_base import Cluster
from frostfs_testlib.testing.parallel import parallel

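
# pytest_generate_tests is a standard pytest hook: here it adds "repo" and "markers" as parameters
# to every test in this module, filling them with the repository name and the active "-m" mark
# expression (presumably so that both show up as test parameters in the report).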
def pytest_generate_tests(metafunc: pytest.Metafunc):
    metafunc.fixturenames.append("repo")
    metafunc.fixturenames.append("markers")
    metafunc.parametrize(
        "repo, markers",
        [("frostfs-testcases", metafunc.config.option.markexpr)],
    )


class TestLogs:
    @allure.title("Check logs from frostfs-testcases with marks '{request.config.option.markexpr}'")
    @pytest.mark.logs_after_session
    @pytest.mark.no_healthcheck
    def test_logs_after_session(
        self, temp_directory: str, cluster: Cluster, session_start_time: datetime, request: pytest.FixtureRequest
    ):
        """
        This test is automatically added to any test run to check the cluster logs for critical errors.
        """
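
        # Overall flow: grep each host's logs for the patterns below over the session time window,
        # save any matches to per-host files, attach them to the report, and fail if any host
        # produced matches.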
        end_time = datetime.utcnow()
        logs_dir = os.path.join(temp_directory, "logs")
        os.makedirs(logs_dir)

        # Using \b here because 'oom' and 'panic' can sometimes be found in OID or CID
        issues_regex = r"\bpanic\b|\boom\b|too many|insufficient funds|insufficient amount of gas|wallet passwd|secret key|access key"
        exclude_filter = r"too many requests"
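
        # Collect logs from every host in parallel over the window [session_start_time, end_time];
        # exclude_filter drops "too many requests" lines, which would otherwise be matched by the
        # broad "too many" pattern above.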
        futures = parallel(
            self._collect_logs_on_host, cluster.hosts, logs_dir, issues_regex, session_start_time, end_time, exclude_filter
        )

        hosts_with_problems = [
            future.result() for future in futures if not future.exception() and future.result() is not None
        ]
        if hosts_with_problems:
            self._attach_logs(logs_dir)

        assert (
            not hosts_with_problems
        ), f"The following hosts contain critical errors in system logs: {', '.join(hosts_with_problems)}"
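
    # Helper contract (as implemented below): returns the host address when suspicious lines were
    # found (after writing them to a per-host file under logs_dir), or None when the host is clean.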
    def _collect_logs_on_host(self, host: Host, logs_dir: str, regex: str, since: datetime, until: datetime, exclude_filter: str):
        with reporter.step(f"Get logs from {host.config.address}"):
            logs = host.get_filtered_logs(regex, since, until, exclude_filter)

        if not logs:
            return None

        with open(os.path.join(logs_dir, f"{host.config.address}.log"), "w") as file:
            file.write(logs)

        return host.config.address

    def _attach_logs(self, logs_dir: str) -> None:
        # Zip all files and attach to Allure because it is more convenient to download a single
        # zip with all logs rather than mess with individual log files per service or node
        logs_zip_file_path = shutil.make_archive(logs_dir, "zip", logs_dir)
        reporter.attach(logs_zip_file_path, "logs.zip")