import os
import shutil
import time
from datetime import datetime, timezone

import allure
import pytest
from frostfs_testlib import reporter
from frostfs_testlib.hosting import Host
from frostfs_testlib.testing.cluster_test_base import Cluster
from frostfs_testlib.testing.parallel import parallel


def pytest_generate_tests(metafunc: pytest.Metafunc):
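    # Inject "repo" and "markers" as parameters of every collected test so the repository name
    # and the mark expression used for this run are recorded in the report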
    metafunc.fixturenames.append("repo")
    metafunc.fixturenames.append("markers")
    metafunc.parametrize(
        "repo, markers",
        [("frostfs-testcases", metafunc.config.option.markexpr)],
    )


@pytest.mark.session_logs
class TestLogs:
    @pytest.mark.logs_after_session
    @pytest.mark.order(1000)
    @allure.title("Check logs from frostfs-testcases with marks '{request.config.option.markexpr}' - search errors")
    def test_logs_search_errors(self, temp_directory: str, cluster: Cluster, session_start_time: datetime, request: pytest.FixtureRequest):
        end_time = datetime.now(timezone.utc)
        logs_dir = os.path.join(temp_directory, "logs")
        if not os.path.exists(logs_dir):
            os.makedirs(logs_dir)
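
        # Patterns that indicate critical problems in the hosts' system logs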
        regexes = [
            r"\bpanic\b",
            r"\boom\b",
            r"too many",
            r"insufficient funds",
            r"insufficient amount of gas",
            r"cannot assign requested address",
            r"\bunable to process\b",
            r"\bmaximum number of subscriptions is reached\b",
        ]
        issues_regex = "|".join(regexes)
        exclude_filter = r"too many requests"
        log_level_priority = "3"  # will include 0-3 priority logs (0: emergency 1: alerts 2: critical 3: errors)
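
        # Give the hosts a moment to flush the most recent log entries before collection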
        time.sleep(2)
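
        # Collect matching log lines from every cluster host in parallel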
        futures = parallel(
            self._collect_logs_on_host,
            cluster.hosts,
            logs_dir,
            issues_regex,
            session_start_time,
            end_time,
            exclude_filter,
            priority=log_level_priority,
        )
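
        # Keep addresses of hosts with findings; hosts where log collection raised an exception are skipped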
        hosts_with_problems = [future.result() for future in futures if not future.exception() and future.result() is not None]
        if hosts_with_problems:
            self._attach_logs(logs_dir)

        assert not hosts_with_problems, f"The following hosts contain critical errors in system logs: {', '.join(hosts_with_problems)}"

    @pytest.mark.order(1001)
    @allure.title("Check logs from frostfs-testcases with marks '{request.config.option.markexpr}' - identify sensitive data")
    def test_logs_identify_sensitive_data(
        self, temp_directory: str, cluster: Cluster, session_start_time: datetime, request: pytest.FixtureRequest
    ):
        end_time = datetime.now(timezone.utc)
        logs_dir = os.path.join(temp_directory, "logs")
        if not os.path.exists(logs_dir):
            os.makedirs(logs_dir)
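
        # Patterns that match credentials, tokens, and private keys leaked into log output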
        _regex = {
            "authorization_basic": r"basic [a-zA-Z0-9=:_\+\/-]{16,100}",
            "authorization_bearer": r"bearer [a-zA-Z0-9_\-\.=:_\+\/]{16,100}",
            "access_token": r"\"access_token\":\"[0-9a-z]{16}\$[0-9a-f]{32}\"",
            "api_token": r"\"api_token\":\"(xox[a-zA-Z]-[a-zA-Z0-9-]+)\"",
            "yadro_access_token": r"[a-zA-Z0-9_-]*:[a-zA-Z0-9_\-]+@yadro\.com*",
            "SSH_privKey": r"([-]+BEGIN [^\s]+ PRIVATE KEY[-]+[\s]*[^-]*[-]+END [^\s]+ PRIVATE KEY[-]+)",
            "possible_Creds": r"(?i)(" r"password\s*[`=:]+\s*[^\s]+|" r"password is\s*[`=:]+\s*[^\s]+|" r"passwd\s*[`=:]+\s*[^\s]+)",
        }

        issues_regex = "|".join(_regex.values())
        exclude_filter = r"COMMAND=\|--\sBoot\s"

        time.sleep(2)

        futures = parallel(
            self._collect_logs_on_host,
            cluster.hosts,
            logs_dir,
            issues_regex,
            session_start_time,
            end_time,
            exclude_filter,
        )

        hosts_with_problems = [future.result() for future in futures if not future.exception() and future.result() is not None]
        if hosts_with_problems:
            self._attach_logs(logs_dir)

        assert not hosts_with_problems, f"The following hosts contain sensitive data in system logs: {', '.join(hosts_with_problems)}"

    def _collect_logs_on_host(
        self,
        host: Host,
        logs_dir: str,
        regex: str,
        since: datetime,
        until: datetime,
        exclude_filter: str,
        priority: str = None,
    ):
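        # Returns the host address if matching log lines were found (and saved to logs_dir), otherwise None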
        with reporter.step(f"Get logs from {host.config.address}"):
            logs = host.get_filtered_logs(filter_regex=regex, since=since, until=until, exclude_filter=exclude_filter, priority=priority)

            if not logs:
                return None

            with open(os.path.join(logs_dir, f"{host.config.address}.log"), "w") as file:
                file.write(logs)

            return host.config.address

    def _attach_logs(self, logs_dir: str) -> None:
        # Zip all files and attach to Allure because it is more convenient to download a single
        # zip with all logs rather than mess with individual log files per service or node
        logs_zip_file_path = shutil.make_archive(logs_dir, "zip", logs_dir)
        reporter.attach(logs_zip_file_path, "logs.zip")