# forked from TrueCloudLab/frostfs-testlib
from frostfs_testlib import reporter
from frostfs_testlib.load.interfaces.summarized import SummarizedStats
from frostfs_testlib.load.load_config import LoadParams, LoadScenario
from frostfs_testlib.load.load_metrics import get_metrics_object
class LoadVerifier:
    """Validates k6 load-run summaries and verify-scenario results against the configured load parameters."""

    def __init__(self, load_params: LoadParams) -> None:
        # Load configuration used to interpret the summaries (writers/deleters counts, scenario, thresholds).
        self.load_params = load_params

    def collect_load_issues(self, load_summaries: dict[str, dict]) -> list[str]:
        """Return human-readable issues found in the aggregated load-run statistics.

        Args:
            load_summaries: per-k6-process summary dicts produced by the load run.

        Returns:
            A list of issue strings; empty when the run looks healthy.
        """
        found: list[str] = []
        stats_by_operation = SummarizedStats.collect(self.load_params, load_summaries)

        for op_type, op_stats in stats_by_operation.items():
            # Threads were allocated for this operation type, yet nothing executed at all.
            if op_stats.threads and not op_stats.operations:
                found.append(f"No any {op_type.lower()} operation was performed")

            # Error rate above the per-operation threshold is reported as a failure.
            if op_stats.errors.percent > op_stats.errors.threshold:
                found.append(
                    f"{op_type} errors exceeded threshold: "
                    f"{self._get_rate_str(op_stats.errors.percent)} > {op_stats.errors.threshold}%"
                )

        return found

    def collect_verify_issues(self, load_summaries, verification_summaries) -> list[str]:
        """Cross-check every k6 process's load summary against its verify-scenario summary.

        Args:
            load_summaries: mapping of k6 process label -> load summary.
            verification_summaries: mapping of k6 process label -> verify summary.

        Returns:
            Accumulated issue strings from all processes.
        """
        all_issues: list[str] = []
        for label in load_summaries:
            with reporter.step(f"Check verify scenario results for {label}"):
                per_process = self._collect_verify_issues_on_process(
                    label,
                    load_summaries[label],
                    verification_summaries[label],
                )
                all_issues.extend(per_process)
        return all_issues

    def _get_rate_str(self, rate: float, minimal: float = 0.01) -> str:
        """Format a percentage; rates below `minimal` are shown as an approximation (e.g. "~0.01%")."""
        if rate >= minimal:
            return f"{rate:.2f}%"
        return f"~{minimal}%"

    def _collect_verify_issues_on_process(self, label, load_summary, verification_summary) -> list[str]:
        """Compare written-minus-deleted object counts with verified counts for one k6 process.

        Args:
            label: k6 process label (used in issue messages).
            load_summary: the process's load-run summary.
            verification_summary: the process's verify-scenario summary (may be falsy).

        Returns:
            A list with at most one mismatch issue string.
        """
        found: list[str] = []

        metrics = get_metrics_object(self.load_params.scenario, load_summary)

        # Regular or preallocated worker counts; default to 0 when neither is configured.
        writer_count = self.load_params.writers or self.load_params.preallocated_writers or 0
        deleter_count = self.load_params.deleters or self.load_params.preallocated_deleters or 0

        # Only count deletions when deleters actually ran.
        deleted = metrics.delete.success_iterations if deleter_count > 0 else 0

        if verification_summary:
            verify_metrics = get_metrics_object(LoadScenario.VERIFY, verification_summary)
            verified = verify_metrics.read.success_iterations
            # NOTE(review): kept from the original even though unused below —
            # reading the attribute may matter for some metrics implementations; confirm before removing.
            invalid = verify_metrics.read.failed_iterations
            remaining = metrics.write.success_iterations - deleted

            # Due to interruptions we may see total verified objects to be less than written on writers count
            if abs(remaining - verified) > writer_count:
                found.append(
                    f"Verified objects mismatch for {label}. Total: {remaining}, Verified: {verified}. Writers: {writer_count}."
                )

        return found