[#151] Refactor load report

Signed-off-by: Andrey Berezin <a.berezin@yadro.com>
Andrey Berezin 2023-12-20 16:02:54 +03:00 committed by Andrey Berezin
parent 663c144709
commit 10a6efa333
4 changed files with 278 additions and 321 deletions

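The refactor replaces the long per-operation argument lists in LoadReport with a single SummarizedStats object per operation type. Only the call sites appear in the diff below; the class itself lives in frostfs_testlib.load.interfaces.summarized. As a rough sketch inferred purely from the attribute accesses in this diff (field names, types and defaults are assumptions, not the library's actual definitions), the shape the report consumes looks something like this:

# Hypothetical sketch of the stats shape LoadReport reads after this change;
# inferred from attribute access in the diff below, not copied from the library.
from dataclasses import dataclass, field


@dataclass
class SketchErrors:
    total: int = 0                 # stats.errors.total
    percent: float = 0.0           # stats.errors.percent
    threshold: float = 0.0         # stats.errors.threshold
    by_node: dict[str, int] = field(default_factory=dict)   # stats.errors.by_node


@dataclass
class SketchLatencies:
    by_node: dict[str, dict] = field(default_factory=dict)  # stats.latencies.by_node


@dataclass
class SketchStats:
    operations: int = 0            # total iterations for the operation type
    rate: float = 0.0              # operations per second
    throughput: float = 0.0        # bytes per second, converted with calc_unit()
    threads: int = 0               # virtual users, rendered as "<n>th"
    requested_rate: int = 0        # rendered as "<n>op/sec" when non-zero
    errors: SketchErrors = field(default_factory=SketchErrors)
    latencies: SketchLatencies = field(default_factory=SketchLatencies)

SummarizedStats.collect(load_params, load_summaries) then appears to return a mapping from operation type ("Write", "Read", "Delete") to one such object, which is how the refactored _get_totals_section_html consumes it below.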

@@ -3,8 +3,8 @@ from typing import Optional
 import yaml
+from frostfs_testlib.load.interfaces.summarized import SummarizedStats
 from frostfs_testlib.load.load_config import K6ProcessAllocationStrategy, LoadParams, LoadScenario
-from frostfs_testlib.load.load_metrics import get_metrics_object
 from frostfs_testlib.utils.converting_utils import calc_unit
@@ -114,63 +114,46 @@ class LoadReport:
         return model_map[self.load_params.scenario]
 
-    def _get_operations_sub_section_html(
-        self,
-        operation_type: str,
-        total_operations: int,
-        requested_rate_str: str,
-        vus_str: str,
-        total_rate: float,
-        throughput: float,
-        errors: dict[str, int],
-        latency: dict[str, dict],
-    ):
+    def _get_operations_sub_section_html(self, operation_type: str, stats: SummarizedStats):
         throughput_html = ""
-        if throughput > 0:
-            throughput, unit = calc_unit(throughput)
+        if stats.throughput > 0:
+            throughput, unit = calc_unit(stats.throughput)
             throughput_html = self._row("Throughput", f"{throughput:.2f} {unit}/sec")
 
         per_node_errors_html = ""
-        total_errors = 0
-        if errors:
-            total_errors: int = 0
-            for node_key, errors in errors.items():
-                total_errors += errors
-                if self.load_params.k6_process_allocation_strategy == K6ProcessAllocationStrategy.PER_ENDPOINT:
-                    per_node_errors_html += self._row(f"At {node_key}", errors)
+        for node_key, errors in stats.errors.by_node.items():
+            if self.load_params.k6_process_allocation_strategy == K6ProcessAllocationStrategy.PER_ENDPOINT:
+                per_node_errors_html += self._row(f"At {node_key}", errors)
 
         latency_html = ""
-        if latency:
-            for node_key, latency_dict in latency.items():
-                latency_values = "N/A"
-                if latency_dict:
-                    latency_values = ""
-                    for param_name, param_val in latency_dict.items():
-                        latency_values += f"{param_name}={param_val:.2f}ms "
+        for node_key, latencies in stats.latencies.by_node.items():
+            latency_values = "N/A"
+            if latencies:
+                latency_values = ""
+                for param_name, param_val in latencies.items():
+                    latency_values += f"{param_name}={param_val:.2f}ms "
 
-                latency_html += self._row(f"{operation_type} latency {node_key.split(':')[0]}", latency_values)
+            latency_html += self._row(f"{operation_type} latency {node_key.split(':')[0]}", latency_values)
 
         object_size, object_size_unit = calc_unit(self.load_params.object_size, 1)
         duration = self._seconds_to_formatted_duration(self.load_params.load_time)
         model = self._get_model_string()
+        requested_rate_str = f"{stats.requested_rate}op/sec" if stats.requested_rate else ""
         # write 8KB 15h49m 50op/sec 50th open model/closed model/min_iteration duration=1s - 1.636MB/s 199.57451/s
-        short_summary = f"{operation_type} {object_size}{object_size_unit} {duration} {requested_rate_str} {vus_str} {model} - {throughput:.2f}{unit}/s {total_rate:.2f}/s"
-        errors_percent = 0
-        if total_operations:
-            errors_percent = total_errors / total_operations * 100.0
+        short_summary = f"{operation_type} {object_size}{object_size_unit} {duration} {requested_rate_str} {stats.threads}th {model} - {throughput:.2f}{unit}/s {stats.rate:.2f}/s"
 
         html = f"""
         <table border="1" cellpadding="5px"><tbody>
             <tr><th colspan="2" bgcolor="gainsboro">{short_summary}</th></tr>
             <tr><th colspan="2" bgcolor="gainsboro">Metrics</th></tr>
-            {self._row("Total operations", total_operations)}
-            {self._row("OP/sec", f"{total_rate:.2f}")}
+            {self._row("Total operations", stats.operations)}
+            {self._row("OP/sec", f"{stats.rate:.2f}")}
             {throughput_html}
             {latency_html}
             <tr><th colspan="2" bgcolor="gainsboro">Errors</th></tr>
             {per_node_errors_html}
-            {self._row("Total", f"{total_errors} ({errors_percent:.2f}%)")}
-            {self._row("Threshold", f"{self.load_params.error_threshold:.2f}%")}
+            {self._row("Total", f"{stats.errors.total} ({stats.errors.percent:.2f}%)")}
+            {self._row("Threshold", f"{stats.errors.threshold:.2f}%")}
         </tbody></table><br><hr>
     """
@@ -178,111 +161,12 @@ class LoadReport:
     def _get_totals_section_html(self):
         html = ""
-        for i, load_summaries in enumerate(self.load_summaries_list, 1):
-            html += f"<h3>Load Results for load #{i}</h3>"
+        for i in range(len(self.load_summaries_list)):
+            html += f"<h3>Load Results for load #{i+1}</h3>"
 
-            write_operations = 0
-            write_op_sec = 0
-            write_throughput = 0
-            write_latency = {}
-            write_errors = {}
-            requested_write_rate = self.load_params.write_rate
-            requested_write_rate_str = f"{requested_write_rate}op/sec" if requested_write_rate else ""
-
-            read_operations = 0
-            read_op_sec = 0
-            read_throughput = 0
-            read_latency = {}
-            read_errors = {}
-            requested_read_rate = self.load_params.read_rate
-            requested_read_rate_str = f"{requested_read_rate}op/sec" if requested_read_rate else ""
-
-            delete_operations = 0
-            delete_op_sec = 0
-            delete_latency = {}
-            delete_errors = {}
-            requested_delete_rate = self.load_params.delete_rate
-            requested_delete_rate_str = f"{requested_delete_rate}op/sec" if requested_delete_rate else ""
-
-            if self.load_params.scenario in [LoadScenario.gRPC_CAR, LoadScenario.S3_CAR]:
-                delete_vus = max(self.load_params.preallocated_deleters or 0, self.load_params.max_deleters or 0)
-                write_vus = max(self.load_params.preallocated_writers or 0, self.load_params.max_writers or 0)
-                read_vus = max(self.load_params.preallocated_readers or 0, self.load_params.max_readers or 0)
-            else:
-                write_vus = self.load_params.writers
-                read_vus = self.load_params.readers
-                delete_vus = self.load_params.deleters
-
-            write_vus_str = f"{write_vus}th"
-            read_vus_str = f"{read_vus}th"
-            delete_vus_str = f"{delete_vus}th"
-
-            write_section_required = False
-            read_section_required = False
-            delete_section_required = False
-
-            for node_key, load_summary in load_summaries.items():
-                metrics = get_metrics_object(self.load_params.scenario, load_summary)
-                write_operations += metrics.write_total_iterations
-                if write_operations:
-                    write_section_required = True
-                write_op_sec += metrics.write_rate
-                write_latency[node_key] = metrics.write_latency
-                write_throughput += metrics.write_throughput
-                if metrics.write_failed_iterations:
-                    write_errors[node_key] = metrics.write_failed_iterations
-
-                read_operations += metrics.read_total_iterations
-                if read_operations:
-                    read_section_required = True
-                read_op_sec += metrics.read_rate
-                read_throughput += metrics.read_throughput
-                read_latency[node_key] = metrics.read_latency
-                if metrics.read_failed_iterations:
-                    read_errors[node_key] = metrics.read_failed_iterations
-
-                delete_operations += metrics.delete_total_iterations
-                if delete_operations:
-                    delete_section_required = True
-                delete_op_sec += metrics.delete_rate
-                delete_latency[node_key] = metrics.delete_latency
-                if metrics.delete_failed_iterations:
-                    delete_errors[node_key] = metrics.delete_failed_iterations
-
-            if write_section_required:
-                html += self._get_operations_sub_section_html(
-                    "Write",
-                    write_operations,
-                    requested_write_rate_str,
-                    write_vus_str,
-                    write_op_sec,
-                    write_throughput,
-                    write_errors,
-                    write_latency,
-                )
-
-            if read_section_required:
-                html += self._get_operations_sub_section_html(
-                    "Read",
-                    read_operations,
-                    requested_read_rate_str,
-                    read_vus_str,
-                    read_op_sec,
-                    read_throughput,
-                    read_errors,
-                    read_latency,
-                )
-
-            if delete_section_required:
-                html += self._get_operations_sub_section_html(
-                    "Delete",
-                    delete_operations,
-                    requested_delete_rate_str,
-                    delete_vus_str,
-                    delete_op_sec,
-                    0,
-                    delete_errors,
-                    delete_latency,
-                )
+            summarized = SummarizedStats.collect(self.load_params, self.load_summaries_list[i])
+            for operation_type, stats in summarized.items():
+                if stats.operations:
+                    html += self._get_operations_sub_section_html(operation_type, stats)
 
         return html
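With this change, whether a Write/Read/Delete section is rendered depends only on stats.operations, which replaces the three hand-maintained *_section_required flags from the old loop. A small self-contained illustration of that filter (SimpleNamespace objects stand in for real SummarizedStats instances):

# Stand-ins to illustrate the new "render only non-empty sections" rule.
from types import SimpleNamespace

summarized = {
    "Write": SimpleNamespace(operations=1200),
    "Read": SimpleNamespace(operations=800),
    "Delete": SimpleNamespace(operations=0),  # no deleters configured
}

rendered = [op for op, stats in summarized.items() if stats.operations]
print(rendered)  # ['Write', 'Read'] - the empty Delete section is skipped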