[#342] Fixed container metrics for tombstone #342

Merged
i.niyazov merged 1 commit from fix-container-metrics-tombstone-v0.40 into support/v0.40 2024-12-16 07:46:21 +00:00
Showing only changes of commit d26434c50c


@@ -3,7 +3,6 @@ import math
 import allure
 import pytest
 from frostfs_testlib import reporter
-from frostfs_testlib.testing.parallel import parallel
 from frostfs_testlib.steps.cli.container import create_container, delete_container, search_nodes_with_container, wait_for_container_deletion
 from frostfs_testlib.steps.cli.object import delete_object, head_object, put_object_to_random_node
 from frostfs_testlib.steps.metrics import calc_metrics_count_from_stdout, check_metrics_counter, get_metrics_value
@@ -12,6 +11,7 @@ from frostfs_testlib.storage.cluster import Cluster, ClusterNode
 from frostfs_testlib.storage.dataclasses.object_size import ObjectSize
 from frostfs_testlib.storage.dataclasses.wallet import WalletInfo
 from frostfs_testlib.testing.cluster_test_base import ClusterTestBase
+from frostfs_testlib.testing.parallel import parallel
 from frostfs_testlib.utils.file_utils import generate_file
 from ...helpers.utility import are_numbers_similar
@@ -24,7 +24,7 @@ class TestContainerMetrics(ClusterTestBase):
     def put_object_parallel(self, file_path: str, wallet: WalletInfo, cid: str):
         oid = put_object_to_random_node(wallet, file_path, cid, self.shell, self.cluster)
         return oid

     @reporter.step("Get metrics value from node")
     def get_metrics_search_by_greps_parallel(self, node: ClusterNode, **greps):
         try:
@@ -139,12 +139,7 @@ class TestContainerMetrics(ClusterTestBase):
     @allure.title("Container size metrics put {objects_count} objects (obj_size={object_size})")
     @pytest.mark.parametrize("objects_count", [5, 10, 20])
-    def test_container_size_metrics_more_objects(
-        self,
-        object_size: ObjectSize,
-        default_wallet: WalletInfo,
-        objects_count: int
-    ):
+    def test_container_size_metrics_more_objects(self, object_size: ObjectSize, default_wallet: WalletInfo, objects_count: int):
         with reporter.step(f"Create container"):
             cid = create_container(default_wallet, self.shell, self.cluster.default_rpc_endpoint)
@@ -154,10 +149,14 @@ class TestContainerMetrics(ClusterTestBase):
             oids = [future.result() for future in futures]

         with reporter.step("Check metric appears in all nodes"):
-            metric_values = [get_metrics_value(node, command="frostfs_node_engine_container_size_bytes", cid=cid) for node in self.cluster.cluster_nodes]
-            actual_value = sum(metric_values) // 2 # for policy REP 2, value divide by 2
+            metric_values = [
+                get_metrics_value(node, command="frostfs_node_engine_container_size_bytes", cid=cid) for node in self.cluster.cluster_nodes
+            ]
+            actual_value = sum(metric_values) // 2  # for policy REP 2, value divide by 2
             expected_value = object_size.value * objects_count
-            assert are_numbers_similar(actual_value, expected_value, tolerance_percentage=2), "metric container size bytes value not correct"
+            assert are_numbers_similar(
+                actual_value, expected_value, tolerance_percentage=2
+            ), "metric container size bytes value not correct"

         with reporter.step("Delete file, wait until gc remove object"):
             tombstones_size = 0
@@ -167,22 +166,28 @@ class TestContainerMetrics(ClusterTestBase):
                 tombstones_size += int(tombstone["header"]["payloadLength"])

         with reporter.step(f"Check container size metrics, 'should be positive in all nodes'"):
-            futures = parallel(get_metrics_value, self.cluster.cluster_nodes, command="frostfs_node_engine_container_size_bytes", cid=cid)
+            with reporter.step("Search container nodes"):
+                container_nodes = search_nodes_with_container(
+                    wallet=default_wallet,
+                    cid=cid,
+                    shell=self.shell,
+                    endpoint=self.cluster.default_rpc_endpoint,
+                    cluster=self.cluster,
+                )
+            with reporter.step(f"Get metrics value from container nodes"):
+                futures = parallel(get_metrics_value, container_nodes, command="frostfs_node_engine_container_size_bytes", cid=cid)
             metrics_value_nodes = [future.result() for future in futures]
             for act_metric in metrics_value_nodes:
                 assert act_metric >= 0, "Metrics value is negative"
-            assert sum(metrics_value_nodes) // len(self.cluster.cluster_nodes) == tombstones_size, "tomstone size of objects not correct"
+            with reporter.step(f"Check container size metrics for tombstone"):
+                assert sum(metrics_value_nodes) // len(container_nodes) == tombstones_size, "tomstone size of objects not correct"

     @allure.title("Container metrics (policy={policy})")
     @pytest.mark.parametrize("placement_policy, policy", [("REP 2 IN X CBF 2 SELECT 2 FROM * AS X", "REP"), ("EC 1.1 CBF 1", "EC")])
     def test_container_metrics_delete_complex_objects(
-        self,
-        complex_object_size: ObjectSize,
-        default_wallet: WalletInfo,
-        cluster: Cluster,
-        placement_policy: str,
-        policy: str
+        self, complex_object_size: ObjectSize, default_wallet: WalletInfo, cluster: Cluster, placement_policy: str, policy: str
     ):
         copies = 2 if policy == "REP" else 1
         objects_count = 2
@@ -201,9 +206,9 @@ class TestContainerMetrics(ClusterTestBase):
         with reporter.step("Delete objects and container"):
             for oid in oids:
                 delete_object(default_wallet, cid, oid, self.shell, cluster.default_rpc_endpoint)
             delete_container(default_wallet, cid, self.shell, cluster.default_rpc_endpoint)

         with reporter.step("Tick epoch and check container was deleted"):
             self.tick_epoch()
             wait_for_container_deletion(default_wallet, cid, shell=self.shell, endpoint=cluster.default_rpc_endpoint)
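
For context on what the fix changes: the previous assertion divided the summed frostfs_node_engine_container_size_bytes values by the total number of cluster nodes, but a container placed with REP 2 lives only on the nodes returned by search_nodes_with_container, so nodes that never stored the tombstone dragged the average below the tombstone payload size. Below is a minimal sketch of the corrected aggregation; the helper name tombstone_metric_matches and the sample numbers are illustrative only, not code from this PR.

# Illustrative sketch only -- mirrors the aggregation the test now performs.

def tombstone_metric_matches(container_node_values: list[int], tombstones_size: int) -> bool:
    """Check frostfs_node_engine_container_size_bytes readings against the tombstone payload.

    container_node_values -- metric values taken only from the nodes that host the
                             container (e.g. found via search_nodes_with_container).
    tombstones_size       -- sum of payloadLength over the tombstone objects.
    """
    # Every container node must report a non-negative size.
    if any(value < 0 for value in container_node_values):
        return False
    # Each container node keeps its own copy of the tombstone, so the average
    # over the container nodes should equal the tombstone payload size.
    return sum(container_node_values) // len(container_node_values) == tombstones_size


# Example with made-up numbers: REP 2 container on 2 of 4 cluster nodes,
# tombstone payload of 224 bytes.
assert tombstone_metric_matches([224, 224], 224)

# The old assertion divided by the cluster node count instead:
# sum([224, 224, 0, 0]) // 4 == 112, which never equals 224 -- the false
# failure this PR removes.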