Add test case for loss of one node

This commit is contained in:
Yaroslava Lukoyanova 2023-05-31 13:16:38 +03:00 committed by Andrey Berezin
parent 9c3b1c771d
commit c9bfba136d

View file

@ -84,11 +84,6 @@ def return_stopped_hosts(shell: Shell, cluster: Cluster) -> None:
@pytest.mark.failover
class TestFailoverStorage(ClusterTestBase):
@pytest.fixture(scope="function", autouse=True)
def start_stopped_services(self, cluster_state_controller: ClusterStateController):
    """Teardown fixture: after each test in this class, restart any storage
    services the test stopped, so later tests see a healthy cluster.

    autouse=True means every test method gets this cleanup without
    requesting the fixture explicitly; the body before ``yield`` is empty,
    so there is no setup phase.
    """
    yield
    # Runs after the test finishes (pass or fail): bring back every
    # storage service previously stopped via the state controller.
    cluster_state_controller.start_stopped_storage_services()
@allure.title("Lose and return storage node's host")
@pytest.mark.parametrize("hard_reboot", [True, False])
@pytest.mark.failover_reboot
@ -674,3 +669,30 @@ class TestStorageDataLoss(ClusterTestBase):
with allure.step("Pass test if no errors found"):
assert not exception_messages, "\n".join(exception_messages)
@allure.title(
    "Test S3 Loss of one node should trigger use of tree and storage service in another node"
)
def test_s3_one_endpoint_loss(
    self,
    bucket,
    s3_client: S3ClientWrapper,
    simple_object_size: int,
    after_run_return_all_stopped_services,
    cluster_state_controller: ClusterStateController,
):
    """Verify S3 gate failover: after one storage node is stopped, the gate
    should reconnect to tree/storage services on a surviving node, so a
    subsequent object PUT into an existing bucket still succeeds.
    """
    # TODO: need to check that s3 gate is connected to localhost (such metric will be supported in 1.3)
    with allure.step(
        "Stop one node and wait for rebalance connection of s3 gate to storage service"
    ):
        node_under_test = self.cluster.cluster_nodes[0]
        cluster_state_controller.stop_storage_service(node_under_test)
        # Fixed grace period for the s3 gate to rebalance onto a
        # surviving storage service (no metric to poll yet — see TODO).
        sleep(60)

    file_path = generate_file(simple_object_size)
    file_name = s3_helper.object_key_from_file_path(file_path)

    with allure.step("Put object into one bucket"):
        put_object = s3_client.put_object(bucket, file_path)
        s3_helper.check_objects_in_bucket(s3_client, bucket, expected_objects=[file_name])