Update shards test
Signed-off-by: Andrey Berezin <a.berezin@yadro.com>
parent b9b8c4e1f8
commit cd98d362e3
3 changed files with 113 additions and 45 deletions
@@ -62,6 +62,22 @@ class NodeBase:
             _ConfigAttributes.WALLET_PATH,
         )
 
+    def get_remote_wallet_path(self) -> str:
+        """
+        Returns node wallet file path located on remote host
+        """
+        return self._get_attribute(
+            _ConfigAttributes.WALLET_PATH,
+        )
+
+    def get_remote_config_path(self) -> str:
+        """
+        Returns node config file path located on remote host
+        """
+        return self._get_attribute(
+            _ConfigAttributes.CONFIG_PATH,
+        )
+
     def get_wallet_config_path(self):
         return self._get_attribute(
             _ConfigAttributes.LOCAL_WALLET_CONFIG,
@@ -317,6 +333,7 @@ class _ConfigAttributes:
     WALLET_PASSWORD = "wallet_password"
     WALLET_PATH = "wallet_path"
     WALLET_CONFIG = "wallet_config"
+    CONFIG_PATH = "config_path"
     LOCAL_WALLET_PATH = "local_wallet_path"
     LOCAL_WALLET_CONFIG = "local_config_path"
     RPC_ENDPOINT = "rpc_endpoint"
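The two new accessors above follow the existing NodeBase pattern: each one resolves a single _ConfigAttributes key through self._get_attribute. A minimal, self-contained sketch of that lookup, using an illustrative stand-in class and made-up paths (the real NodeBase resolves these values from the hosting configuration, not from a plain dict):

class _ConfigAttributes:
    WALLET_PATH = "wallet_path"
    CONFIG_PATH = "config_path"


class NodeStub:
    """Illustrative stand-in for NodeBase; not the real implementation."""

    def __init__(self, attributes: dict):
        self._attributes = attributes

    def _get_attribute(self, attribute_name: str) -> str:
        return self._attributes[attribute_name]

    def get_remote_wallet_path(self) -> str:
        return self._get_attribute(_ConfigAttributes.WALLET_PATH)

    def get_remote_config_path(self) -> str:
        return self._get_attribute(_ConfigAttributes.CONFIG_PATH)


node = NodeStub({"wallet_path": "/etc/neofs/wallet.json", "config_path": "/etc/neofs/config.yml"})
print(node.get_remote_config_path())  # /etc/neofs/config.yml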
@@ -1,14 +1,19 @@
 import json
+import pathlib
+import re
 from dataclasses import dataclass
+from io import StringIO
 
 import allure
 import pytest
 import yaml
-from cluster import Cluster
-from common import NEOFS_CLI_EXEC, WALLET_CONFIG
+from cluster import Cluster, StorageNode
+from common import WALLET_CONFIG
+from configobj import ConfigObj
 from neofs_testlib.cli import NeofsCli
-from neofs_testlib.hosting import Host, Hosting, ServiceConfig
 from neofs_testlib.shell import Shell
 
+SHARD_PREFIX = "NEOFS_STORAGE_SHARD_"
+BLOBSTOR_PREFIX = "_BLOBSTOR_"
 
 @dataclass
@@ -24,6 +29,11 @@ class Blobstor:
     def __hash__(self):
         return hash((self.path, self.path_type))
 
+    @staticmethod
+    def from_config_object(section: ConfigObj, shard_id: str, blobstor_id: str):
+        var_prefix = f"{SHARD_PREFIX}{shard_id}{BLOBSTOR_PREFIX}{blobstor_id}"
+        return Blobstor(section.get(f"{var_prefix}_PATH"), section.get(f"{var_prefix}_TYPE"))
+
 
 @dataclass
 class Shard:
@@ -43,61 +53,101 @@ class Shard:
     def __hash__(self):
         return hash((self.metabase, self.writecache))
 
+    @staticmethod
+    def _get_blobstor_count_from_section(config_object: ConfigObj, shard_id: int):
+        pattern = f"{SHARD_PREFIX}{shard_id}{BLOBSTOR_PREFIX}"
+        blobstors = {key[: len(pattern) + 2] for key in config_object.keys() if pattern in key}
+        return len(blobstors)
+
+    @staticmethod
+    def from_config_object(config_object: ConfigObj, shard_id: int):
+        var_prefix = f"{SHARD_PREFIX}{shard_id}"
+
+        blobstor_count = Shard._get_blobstor_count_from_section(config_object, shard_id)
+        blobstors = [
+            Blobstor.from_config_object(config_object, shard_id, blobstor_id)
+            for blobstor_id in range(blobstor_count)
+        ]
+
+        write_cache_enabled = config_object.as_bool(f"{var_prefix}_WRITECACHE_ENABLED")
+
+        return Shard(
+            blobstors,
+            config_object.get(f"{var_prefix}_METABASE_PATH"),
+            config_object.get(f"{var_prefix}_WRITECACHE_PATH") if write_cache_enabled else "",
+        )
+
+    @staticmethod
+    def from_object(shard):
+        metabase = shard["metabase"]["path"] if "path" in shard["metabase"] else shard["metabase"]
+        writecache = (
+            shard["writecache"]["path"] if "path" in shard["writecache"] else shard["writecache"]
+        )
+
+        return Shard(
+            blobstor=[
+                Blobstor(path=blobstor["path"], path_type=blobstor["type"])
+                for blobstor in shard["blobstor"]
+            ],
+            metabase=metabase,
+            writecache=writecache,
+        )
+
+
+def shards_from_yaml(contents: str) -> list[Shard]:
+    config = yaml.safe_load(contents)
+    config["storage"]["shard"].pop("default")
+
+    return [Shard.from_object(shard) for shard in config["storage"]["shard"].values()]
+
+
+def shards_from_env(contents: str) -> list[Shard]:
+    configObj = ConfigObj(StringIO(contents))
+
+    pattern = f"{SHARD_PREFIX}\d*"
+    num_shards = len(set(re.findall(pattern, contents)))
+
+    return [Shard.from_config_object(configObj, shard_id) for shard_id in range(num_shards)]
+
 
 @pytest.mark.sanity
 @pytest.mark.shard
 class TestControlShard:
     @staticmethod
-    def get_shards_from_config(host: Host, service_config: ServiceConfig) -> list[Shard]:
-        config_file = service_config.attributes["config_path"]
-        config = yaml.safe_load(host.get_shell().exec(f"cat {config_file}").stdout)
-        config["storage"]["shard"].pop("default")
-        return [
-            Shard(
-                blobstor=[
-                    Blobstor(path=blobstor["path"], path_type=blobstor["type"])
-                    for blobstor in shard["blobstor"]
-                ],
-                metabase=shard["metabase"]["path"],
-                writecache=shard["writecache"]["path"],
-            )
-            for shard in config["storage"]["shard"].values()
-        ]
+    def get_shards_from_config(node: StorageNode) -> list[Shard]:
+        config_file = node.get_remote_config_path()
+        file_type = pathlib.Path(config_file).suffix
+        contents = node.host.get_shell().exec(f"cat {config_file}").stdout
+
+        parser_method = {
+            ".env": shards_from_env,
+            ".yaml": shards_from_yaml,
+            ".yml": shards_from_yaml,
+        }
+
+        shards = parser_method[file_type](contents)
+        return shards
 
     @staticmethod
-    def get_shards_from_cli(host: Host, service_config: ServiceConfig) -> list[Shard]:
-        wallet_path = service_config.attributes["wallet_path"]
-        wallet_password = service_config.attributes["wallet_password"]
-        control_endpoint = service_config.attributes["control_endpoint"]
+    def get_shards_from_cli(node: StorageNode) -> list[Shard]:
+        wallet_path = node.get_remote_wallet_path()
+        wallet_password = node.get_wallet_password()
+        control_endpoint = node.get_control_endpoint()
 
-        cli = NeofsCli(host.get_shell(), NEOFS_CLI_EXEC, WALLET_CONFIG)
+        cli_config = node.host.get_cli_config("neofs-cli")
+
+        cli = NeofsCli(node.host.get_shell(), cli_config.exec_path, WALLET_CONFIG)
         result = cli.shards.list(
             endpoint=control_endpoint,
             wallet=wallet_path,
             wallet_password=wallet_password,
             json_mode=True,
         )
-        return [
-            Shard(
-                blobstor=[
-                    Blobstor(path=blobstor["path"], path_type=blobstor["type"])
-                    for blobstor in shard["blobstor"]
-                ],
-                metabase=shard["metabase"],
-                writecache=shard["writecache"],
-            )
-            for shard in json.loads(result.stdout.split(">", 1)[1])
-        ]
+        return [Shard.from_object(shard) for shard in json.loads(result.stdout.split(">", 1)[1])]
 
     @allure.title("All shards are available")
-    def test_control_shard(self, hosting: Hosting, client_shell: Shell, cluster: Cluster):
-        for stroage_host in cluster.storage_nodes:
-            shards_from_config = self.get_shards_from_config(
-                hosting.get_host_by_service(stroage_host.name),
-                hosting.get_service_config(stroage_host.name),
-            )
-            shards_from_cli = self.get_shards_from_cli(
-                hosting.get_host_by_service(stroage_host.name),
-                hosting.get_service_config(stroage_host.name),
-            )
+    def test_control_shard(self, cluster: Cluster):
+        for storage_node in cluster.storage_nodes:
+            shards_from_config = self.get_shards_from_config(storage_node)
+            shards_from_cli = self.get_shards_from_cli(storage_node)
             assert set(shards_from_config) == set(shards_from_cli)
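Since get_shards_from_config now dispatches on the config file extension, the same shard set has to be recoverable from either an .env-style file (flat NEOFS_STORAGE_SHARD_* keys read via ConfigObj) or a YAML file (nested storage.shard sections read via yaml.safe_load). A self-contained sketch of both formats, with made-up paths standing in for the values a real storage node would define:

import re
from io import StringIO

import yaml
from configobj import ConfigObj

SHARD_PREFIX = "NEOFS_STORAGE_SHARD_"

# Hypothetical .env fragment; shards_from_env counts distinct shard ids with a
# regex and reads each key through ConfigObj.
env_contents = "\n".join(
    [
        'NEOFS_STORAGE_SHARD_0_METABASE_PATH="/storage/meta0"',
        'NEOFS_STORAGE_SHARD_0_WRITECACHE_ENABLED="false"',
        'NEOFS_STORAGE_SHARD_0_BLOBSTOR_0_PATH="/storage/blobstor0"',
        'NEOFS_STORAGE_SHARD_0_BLOBSTOR_0_TYPE="fstree"',
    ]
)
config_object = ConfigObj(StringIO(env_contents))
num_shards = len(set(re.findall(rf"{SHARD_PREFIX}\d*", env_contents)))
print(num_shards)  # 1
print(config_object.get("NEOFS_STORAGE_SHARD_0_BLOBSTOR_0_PATH"))  # /storage/blobstor0

# Hypothetical YAML equivalent of the same shard; shards_from_yaml drops the
# "default" section before building Shard objects.
yaml_contents = """
storage:
  shard:
    default: {}
    0:
      metabase:
        path: /storage/meta0
      writecache:
        enabled: false
      blobstor:
        - path: /storage/blobstor0
          type: fstree
"""
config = yaml.safe_load(yaml_contents)
config["storage"]["shard"].pop("default")
print(list(config["storage"]["shard"]))  # [0]

In the test itself the parsed result is compared against the shard list reported over the control endpoint, so the two sources must agree.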
@@ -16,6 +16,7 @@ cffi==1.15.0
 chardet==4.0.0
 charset-normalizer==2.0.12
 coverage==6.3.3
+configobj==5.0.6
 docker==4.4.0
 docutils==0.17.1
 Events==0.4
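For the CLI side of that comparison, get_shards_from_cli keeps the existing trick of discarding everything up to the first ">" in the captured stdout and JSON-decoding the remainder, now funneled through Shard.from_object. A small sketch of that parsing step with a made-up stdout value (the prefix before ">" is only a placeholder for whatever the shell wrapper prints):

import json

# Hypothetical captured stdout: everything up to and including the first ">"
# is discarded, the rest is the JSON shard list.
stdout = (
    "neofs-cli control shards list ... >"
    '[{"blobstor": [{"path": "/storage/blobstor0", "type": "fstree"}],'
    ' "metabase": "/storage/meta0", "writecache": ""}]'
)
shards = json.loads(stdout.split(">", 1)[1])
print(shards[0]["metabase"])  # /storage/meta0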