[#314] Format all files with black and isort

Signed-off-by: Vladimir Domnich <v.domnich@yadro.com>
Vladimir Domnich 2022-09-28 16:07:16 +04:00 committed by Vladimir
parent 26032a67ec
commit 147cac0ebc
46 changed files with 1506 additions and 1100 deletions


@@ -1,6 +1,6 @@
 [tool.isort]
 profile = "black"
-src_paths = ["pytest_tests", "robot"]
+src_paths = ["neofs-keywords", "pytest_tests", "robot"]
 line_length = 100
 [tool.black]
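The [tool.isort] and [tool.black] sections above are the configuration that the rest of this commit mechanically applies. As a rough illustration of what that pass does (a minimal sketch, assuming black and isort are installed; the sample snippet and variable names are illustrative, not taken from the repository):

import black
import isort

# A snippet in the style of the pre-formatting code: single quotes, unsorted imports.
source = "import re\nimport logging\nlogger = logging.getLogger('NeoLogger')\n"

# isort with profile="black" orders and groups imports in a black-compatible way.
sorted_source = isort.code(source, profile="black", line_length=100)

# black normalizes string quotes to double quotes and re-wraps lines at 100
# characters, which accounts for most of the hunks below.
formatted = black.format_str(sorted_source, mode=black.Mode(line_length=100))
print(formatted)

In practice the same result comes from running the isort and black CLIs from the repository root, where they pick these settings up from pyproject.toml automatically.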


@@ -4,37 +4,37 @@ import re
 import allure
 from pytest import Config
-logger = logging.getLogger('NeoLogger')
+logger = logging.getLogger("NeoLogger")
-@allure.step('Read environment.properties')
+@allure.step("Read environment.properties")
 def read_env_properties(config: Config) -> dict:
-environment_dir = config.getoption('--alluredir')
+environment_dir = config.getoption("--alluredir")
 if not environment_dir:
 return None
-file_path = f'{environment_dir}/environment.properties'
+file_path = f"{environment_dir}/environment.properties"
-with open(file_path, 'r') as file:
+with open(file_path, "r") as file:
 raw_content = file.read()
 env_properties = {}
-for line in raw_content.split('\n'):
+for line in raw_content.split("\n"):
-m = re.match('(.*?)=(.*)', line)
+m = re.match("(.*?)=(.*)", line)
 if not m:
-logger.warning(f'Could not parse env property from {line}')
+logger.warning(f"Could not parse env property from {line}")
 continue
 key, value = m.group(1), m.group(2)
 env_properties[key] = value
 return env_properties
-@allure.step('Update data in environment.properties')
+@allure.step("Update data in environment.properties")
 def save_env_properties(config: Config, env_data: dict) -> None:
-environment_dir = config.getoption('--alluredir')
+environment_dir = config.getoption("--alluredir")
 if not environment_dir:
 return None
-file_path = f'{environment_dir}/environment.properties'
+file_path = f"{environment_dir}/environment.properties"
-with open(file_path, 'a+') as env_file:
+with open(file_path, "a+") as env_file:
 for env, env_value in env_data.items():
-env_file.write(f'{env}={env_value}\n')
+env_file.write(f"{env}={env_value}\n")


@@ -1,6 +1,5 @@
 import re
 # Regex patterns of status codes of Container service (https://github.com/nspcc-dev/neofs-spec/blob/98b154848116223e486ce8b43eaa35fec08b4a99/20-api-v2/container.md)
 CONTAINER_NOT_FOUND = "code = 3072.*message = container not found"


@@ -2,15 +2,14 @@ from ssh_helper import HostClient
 class IpTablesHelper:
 @staticmethod
 def drop_input_traffic_to_port(client: HostClient, ports: list[str]):
 for port in ports:
-cmd_output = client.exec(cmd=f'sudo iptables -A INPUT -p tcp --dport {port} -j DROP')
+cmd_output = client.exec(cmd=f"sudo iptables -A INPUT -p tcp --dport {port} -j DROP")
 assert cmd_output.rc == 0
 @staticmethod
 def restore_input_traffic_to_port(client: HostClient, ports: list[str]):
 for port in ports:
-cmd_output = client.exec(cmd=f'sudo iptables -D INPUT -p tcp --dport {port} -j DROP')
+cmd_output = client.exec(cmd=f"sudo iptables -D INPUT -p tcp --dport {port} -j DROP")
 assert cmd_output.rc == 0


@@ -1,9 +1,11 @@
-from dataclasses import dataclass
-import allure
 from contextlib import contextmanager
+from dataclasses import dataclass
 from time import sleep
-from pytest_tests.helpers.ssh_helper import HostClient
+import allure
 from pytest_tests.helpers.remote_process import RemoteProcess
+from pytest_tests.helpers.ssh_helper import HostClient
 EXIT_RESULT_CODE = 0
@@ -22,11 +24,7 @@ class LoadParams:
 class K6:
-def __init__(
-self,
-load_params: LoadParams,
-host_client: HostClient
-):
+def __init__(self, load_params: LoadParams, host_client: HostClient):
 self.load_params = load_params
 self.host_client = host_client
@@ -51,10 +49,7 @@ class K6:
 @allure.step("Prepare containers and objects")
 def prepare(self) -> str:
 self._k6_dir = self.k6_dir
-if (
-self.load_params.load_type == "http"
-or self.load_params.load_type == "grpc"
-):
+if self.load_params.load_type == "http" or self.load_params.load_type == "grpc":
 command = (
 f"{self.k6_dir}/scenarios/preset/preset_grpc.py "
 f"--size {self.load_params.obj_size} "


@@ -1,8 +1,11 @@
 from __future__ import annotations
 import uuid
+from typing import Optional
 import allure
 from tenacity import retry, stop_after_attempt, wait_fixed
-from typing import Optional
 from pytest_tests.helpers.ssh_helper import HostClient
@@ -179,8 +182,6 @@ class RemoteProcess:
 f"echo $? > {self.process_dir}/rc"
 )
-self.host_client.exec(
-f'echo "{script}" > {self.process_dir}/command.sh'
-)
+self.host_client.exec(f'echo "{script}" > {self.process_dir}/command.sh')
 self.host_client.exec(f"cat {self.process_dir}/command.sh")
 self.host_client.exec(f"chmod +x {self.process_dir}/command.sh")


@@ -1,7 +1,7 @@
 from typing import Optional
 import allure
+import pytest
 from steps import s3_gate_bucket, s3_gate_object


@@ -20,17 +20,17 @@ class SberCloudConfig:
 project_id: Optional[str] = None
 @staticmethod
-def from_dict(config_dict: dict) -> 'SberCloudConfig':
+def from_dict(config_dict: dict) -> "SberCloudConfig":
 return SberCloudConfig(**config_dict)
 @staticmethod
-def from_yaml(config_path: str) -> 'SberCloudConfig':
+def from_yaml(config_path: str) -> "SberCloudConfig":
 with open(config_path) as file:
 config_dict = yaml.load(file, Loader=yaml.FullLoader)
 return SberCloudConfig.from_dict(config_dict["sbercloud"])
 @staticmethod
-def from_env() -> 'SberCloudConfig':
+def from_env() -> "SberCloudConfig":
 config_dict = {
 "access_key_id": os.getenv("SBERCLOUD_ACCESS_KEY_ID"),
 "secret_key": os.getenv("SBERCLOUD_SECRET_KEY"),
@@ -53,7 +53,9 @@ class SberCloudAuthRequests:
 ALGORITHM = "SDK-HMAC-SHA256"
 TIMESTAMP_FORMAT = "%Y%m%dT%H%M%SZ"
-def __init__(self, endpoint: str, access_key_id: str, secret_key: str, base_path: str = "") -> None:
+def __init__(
+self, endpoint: str, access_key_id: str, secret_key: str, base_path: str = ""
+) -> None:
 self.endpoint = endpoint
 self.base_path = base_path
 self.access_key_id = access_key_id
@@ -62,12 +64,14 @@
 def get(self, path: str, query: Optional[dict] = None) -> requests.Response:
 return self._send_request("GET", path, query, data=None)
-def post(self, path: str, query: Optional[dict] = None,
-data: Optional[dict] = None) -> requests.Response:
+def post(
+self, path: str, query: Optional[dict] = None, data: Optional[dict] = None
+) -> requests.Response:
 return self._send_request("POST", path, query, data)
-def _send_request(self, method: str, path: str, query: Optional[dict],
-data: Optional[dict]) -> requests.Response:
+def _send_request(
+self, method: str, path: str, query: Optional[dict], data: Optional[dict]
+) -> requests.Response:
 if self.base_path:
 path = self.base_path + path
@@ -82,8 +86,9 @@ class SberCloudAuthRequests:
 body = content.encode(self.ENCODING)
 signed_headers = self._build_signed_headers(headers)
-canonical_request = self._build_canonical_request(method, path, query, body, headers,
-signed_headers)
+canonical_request = self._build_canonical_request(
+method, path, query, body, headers, signed_headers
+)
 signature = self._build_signature(timestamp, canonical_request)
 headers["Authorization"] = self._build_authorization_header(signature, signed_headers)
@@ -92,8 +97,10 @@
 response = requests.request(method, url, headers=headers, data=body)
 if response.status_code < 200 or response.status_code >= 300:
-raise AssertionError(f"Request to url={url} failed: status={response.status_code} "
-f"response={response.text})")
+raise AssertionError(
+f"Request to url={url} failed: status={response.status_code} "
+f"response={response.text})"
+)
 return response
 def _build_original_headers(self, timestamp: str) -> dict[str, str]:
@@ -105,21 +112,30 @@
 def _build_signed_headers(self, headers: dict[str, str]) -> list[str]:
 return sorted(header_name.lower() for header_name in headers)
-def _build_canonical_request(self, method: str, path: str, query: Optional[dict], body: bytes,
-headers: dict[str, str], signed_headers: list[str]) -> str:
+def _build_canonical_request(
+self,
+method: str,
+path: str,
+query: Optional[dict],
+body: bytes,
+headers: dict[str, str],
+signed_headers: list[str],
+) -> str:
 canonical_headers = self._build_canonical_headers(headers, signed_headers)
 body_hash = self._calc_sha256_hash(body)
 canonical_url = self._build_canonical_url(path)
 canonical_query_string = self._build_canonical_query_string(query)
-return "\n".join([
-method.upper(),
-canonical_url,
-canonical_query_string,
-canonical_headers,
-";".join(signed_headers),
-body_hash
-])
+return "\n".join(
+[
+method.upper(),
+canonical_url,
+canonical_query_string,
+canonical_headers,
+";".join(signed_headers),
+body_hash,
+]
+)
 def _build_canonical_headers(self, headers: dict[str, str], signed_headers: list[str]) -> str:
 normalized_headers = {}
@@ -166,7 +182,7 @@
 hmac_digest = hmac.new(
 key=self.secret_key.encode(self.ENCODING),
 msg=string_to_sign.encode(self.ENCODING),
-digestmod=hashlib.sha256
+digestmod=hashlib.sha256,
 ).digest()
 signature = binascii.hexlify(hmac_digest).decode()
@@ -185,6 +201,7 @@ class SberCloud:
 https://docs.sbercloud.ru/terraform/ug/topics/quickstart.html
 https://support.hc.sbercloud.ru/en-us/api/ecs/en-us_topic_0020212668.html
 """
 def __init__(self, config: SberCloudConfig) -> None:
 self.ecs_requests = SberCloudAuthRequests(
 endpoint=config.ecs_endpoint,
@@ -198,42 +215,29 @@ class SberCloud:
 if not self.ecs_nodes or no_cache:
 self.ecs_nodes = self.get_ecs_nodes()
 nodes_by_ip = [
-node for node in self.ecs_nodes
-if ip in [
-node_ip['addr']
-for node_ips in node['addresses'].values()
-for node_ip in node_ips
-]
+node
+for node in self.ecs_nodes
+if ip
+in [node_ip["addr"] for node_ips in node["addresses"].values() for node_ip in node_ips]
 ]
 assert len(nodes_by_ip) == 1
-return nodes_by_ip[0]['id']
+return nodes_by_ip[0]["id"]
 def get_ecs_nodes(self) -> list[dict]:
 response = self.ecs_requests.get("/detail", {"limit": "1000"}).json()
 return response["servers"]
 def start_node(self, node_id: Optional[str] = None, node_ip: Optional[str] = None) -> None:
-data = {
-'os-start': {
-'servers': [
-{
-'id': node_id or self.find_ecs_node_by_ip(node_ip)
-}
-]
-}
-}
+data = {"os-start": {"servers": [{"id": node_id or self.find_ecs_node_by_ip(node_ip)}]}}
 self.ecs_requests.post("/action", data=data)
-def stop_node(self, node_id: Optional[str] = None, node_ip: Optional[str] = None,
-hard: bool = False) -> None:
+def stop_node(
+self, node_id: Optional[str] = None, node_ip: Optional[str] = None, hard: bool = False
+) -> None:
 data = {
-'os-stop': {
-'type': 'HARD' if hard else 'SOFT',
-'servers': [
-{
-'id': node_id or self.find_ecs_node_by_ip(node_ip)
-}
-]
+"os-stop": {
+"type": "HARD" if hard else "SOFT",
+"servers": [{"id": node_id or self.find_ecs_node_by_ip(node_ip)}],
 }
 }
 self.ecs_requests.post("/action", data=data)


@@ -6,18 +6,23 @@ import time
 from contextlib import contextmanager
 from datetime import datetime
 from typing import Optional
-from requests import HTTPError
 import docker
 from cli_helpers import _cmd_run
-from common import (INFRASTRUCTURE_TYPE, NEOFS_CLI_EXEC, NEOFS_NETMAP_DICT, STORAGE_NODE_BIN_PATH,
-STORAGE_NODE_SSH_PASSWORD, STORAGE_NODE_SSH_PRIVATE_KEY_PATH,
-STORAGE_NODE_SSH_USER, WALLET_CONFIG)
+from common import (
+INFRASTRUCTURE_TYPE,
+NEOFS_CLI_EXEC,
+NEOFS_NETMAP_DICT,
+STORAGE_NODE_BIN_PATH,
+STORAGE_NODE_SSH_PASSWORD,
+STORAGE_NODE_SSH_PRIVATE_KEY_PATH,
+STORAGE_NODE_SSH_USER,
+WALLET_CONFIG,
+)
+from requests import HTTPError
 from ssh_helper import HostClient
-logger = logging.getLogger('NeoLogger')
+logger = logging.getLogger("NeoLogger")
 class LocalDevEnvStorageServiceHelper:
@@ -59,8 +64,8 @@ class LocalDevEnvStorageServiceHelper:
 wallet_path = NEOFS_NETMAP_DICT[node_name]["wallet_path"]
 cmd = (
-f'{NEOFS_CLI_EXEC} {command} --endpoint {control_endpoint} '
-f'--wallet {wallet_path} --config {WALLET_CONFIG}'
+f"{NEOFS_CLI_EXEC} {command} --endpoint {control_endpoint} "
+f"--wallet {wallet_path} --config {WALLET_CONFIG}"
 )
 output = _cmd_run(cmd)
 return output
@@ -77,8 +82,9 @@
 def get_binaries_version(self) -> dict:
 return {}
-def dump_logs(self, directory_path: str, since: Optional[datetime],
-until: Optional[datetime]) -> None:
+def dump_logs(
+self, directory_path: str, since: Optional[datetime], until: Optional[datetime]
+) -> None:
 # All containers are running on the same host, so we can use 1st node to collect all logs
 first_node_name = next(iter(NEOFS_NETMAP_DICT))
 client = self._get_docker_client(first_node_name)
@@ -107,8 +113,9 @@
 return container
 return None
-def _wait_for_container_to_be_in_state(self, node_name: str, container_name: str,
-expected_state: str) -> None:
+def _wait_for_container_to_be_in_state(
+self, node_name: str, container_name: str, expected_state: str
+) -> None:
 for __attempt in range(10):
 container = self._get_container_by_name(node_name, container_name)
 logger.info(f"Container info:\n{json.dumps(container, indent=2)}")
@@ -116,7 +123,7 @@
 return
 time.sleep(5)
-raise AssertionError(f'Container {container_name} is not in {expected_state} state.')
+raise AssertionError(f"Container {container_name} is not in {expected_state} state.")
 def _get_docker_client(self, node_name: str) -> docker.APIClient:
 # For local docker we use default docker client that talks to unix socket
@@ -143,7 +150,9 @@ class CloudVmStorageServiceHelper:
 logger.info(f"Start command output: {output.stdout}")
 if wait:
-self._wait_for_service_to_be_in_state(node_name, self.STORAGE_SERVICE, "active (running)")
+self._wait_for_service_to_be_in_state(
+node_name, self.STORAGE_SERVICE, "active (running)"
+)
 def run_control_command(self, node_name: str, command: str) -> str:
 control_endpoint = NEOFS_NETMAP_DICT[node_name]["control"]
@@ -161,28 +170,31 @@
 # Put config on storage node host
 remote_config_path = f"/tmp/{node_name}-config.yaml"
 remote_config = 'password: ""'
-ssh_client.exec_with_confirmation(f"echo '{remote_config}' > {remote_config_path}", [""])
+ssh_client.exec_with_confirmation(
+f"echo '{remote_config}' > {remote_config_path}", [""]
+)
 # Execute command
 cmd = (
-f'sudo {STORAGE_NODE_BIN_PATH}/neofs-cli {command} --endpoint {control_endpoint} '
-f'--wallet {remote_wallet_path} --config {remote_config_path}'
+f"sudo {STORAGE_NODE_BIN_PATH}/neofs-cli {command} --endpoint {control_endpoint} "
+f"--wallet {remote_wallet_path} --config {remote_config_path}"
 )
 output = ssh_client.exec_with_confirmation(cmd, [""])
 return output.stdout
-def _wait_for_service_to_be_in_state(self, node_name: str, service_name: str,
-expected_state: str) -> None:
+def _wait_for_service_to_be_in_state(
+self, node_name: str, service_name: str, expected_state: str
+) -> None:
 with _create_ssh_client(node_name) as ssh_client:
 for __attempt in range(10):
 # Run command to get service status (set --lines=0 to suppress logs output)
 # Also we don't verify return code, because for an inactive service return code will be 3
-command = f'sudo systemctl status {service_name} --lines=0'
+command = f"sudo systemctl status {service_name} --lines=0"
 output = ssh_client.exec(command, verify=False)
 if expected_state in output.stdout:
 return
 time.sleep(3)
-raise AssertionError(f'Service {service_name} is not in {expected_state} state')
+raise AssertionError(f"Service {service_name} is not in {expected_state} state")
 def delete_node_data(self, node_name: str) -> None:
 with _create_ssh_client(node_name) as ssh_client:
@@ -190,16 +202,16 @@
 def get_binaries_version(self, binaries: list = None) -> dict:
 default_binaries = [
-'neo-go',
-'neofs-adm',
-'neofs-cli',
-'neofs-http-gw',
-'neofs-ir',
-'neofs-lens',
-'neofs-node',
-'neofs-s3-authmate',
-'neofs-s3-gw',
-'neogo-morph-cn',
+"neo-go",
+"neofs-adm",
+"neofs-cli",
+"neofs-http-gw",
+"neofs-ir",
+"neofs-lens",
+"neofs-node",
+"neofs-s3-authmate",
+"neofs-s3-gw",
+"neogo-morph-cn",
 ]
 binaries = binaries or default_binaries
@@ -208,31 +220,35 @@
 with _create_ssh_client(node_name) as ssh_client:
 for binary in binaries:
 try:
-out = ssh_client.exec(f'sudo {binary} --version').stdout
+out = ssh_client.exec(f"sudo {binary} --version").stdout
 except AssertionError as err:
-logger.error(f'Can not get version for {binary} because of\n{err}')
-version_map[binary] = 'Can not get version'
+logger.error(f"Can not get version for {binary} because of\n{err}")
+version_map[binary] = "Can not get version"
 continue
-version = re.search(r'version[:\s]*v?(.+)', out, re.IGNORECASE)
-version = version.group(1).strip() if version else 'Unknown'
+version = re.search(r"version[:\s]*v?(.+)", out, re.IGNORECASE)
+version = version.group(1).strip() if version else "Unknown"
 if not version_map.get(binary):
 version_map[binary] = version
 else:
-assert version_map[binary] == version, \
-f'Expected binary {binary} to have identical version on all nodes ' \
-f'(mismatch on node {node_name})'
+assert version_map[binary] == version, (
+f"Expected binary {binary} to have identical version on all nodes "
+f"(mismatch on node {node_name})"
+)
 return version_map
-def dump_logs(self, directory_path: str, since: Optional[datetime],
-until: Optional[datetime]) -> None:
+def dump_logs(
+self, directory_path: str, since: Optional[datetime], until: Optional[datetime]
+) -> None:
 for node_name, node_info in NEOFS_NETMAP_DICT.items():
 with _create_ssh_client(node_name) as ssh_client:
 # We do not filter out logs of neofs services, because system logs might contain
 # information that is useful for troubleshooting
-filters = " ".join([
-f"--since '{since:%Y-%m-%d %H:%M:%S}'" if since else "",
-f"--until '{until:%Y-%m-%d %H:%M:%S}'" if until else "",
-])
+filters = " ".join(
+[
+f"--since '{since:%Y-%m-%d %H:%M:%S}'" if since else "",
+f"--until '{until:%Y-%m-%d %H:%M:%S}'" if until else "",
+]
+)
 result = ssh_client.exec(f"journalctl --no-pager {filters}")
 logs = result.stdout
@@ -250,6 +266,7 @@ class RemoteDevEnvStorageServiceHelper(LocalDevEnvStorageServiceHelper):
 Most of operations are identical to local devenv, however, any interactions
 with host resources (files, etc.) require ssh into the remote host machine.
 """
 def _get_docker_client(self, node_name: str) -> docker.APIClient:
 # For remote devenv we use docker client that talks to tcp socket 2375:
 # https://docs.docker.com/engine/reference/commandline/dockerd/#daemon-socket-option
@@ -299,12 +316,12 @@ def _create_ssh_client(node_name: str) -> HostClient:
 def _get_node_host(node_name: str) -> str:
 if node_name not in NEOFS_NETMAP_DICT:
-raise AssertionError(f'Node {node_name} is not found!')
+raise AssertionError(f"Node {node_name} is not found!")
 # We use rpc endpoint to determine host address, because control endpoint
 # (if it is private) will be a local address on the host machine
 node_config = NEOFS_NETMAP_DICT.get(node_name)
-host = node_config.get('rpc').split(':')[0]
+host = node_config.get("rpc").split(":")[0]
 return host
@@ -313,7 +330,7 @@ def _get_storage_container_name(node_name: str) -> str:
 Converts name of storage node (as it is listed in netmap) into the name of docker container
 that runs instance of this storage node.
 """
-return node_name.split('.')[0]
+return node_name.split(".")[0]
 def _get_storage_volume_name(node_name: str) -> str:


@@ -10,7 +10,7 @@ from time import sleep
 from typing import ClassVar, Optional
 import allure
-from paramiko import AutoAddPolicy, SFTPClient, SSHClient, SSHException, ssh_exception, RSAKey
+from paramiko import AutoAddPolicy, RSAKey, SFTPClient, SSHClient, SSHException, ssh_exception
 from paramiko.ssh_exception import AuthenticationException
@@ -20,34 +20,37 @@ class HostIsNotAvailable(Exception):
 def __init__(self, ip: str = None, exc: Exception = None):
 msg = f'Host is not available{f" by ip: {ip}" if ip else ""}'
 if exc:
-msg = f'{msg}. {exc}'
+msg = f"{msg}. {exc}"
 super().__init__(msg)
 def log_command(func):
 @wraps(func)
-def wrapper(host: 'HostClient', command: str, *args, **kwargs):
+def wrapper(host: "HostClient", command: str, *args, **kwargs):
 display_length = 60
 short = command.removeprefix("$ProgressPreference='SilentlyContinue'\n")
 short = short[:display_length]
-short += '...' if short != command else ''
-with allure.step(f'SSH: {short}'):
+short += "..." if short != command else ""
+with allure.step(f"SSH: {short}"):
 logging.info(f'Execute command "{command}" on "{host.ip}"')
 start_time = datetime.utcnow()
 cmd_result = func(host, command, *args, **kwargs)
 end_time = datetime.utcnow()
-log_message = f'HOST: {host.ip}\n' \
-f'COMMAND:\n{textwrap.indent(command, " ")}\n' \
-f'RC:\n {cmd_result.rc}\n' \
-f'STDOUT:\n{textwrap.indent(cmd_result.stdout, " ")}\n' \
-f'STDERR:\n{textwrap.indent(cmd_result.stderr, " ")}\n' \
-f'Start / End / Elapsed\t {start_time.time()} / {end_time.time()} / {end_time - start_time}'
+log_message = (
+f"HOST: {host.ip}\n"
+f'COMMAND:\n{textwrap.indent(command, " ")}\n'
+f"RC:\n {cmd_result.rc}\n"
+f'STDOUT:\n{textwrap.indent(cmd_result.stdout, " ")}\n'
+f'STDERR:\n{textwrap.indent(cmd_result.stderr, " ")}\n'
+f"Start / End / Elapsed\t {start_time.time()} / {end_time.time()} / {end_time - start_time}"
+)
 logging.info(log_message)
-allure.attach(log_message, 'SSH command', allure.attachment_type.TEXT)
+allure.attach(log_message, "SSH command", allure.attachment_type.TEXT)
 return cmd_result
 return wrapper
@@ -65,9 +68,15 @@ class HostClient:
 TIMEOUT_RESTORE_CONNECTION = 10, 24
-def __init__(self, ip: str, login: str, password: Optional[str] = None,
-private_key_path: Optional[str] = None, private_key_passphrase: Optional[str] = None,
-init_ssh_client=True) -> None:
+def __init__(
+self,
+ip: str,
+login: str,
+password: Optional[str] = None,
+private_key_path: Optional[str] = None,
+private_key_passphrase: Optional[str] = None,
+init_ssh_client=True,
+) -> None:
 self.ip = ip
 self.login = login
 self.password = password
@@ -83,21 +92,25 @@
 return cmd_result
 @log_command
-def exec_with_confirmation(self, cmd: str, confirmation: list, verify=True, timeout=90) -> SSHCommand:
+def exec_with_confirmation(
+self, cmd: str, confirmation: list, verify=True, timeout=90
+) -> SSHCommand:
 ssh_stdin, ssh_stdout, ssh_stderr = self.ssh_client.exec_command(cmd, timeout=timeout)
 for line in confirmation:
-if not line.endswith('\n'):
-line = f'{line}\n'
+if not line.endswith("\n"):
+line = f"{line}\n"
 try:
 ssh_stdin.write(line)
 except OSError as err:
-logging.error(f'Got error {err} executing command {cmd}')
+logging.error(f"Got error {err} executing command {cmd}")
 ssh_stdin.close()
-output = SSHCommand(stdout=ssh_stdout.read().decode(errors='ignore'),
-stderr=ssh_stderr.read().decode(errors='ignore'),
-rc=ssh_stdout.channel.recv_exit_status())
+output = SSHCommand(
+stdout=ssh_stdout.read().decode(errors="ignore"),
+stderr=ssh_stderr.read().decode(errors="ignore"),
+rc=ssh_stdout.channel.recv_exit_status(),
+)
 if verify:
-debug_info = f'\nSTDOUT: {output.stdout}\nSTDERR: {output.stderr}\nRC: {output.rc}'
+debug_info = f"\nSTDOUT: {output.stdout}\nSTDERR: {output.stderr}\nRC: {output.rc}"
 assert output.rc == 0, f'Non zero rc from command: "{cmd}"{debug_info}'
 return output
@@ -111,7 +124,7 @@ class HostClient:
 self.create_connection()
 @contextmanager
-def create_ssh_connection(self) -> 'SSHClient':
+def create_ssh_connection(self) -> "SSHClient":
 if not self.ssh_client:
 self.create_connection()
 try:
@@ -119,38 +132,40 @@
 finally:
 self.drop()
-@allure.step('Restore connection')
+@allure.step("Restore connection")
 def restore_ssh_connection(self):
 retry_time, retry_count = self.TIMEOUT_RESTORE_CONNECTION
 for _ in range(retry_count):
 try:
 self.create_connection()
 except AssertionError:
-logging.warning(f'Host: Cant reach host: {self.ip}.')
+logging.warning(f"Host: Cant reach host: {self.ip}.")
 sleep(retry_time)
 else:
-logging.info(f'Host: Cant reach host: {self.ip}.')
+logging.info(f"Host: Cant reach host: {self.ip}.")
 return
-raise AssertionError(f'Host: Cant reach host: {self.ip} after 240 seconds..')
+raise AssertionError(f"Host: Cant reach host: {self.ip} after 240 seconds..")
-@allure.step('Copy file {host_path_to_file} to local file {path_to_file}')
+@allure.step("Copy file {host_path_to_file} to local file {path_to_file}")
 def copy_file_from_host(self, host_path_to_file: str, path_to_file: str):
 with self._sftp_client() as sftp_client:
 sftp_client.get(host_path_to_file, path_to_file)
 def copy_file_to_host(self, path_to_file: str, host_path_to_file: str):
-with allure.step(f'Copy local file {path_to_file} to remote file {host_path_to_file} on host {self.ip}'):
+with allure.step(
+f"Copy local file {path_to_file} to remote file {host_path_to_file} on host {self.ip}"
+):
 with self._sftp_client() as sftp_client:
 sftp_client.put(path_to_file, host_path_to_file)
-@allure.step('Save string to remote file {host_path_to_file}')
+@allure.step("Save string to remote file {host_path_to_file}")
 def copy_str_to_host_file(self, string: str, host_path_to_file: str):
-with tempfile.NamedTemporaryFile(mode='r+') as temp:
+with tempfile.NamedTemporaryFile(mode="r+") as temp:
 temp.writelines(string)
 temp.flush()
 with self._sftp_client() as client:
 client.put(temp.name, host_path_to_file)
-self.exec(f'cat {host_path_to_file}', verify=False)
+self.exec(f"cat {host_path_to_file}", verify=False)
 def create_connection(self, attempts=SSH_CONNECTION_ATTEMPTS):
 exc_err = None
@@ -166,8 +181,10 @@ class HostClient:
 self.ssh_client.connect(
 hostname=self.ip,
 username=self.login,
-pkey=RSAKey.from_private_key_file(self.private_key_path, self.private_key_passphrase),
-timeout=self.CONNECTION_TIMEOUT
+pkey=RSAKey.from_private_key_file(
+self.private_key_path, self.private_key_passphrase
+),
+timeout=self.CONNECTION_TIMEOUT,
 )
 else:
 logging.info(
@@ -178,25 +195,25 @@
 hostname=self.ip,
 username=self.login,
 password=self.password,
-timeout=self.CONNECTION_TIMEOUT
+timeout=self.CONNECTION_TIMEOUT,
 )
 return True
 except AuthenticationException as auth_err:
-logging.error(f'Host: {self.ip}. {auth_err}')
+logging.error(f"Host: {self.ip}. {auth_err}")
 self.drop()
 raise auth_err
 except (
 SSHException,
 ssh_exception.NoValidConnectionsError,
 AttributeError,
 socket.timeout,
-OSError
+OSError,
 ) as ssh_err:
 exc_err = ssh_err
 self.drop()
-logging.error(f'Host: {self.ip}, connection error. {exc_err}')
+logging.error(f"Host: {self.ip}, connection error. {exc_err}")
 raise HostIsNotAvailable(self.ip, exc_err)
@@ -211,21 +228,21 @@
 try:
 _, stdout, stderr = self.ssh_client.exec_command(cmd, timeout=timeout)
 return SSHCommand(
-stdout=stdout.read().decode(errors='ignore'),
-stderr=stderr.read().decode(errors='ignore'),
-rc=stdout.channel.recv_exit_status()
+stdout=stdout.read().decode(errors="ignore"),
+stderr=stderr.read().decode(errors="ignore"),
+rc=stdout.channel.recv_exit_status(),
 )
 except (
 SSHException,
 TimeoutError,
 ssh_exception.NoValidConnectionsError,
 ConnectionResetError,
 AttributeError,
 socket.timeout,
 ) as ssh_err:
-logging.error(f'Host: {self.ip}, exec command error {ssh_err}')
+logging.error(f"Host: {self.ip}, exec command error {ssh_err}")
 self.create_connection()
-raise HostIsNotAvailable(f'Host: {self.ip} is not reachable.')
+raise HostIsNotAvailable(f"Host: {self.ip} is not reachable.")
 @contextmanager
 def _sftp_client(self) -> SFTPClient:


@@ -3,6 +3,7 @@ import os
 import allure
 import pytest
+from python_keywords.container import list_containers
 from steps import s3_gate_bucket
 from steps.aws_cli_client import AwsCliClient
@@ -10,29 +11,31 @@ from steps.aws_cli_client import AwsCliClient
 class TestS3GateBase:
 s3_client = None
-@pytest.fixture(scope='class', autouse=True)
-@allure.title('[Class/Autouse]: Create S3 client')
+@pytest.fixture(scope="class", autouse=True)
+@allure.title("[Class/Autouse]: Create S3 client")
 def s3_client(self, prepare_wallet_and_deposit, request):
 wallet = prepare_wallet_and_deposit
 s3_bearer_rules_file = f"{os.getcwd()}/robot/resources/files/s3_bearer_rules.json"
-cid, bucket, access_key_id, secret_access_key, owner_private_key = \
-s3_gate_bucket.init_s3_credentials(
-wallet, s3_bearer_rules_file=s3_bearer_rules_file)
+(
+cid,
+bucket,
+access_key_id,
+secret_access_key,
+owner_private_key,
+) = s3_gate_bucket.init_s3_credentials(wallet, s3_bearer_rules_file=s3_bearer_rules_file)
 containers_list = list_containers(wallet)
-assert cid in containers_list, f'Expected cid {cid} in {containers_list}'
+assert cid in containers_list, f"Expected cid {cid} in {containers_list}"
-if request.param == 'aws cli':
+if request.param == "aws cli":
 try:
 client = AwsCliClient(access_key_id, secret_access_key)
 except Exception as err:
-if 'command was not found or was not executable' in str(err):
-pytest.skip('AWS CLI was not found')
+if "command was not found or was not executable" in str(err):
+pytest.skip("AWS CLI was not found")
 else:
-raise RuntimeError(
-'Error on creating instance for AwsCliClient') from err
+raise RuntimeError("Error on creating instance for AwsCliClient") from err
 else:
-client = s3_gate_bucket.config_s3_client(
-access_key_id, secret_access_key)
+client = s3_gate_bucket.config_s3_client(access_key_id, secret_access_key)
 TestS3GateBase.s3_client = client
 TestS3GateBase.wallet = wallet


@@ -3,9 +3,14 @@ from typing import Dict, List, Optional
 import allure
 import pytest
-from common import ASSETS_DIR, IR_WALLET_CONFIG, IR_WALLET_PATH, WALLET_CONFIG
-from common import STORAGE_WALLET_PATH, STORAGE_WALLET_CONFIG
+from common import (
+ASSETS_DIR,
+IR_WALLET_CONFIG,
+IR_WALLET_PATH,
+STORAGE_WALLET_CONFIG,
+STORAGE_WALLET_PATH,
+WALLET_CONFIG,
+)
 from python_keywords.acl import EACLRole
 from python_keywords.container import create_container
 from python_keywords.neofs_verbs import put_object
@@ -38,21 +43,15 @@ def wallets(prepare_wallet_and_deposit):
 yield Wallets(
 wallets={
 EACLRole.USER: [
-Wallet(
-wallet_path=prepare_wallet_and_deposit, config_path=WALLET_CONFIG
-)
+Wallet(wallet_path=prepare_wallet_and_deposit, config_path=WALLET_CONFIG)
 ],
 EACLRole.OTHERS: [
-Wallet(
-wallet_path=init_wallet(ASSETS_DIR)[0], config_path=WALLET_CONFIG
-),
-Wallet(
-wallet_path=init_wallet(ASSETS_DIR)[0], config_path=WALLET_CONFIG
-),
+Wallet(wallet_path=init_wallet(ASSETS_DIR)[0], config_path=WALLET_CONFIG),
+Wallet(wallet_path=init_wallet(ASSETS_DIR)[0], config_path=WALLET_CONFIG),
 ],
 EACLRole.SYSTEM: [
 Wallet(wallet_path=IR_WALLET_PATH, config_path=IR_WALLET_CONFIG),
-Wallet(wallet_path=STORAGE_WALLET_PATH, config_path=STORAGE_WALLET_CONFIG)
+Wallet(wallet_path=STORAGE_WALLET_PATH, config_path=STORAGE_WALLET_CONFIG),
 ],
 }
 )


@@ -1,17 +1,18 @@
 import logging
+from typing import Optional
+import allure
 import pytest
 from common import (
+ASSETS_DIR,
 COMPLEX_OBJ_SIZE,
+FREE_STORAGE,
 IR_WALLET_CONFIG,
 IR_WALLET_PASS,
 IR_WALLET_PATH,
 SIMPLE_OBJ_SIZE,
-ASSETS_DIR,
-FREE_STORAGE
 )
 from epoch import tick_epoch
-from typing import Optional
 from grpc_responses import OBJECT_ACCESS_DENIED, OBJECT_NOT_FOUND
 from python_keywords.acl import (
 EACLAccess,
@@ -36,8 +37,6 @@ from python_keywords.storage_group import (
 from python_keywords.utility_keywords import generate_file
 from wallet import init_wallet
-import allure
 logger = logging.getLogger("NeoLogger")
 deposit = 30
@@ -65,9 +64,7 @@ class TestStorageGroup:
 objects = [oid]
 storage_group = put_storagegroup(self.main_wallet, cid, objects)
-self.expect_success_for_storagegroup_operations(
-self.main_wallet, cid, objects, object_size
-)
+self.expect_success_for_storagegroup_operations(self.main_wallet, cid, objects, object_size)
 self.expect_failure_for_storagegroup_operations(
 self.other_wallet, cid, objects, storage_group
 )
@@ -81,9 +78,7 @@
 file_path = generate_file(object_size)
 oid = put_object(self.main_wallet, file_path, cid)
 objects = [oid]
-self.expect_success_for_storagegroup_operations(
-self.main_wallet, cid, objects, object_size
-)
+self.expect_success_for_storagegroup_operations(self.main_wallet, cid, objects, object_size)
 self.expect_success_for_storagegroup_operations(
 self.other_wallet, cid, objects, object_size
 )
@@ -97,9 +92,7 @@
 file_path = generate_file(object_size)
 oid = put_object(self.main_wallet, file_path, cid)
 objects = [oid]
-self.expect_success_for_storagegroup_operations(
-self.main_wallet, cid, objects, object_size
-)
+self.expect_success_for_storagegroup_operations(self.main_wallet, cid, objects, object_size)
 self.storagegroup_operations_by_other_ro_container(
 self.main_wallet, self.other_wallet, cid, objects, object_size
 )
@@ -113,14 +106,12 @@
 file_path = generate_file(object_size)
 oid = put_object(self.main_wallet, file_path, cid)
 objects = [oid]
-self.expect_success_for_storagegroup_operations(
-self.main_wallet, cid, objects, object_size
-)
+self.expect_success_for_storagegroup_operations(self.main_wallet, cid, objects, object_size)
 storage_group = put_storagegroup(self.main_wallet, cid, objects)
 eacl_deny = [
 EACLRule(access=EACLAccess.DENY, role=role, operation=op)
 for op in EACLOperation
 for role in EACLRole
 ]
 set_eacl(self.main_wallet, cid, create_eacl(cid, eacl_deny))
 self.expect_failure_for_storagegroup_operations(
@@ -208,15 +199,11 @@
 that System key is granted to make all operations except PUT and DELETE.
 """
 if not FREE_STORAGE:
-transfer_mainnet_gas(
-IR_WALLET_PATH, deposit + 1, wallet_password=IR_WALLET_PASS
-)
+transfer_mainnet_gas(IR_WALLET_PATH, deposit + 1, wallet_password=IR_WALLET_PASS)
 neofs_deposit(IR_WALLET_PATH, deposit, wallet_password=IR_WALLET_PASS)
 storage_group = put_storagegroup(wallet, cid, obj_list)
 with pytest.raises(Exception, match=OBJECT_ACCESS_DENIED):
-put_storagegroup(
-IR_WALLET_PATH, cid, obj_list, wallet_config=IR_WALLET_CONFIG
-)
+put_storagegroup(IR_WALLET_PATH, cid, obj_list, wallet_config=IR_WALLET_CONFIG)
 verify_list_storage_group(
 IR_WALLET_PATH, cid, storage_group, wallet_config=IR_WALLET_CONFIG
 )
@@ -229,6 +216,4 @@
 wallet_config=IR_WALLET_CONFIG,
 )
 with pytest.raises(Exception, match=OBJECT_ACCESS_DENIED):
-delete_storagegroup(
-IR_WALLET_PATH, cid, storage_group, wallet_config=IR_WALLET_CONFIG
-)
+delete_storagegroup(IR_WALLET_PATH, cid, storage_group, wallet_config=IR_WALLET_CONFIG)


@@ -1,6 +1,5 @@
 import allure
 import pytest
 from python_keywords.acl import EACLRole
 from python_keywords.container import create_container
 from python_keywords.container_access import (
@@ -31,9 +30,7 @@ class TestACLBasic:
 def private_container(self, wallets):
 user_wallet = wallets.get_wallet()
 with allure.step("Create private container"):
-cid_private = create_container(
-user_wallet.wallet_path, basic_acl=PRIVATE_ACL_F
-)
+cid_private = create_container(user_wallet.wallet_path, basic_acl=PRIVATE_ACL_F)
 yield cid_private
@@ -44,9 +41,7 @@
 def read_only_container(self, wallets):
 user_wallet = wallets.get_wallet()
 with allure.step("Create public readonly container"):
-cid_read_only = create_container(
-user_wallet.wallet_path, basic_acl=READONLY_ACL_F
-)
+cid_read_only = create_container(user_wallet.wallet_path, basic_acl=READONLY_ACL_F)
 yield cid_read_only
@@ -78,12 +73,8 @@
 attributes={"created": "other"},
 )
 with allure.step(f"Check {desc} has full access to public container"):
-check_full_access_to_container(
-wallet.wallet_path, cid, owner_object_oid, file_path
-)
-check_full_access_to_container(
-wallet.wallet_path, cid, other_object_oid, file_path
-)
+check_full_access_to_container(wallet.wallet_path, cid, owner_object_oid, file_path)
+check_full_access_to_container(wallet.wallet_path, cid, other_object_oid, file_path)
 @allure.title("Test basic ACL on private container")
 def test_basic_acl_private(self, wallets, private_container, file_path):
@@ -97,9 +88,7 @@
 owner_object_oid = put_object(user_wallet.wallet_path, file_path, cid)
 with allure.step("Check only owner has full access to private container"):
-with allure.step(
-"Check no one except owner has access to operations with container"
-):
+with allure.step("Check no one except owner has access to operations with container"):
 check_no_access_to_container(
 other_wallet.wallet_path, cid, owner_object_oid, file_path
 )
@@ -121,14 +110,8 @@
 with allure.step("Add test objects to container"):
 object_oid = put_object(user_wallet.wallet_path, file_path, cid)
-with allure.step(
-"Check other has read-only access to operations with container"
-):
-check_read_only_container(
-other_wallet.wallet_path, cid, object_oid, file_path
-)
+with allure.step("Check other has read-only access to operations with container"):
+check_read_only_container(other_wallet.wallet_path, cid, object_oid, file_path)
 with allure.step("Check owner has full access to public container"):
-check_full_access_to_container(
-user_wallet.wallet_path, cid, object_oid, file_path
-)
+check_full_access_to_container(user_wallet.wallet_path, cid, object_oid, file_path)


@@ -1,6 +1,5 @@
 import allure
 import pytest
 from python_keywords.acl import (
 EACLAccess,
 EACLOperation,
@@ -24,16 +23,12 @@ from python_keywords.container_access import (
 class TestACLBearer:
 @pytest.mark.parametrize("role", [EACLRole.USER, EACLRole.OTHERS])
 def test_bearer_token_operations(self, wallets, eacl_container_with_objects, role):
-allure.dynamic.title(
-f"Testcase to validate NeoFS operations with {role.value} BearerToken"
-)
+allure.dynamic.title(f"Testcase to validate NeoFS operations with {role.value} BearerToken")
 cid, objects_oids, file_path = eacl_container_with_objects
 user_wallet = wallets.get_wallet()
 deny_wallet = wallets.get_wallet(role)
-with allure.step(
-f"Check {role.value} has full access to container without bearer token"
-):
+with allure.step(f"Check {role.value} has full access to container without bearer token"):
 check_full_access_to_container(
 deny_wallet.wallet_path,
 cid,
@@ -44,16 +39,13 @@
 with allure.step(f"Set deny all operations for {role.value} via eACL"):
 eacl = [
-EACLRule(access=EACLAccess.DENY, role=role, operation=op)
-for op in EACLOperation
+EACLRule(access=EACLAccess.DENY, role=role, operation=op) for op in EACLOperation
 ]
 eacl_file = create_eacl(cid, eacl)
 set_eacl(user_wallet.wallet_path, cid, eacl_file)
 wait_for_cache_expired()
-with allure.step(
-f"Create bearer token for {role.value} with all operations allowed"
-):
+with allure.step(f"Create bearer token for {role.value} with all operations allowed"):
 bearer_token = form_bearertoken_file(
 user_wallet.wallet_path,
 cid,
@@ -88,8 +80,7 @@
 with allure.step(f"Set allow all operations for {role.value} via eACL"):
 eacl = [
-EACLRule(access=EACLAccess.ALLOW, role=role, operation=op)
-for op in EACLOperation
+EACLRule(access=EACLAccess.ALLOW, role=role, operation=op) for op in EACLOperation
 ]
 eacl_file = create_eacl(cid, eacl)
 set_eacl(user_wallet.wallet_path, cid, eacl_file)
@@ -107,9 +98,7 @@
 )
 @allure.title("BearerToken Operations for compound Operations")
-def test_bearer_token_compound_operations(
-self, wallets, eacl_container_with_objects
-):
+def test_bearer_token_compound_operations(self, wallets, eacl_container_with_objects):
 cid, objects_oids, file_path = eacl_container_with_objects
 user_wallet = wallets.get_wallet()
 other_wallet = wallets.get_wallet(role=EACLRole.OTHERS)
@@ -140,26 +129,19 @@
 deny_map_with_bearer = {
 EACLRole.USER: [
-op
-for op in deny_map[EACLRole.USER]
-if op not in bearer_map[EACLRole.USER]
+op for op in deny_map[EACLRole.USER] if op not in bearer_map[EACLRole.USER]
 ],
 EACLRole.OTHERS: [
-op
-for op in deny_map[EACLRole.OTHERS]
-if op not in bearer_map[EACLRole.OTHERS]
+op for op in deny_map[EACLRole.OTHERS] if op not in bearer_map[EACLRole.OTHERS]
 ],
 }
 eacl_deny = []
 for role, operations in deny_map.items():
 eacl_deny += [
-EACLRule(access=EACLAccess.DENY, role=role, operation=op)
-for op in operations
+EACLRule(access=EACLAccess.DENY, role=role, operation=op) for op in operations
 ]
-set_eacl(
-user_wallet.wallet_path, cid, eacl_table_path=create_eacl(cid, eacl_deny)
-)
+set_eacl(user_wallet.wallet_path, cid, eacl_table_path=create_eacl(cid, eacl_deny))
 wait_for_cache_expired()
 with allure.step("Check rule consistency without bearer"):
@@ -194,9 +176,7 @@
 user_wallet.wallet_path,
 cid,
 [
-EACLRule(
-operation=op, access=EACLAccess.ALLOW, role=EACLRole.OTHERS
-)
+EACLRule(operation=op, access=EACLAccess.ALLOW, role=EACLRole.OTHERS)
 for op in bearer_map[EACLRole.OTHERS]
 ],
 )


@@ -1,6 +1,5 @@
 import allure
 import pytest
 from common import NEOFS_NETMAP_DICT
 from failover_utils import wait_object_replication_on_nodes
 from python_keywords.acl import (
@@ -20,12 +19,12 @@ from python_keywords.container_access import (
 from python_keywords.neofs_verbs import put_object
 from python_keywords.node_management import drop_object
 from python_keywords.object_access import (
-can_get_object,
-can_put_object,
 can_delete_object,
 can_get_head_object,
+can_get_object,
 can_get_range_hash_of_object,
 can_get_range_of_object,
+can_put_object,
 can_search_object,
 )
 from wellknown_acl import PUBLIC_ACL


@ -1,6 +1,5 @@
import allure import allure
import pytest import pytest
from python_keywords.acl import ( from python_keywords.acl import (
EACLAccess, EACLAccess,
EACLFilter, EACLFilter,
@ -21,11 +20,7 @@ from python_keywords.container_access import (
check_no_access_to_container, check_no_access_to_container,
) )
from python_keywords.neofs_verbs import put_object from python_keywords.neofs_verbs import put_object
from python_keywords.object_access import ( from python_keywords.object_access import can_get_head_object, can_get_object, can_put_object
can_get_head_object,
can_get_object,
can_put_object,
)
from wellknown_acl import PUBLIC_ACL from wellknown_acl import PUBLIC_ACL
@ -112,12 +107,8 @@ class TestEACLFilters:
@pytest.mark.parametrize( @pytest.mark.parametrize(
"match_type", [EACLMatchType.STRING_EQUAL, EACLMatchType.STRING_NOT_EQUAL] "match_type", [EACLMatchType.STRING_EQUAL, EACLMatchType.STRING_NOT_EQUAL]
) )
def test_extended_acl_filters_request( def test_extended_acl_filters_request(self, wallets, eacl_container_with_objects, match_type):
self, wallets, eacl_container_with_objects, match_type allure.dynamic.title(f"Validate NeoFS operations with request filter: {match_type.name}")
):
allure.dynamic.title(
f"Validate NeoFS operations with request filter: {match_type.name}"
)
user_wallet = wallets.get_wallet() user_wallet = wallets.get_wallet()
other_wallet = wallets.get_wallet(EACLRole.OTHERS) other_wallet = wallets.get_wallet(EACLRole.OTHERS)
( (
@ -147,14 +138,10 @@ class TestEACLFilters:
# is STRING_EQUAL, then requests with "check_key=OTHER_ATTRIBUTE" will be allowed while # is STRING_EQUAL, then requests with "check_key=OTHER_ATTRIBUTE" will be allowed while
# requests with "check_key=ATTRIBUTE" will be denied, and vice versa # requests with "check_key=ATTRIBUTE" will be denied, and vice versa
allow_headers = ( allow_headers = (
self.OTHER_ATTRIBUTE self.OTHER_ATTRIBUTE if match_type == EACLMatchType.STRING_EQUAL else self.ATTRIBUTE
if match_type == EACLMatchType.STRING_EQUAL
else self.ATTRIBUTE
) )
deny_headers = ( deny_headers = (
self.ATTRIBUTE self.ATTRIBUTE if match_type == EACLMatchType.STRING_EQUAL else self.OTHER_ATTRIBUTE
if match_type == EACLMatchType.STRING_EQUAL
else self.OTHER_ATTRIBUTE
) )
# We test on 3 groups of objects with various headers, # We test on 3 groups of objects with various headers,
# but eACL rule should ignore object headers and # but eACL rule should ignore object headers and
@ -164,12 +151,8 @@ class TestEACLFilters:
objects_with_other_header, objects_with_other_header,
objects_without_header, objects_without_header,
): ):
with allure.step( with allure.step("Check other has full access when sending request without headers"):
"Check other has full access when sending request without headers" check_full_access_to_container(other_wallet.wallet_path, cid, oid.pop(), file_path)
):
check_full_access_to_container(
other_wallet.wallet_path, cid, oid.pop(), file_path
)
with allure.step( with allure.step(
"Check other has full access when sending request with allowed headers" "Check other has full access when sending request with allowed headers"
@ -182,9 +165,7 @@ class TestEACLFilters:
xhdr=allow_headers, xhdr=allow_headers,
) )
with allure.step( with allure.step("Check other has no access when sending request with denied headers"):
"Check other has no access when sending request with denied headers"
):
check_no_access_to_container( check_no_access_to_container(
other_wallet.wallet_path, other_wallet.wallet_path,
cid, cid,
@ -201,9 +182,7 @@ class TestEACLFilters:
user_wallet.wallet_path, user_wallet.wallet_path,
cid, cid,
[ [
EACLRule( EACLRule(operation=op, access=EACLAccess.ALLOW, role=EACLRole.OTHERS)
operation=op, access=EACLAccess.ALLOW, role=EACLRole.OTHERS
)
for op in EACLOperation for op in EACLOperation
], ],
) )
@ -265,9 +244,7 @@ class TestEACLFilters:
# but eACL rule should ignore request headers and validate # but eACL rule should ignore request headers and validate
# only object headers # only object headers
for xhdr in (self.ATTRIBUTE, self.OTHER_ATTRIBUTE, None): for xhdr in (self.ATTRIBUTE, self.OTHER_ATTRIBUTE, None):
with allure.step( with allure.step("Check other have full access to objects without attributes"):
"Check other have full access to objects without attributes"
):
check_full_access_to_container( check_full_access_to_container(
other_wallet.wallet_path, other_wallet.wallet_path,
cid, cid,
@ -276,9 +253,7 @@ class TestEACLFilters:
xhdr=xhdr, xhdr=xhdr,
) )
with allure.step( with allure.step("Check other have full access to objects without deny attribute"):
"Check other have full access to objects without deny attribute"
):
check_full_access_to_container( check_full_access_to_container(
other_wallet.wallet_path, other_wallet.wallet_path,
cid, cid,
@ -287,9 +262,7 @@ class TestEACLFilters:
xhdr=xhdr, xhdr=xhdr,
) )
with allure.step( with allure.step("Check other have no access to objects with deny attribute"):
"Check other have no access to objects with deny attribute"
):
with pytest.raises(AssertionError): with pytest.raises(AssertionError):
assert can_get_head_object( assert can_get_head_object(
other_wallet.wallet_path, cid, deny_objects[0], xhdr=xhdr other_wallet.wallet_path, cid, deny_objects[0], xhdr=xhdr
@ -328,9 +301,7 @@ class TestEACLFilters:
) )
allow_attribute = ( allow_attribute = (
self.OTHER_ATTRIBUTE self.OTHER_ATTRIBUTE if match_type == EACLMatchType.STRING_EQUAL else self.ATTRIBUTE
if match_type == EACLMatchType.STRING_EQUAL
else self.ATTRIBUTE
) )
with allure.step("Check other can PUT objects without denied attribute"): with allure.step("Check other can PUT objects without denied attribute"):
assert can_put_object( assert can_put_object(
@ -339,9 +310,7 @@ class TestEACLFilters:
assert can_put_object(other_wallet.wallet_path, cid, file_path) assert can_put_object(other_wallet.wallet_path, cid, file_path)
deny_attribute = ( deny_attribute = (
self.ATTRIBUTE self.ATTRIBUTE if match_type == EACLMatchType.STRING_EQUAL else self.OTHER_ATTRIBUTE
if match_type == EACLMatchType.STRING_EQUAL
else self.OTHER_ATTRIBUTE
) )
with allure.step("Check other can not PUT objects with denied attribute"): with allure.step("Check other can not PUT objects with denied attribute"):
with pytest.raises(AssertionError): with pytest.raises(AssertionError):
@ -432,8 +401,7 @@ class TestEACLFilters:
assert can_put_object(other_wallet.wallet_path, cid, file_path) assert can_put_object(other_wallet.wallet_path, cid, file_path)
with allure.step( with allure.step(
"Check other can get and put objects without attributes " "Check other can get and put objects without attributes and using bearer token"
"and using bearer token"
): ):
bearer_token_other = form_bearertoken_file( bearer_token_other = form_bearertoken_file(
user_wallet.wallet_path, user_wallet.wallet_path,
@ -464,9 +432,7 @@ class TestEACLFilters:
other_wallet.wallet_path, cid, file_path, bearer=bearer_token_other other_wallet.wallet_path, cid, file_path, bearer=bearer_token_other
) )
with allure.step( with allure.step(f"Check other can get objects with attributes matching the filter"):
f"Check other can get objects with attributes matching the filter"
):
oid = allow_objects.pop() oid = allow_objects.pop()
assert can_get_head_object(other_wallet.wallet_path, cid, oid) assert can_get_head_object(other_wallet.wallet_path, cid, oid)
assert can_get_object(other_wallet.wallet_path, cid, oid, file_path) assert can_get_object(other_wallet.wallet_path, cid, oid, file_path)
@ -474,17 +440,11 @@ class TestEACLFilters:
other_wallet.wallet_path, cid, file_path, attributes=allow_attribute other_wallet.wallet_path, cid, file_path, attributes=allow_attribute
) )
with allure.step( with allure.step("Check other cannot get objects without attributes matching the filter"):
"Check other cannot get objects without attributes matching the filter"
):
with pytest.raises(AssertionError): with pytest.raises(AssertionError):
assert can_get_head_object( assert can_get_head_object(other_wallet.wallet_path, cid, deny_objects[0])
other_wallet.wallet_path, cid, deny_objects[0]
)
with pytest.raises(AssertionError): with pytest.raises(AssertionError):
assert can_get_object( assert can_get_object(other_wallet.wallet_path, cid, deny_objects[0], file_path)
other_wallet.wallet_path, cid, deny_objects[0], file_path
)
with pytest.raises(AssertionError): with pytest.raises(AssertionError):
assert can_put_object( assert can_put_object(
other_wallet.wallet_path, cid, file_path, attributes=deny_attribute other_wallet.wallet_path, cid, file_path, attributes=deny_attribute


@ -3,75 +3,87 @@ import json
import allure import allure
import pytest import pytest
from epoch import tick_epoch from epoch import tick_epoch
from python_keywords.container import (create_container, delete_container, get_container, list_containers, from python_keywords.container import (
wait_for_container_creation, wait_for_container_deletion) create_container,
delete_container,
get_container,
list_containers,
wait_for_container_creation,
wait_for_container_deletion,
)
from utility import placement_policy_from_container from utility import placement_policy_from_container
from wellknown_acl import PRIVATE_ACL_F from wellknown_acl import PRIVATE_ACL_F
@pytest.mark.parametrize('name', ['', 'test-container'], ids=['No name', 'Set particular name']) @pytest.mark.parametrize("name", ["", "test-container"], ids=["No name", "Set particular name"])
@pytest.mark.sanity @pytest.mark.sanity
@pytest.mark.container @pytest.mark.container
def test_container_creation(prepare_wallet_and_deposit, name): def test_container_creation(prepare_wallet_and_deposit, name):
scenario_title = f'with name {name}' if name else 'without name' scenario_title = f"with name {name}" if name else "without name"
allure.dynamic.title(f'User can create container {scenario_title}') allure.dynamic.title(f"User can create container {scenario_title}")
wallet = prepare_wallet_and_deposit wallet = prepare_wallet_and_deposit
with open(wallet) as file: with open(wallet) as file:
json_wallet = json.load(file) json_wallet = json.load(file)
placement_rule = 'REP 2 IN X CBF 1 SELECT 2 FROM * AS X' placement_rule = "REP 2 IN X CBF 1 SELECT 2 FROM * AS X"
cid = create_container(wallet, rule=placement_rule, name=name) cid = create_container(wallet, rule=placement_rule, name=name)
containers = list_containers(wallet) containers = list_containers(wallet)
assert cid in containers, f'Expected container {cid} in containers: {containers}' assert cid in containers, f"Expected container {cid} in containers: {containers}"
container_info: str = get_container(wallet, cid, json_mode=False) container_info: str = get_container(wallet, cid, json_mode=False)
container_info = container_info.casefold() # To ignore case when comparing with expected values container_info = container_info.casefold() # To ignore case when comparing with expected values
info_to_check = { info_to_check = {
f'basic ACL: {PRIVATE_ACL_F} (private)', f"basic ACL: {PRIVATE_ACL_F} (private)",
f'owner ID: {json_wallet.get("accounts")[0].get("address")}', f"owner ID: {json_wallet.get('accounts')[0].get('address')}",
f'container ID: {cid}', f"container ID: {cid}",
} }
if name: if name:
info_to_check.add(f'Name={name}') info_to_check.add(f"Name={name}")
with allure.step('Check container has correct information'): with allure.step("Check container has correct information"):
expected_policy = placement_rule.casefold() expected_policy = placement_rule.casefold()
actual_policy = placement_policy_from_container(container_info) actual_policy = placement_policy_from_container(container_info)
assert actual_policy == expected_policy, \ assert (
f'Expected policy\n{expected_policy} but got policy\n{actual_policy}' actual_policy == expected_policy
), f"Expected policy\n{expected_policy} but got policy\n{actual_policy}"
for info in info_to_check: for info in info_to_check:
expected_info = info.casefold() expected_info = info.casefold()
assert expected_info in container_info, \ assert (
f'Expected {expected_info} in container info:\n{container_info}' expected_info in container_info
), f"Expected {expected_info} in container info:\n{container_info}"
with allure.step('Delete container and check it was deleted'): with allure.step("Delete container and check it was deleted"):
delete_container(wallet, cid) delete_container(wallet, cid)
tick_epoch() tick_epoch()
wait_for_container_deletion(wallet, cid) wait_for_container_deletion(wallet, cid)
@allure.title('Parallel container creation and deletion') @allure.title("Parallel container creation and deletion")
@pytest.mark.sanity @pytest.mark.sanity
@pytest.mark.container @pytest.mark.container
def test_container_creation_deletion_parallel(prepare_wallet_and_deposit): def test_container_creation_deletion_parallel(prepare_wallet_and_deposit):
containers_count = 3 containers_count = 3
wallet = prepare_wallet_and_deposit wallet = prepare_wallet_and_deposit
placement_rule = 'REP 2 IN X CBF 1 SELECT 2 FROM * AS X' placement_rule = "REP 2 IN X CBF 1 SELECT 2 FROM * AS X"
cids: list[str] = [] cids: list[str] = []
with allure.step(f'Create {containers_count} containers'): with allure.step(f"Create {containers_count} containers"):
for _ in range(containers_count): for _ in range(containers_count):
cids.append(create_container(wallet, rule=placement_rule, await_mode=False, wait_for_creation=False)) cids.append(
create_container(
wallet, rule=placement_rule, await_mode=False, wait_for_creation=False
)
)
with allure.step(f'Wait for containers occur in container list'): with allure.step(f"Wait for containers occur in container list"):
for cid in cids: for cid in cids:
wait_for_container_creation(wallet, cid, sleep_interval=containers_count) wait_for_container_creation(wallet, cid, sleep_interval=containers_count)
with allure.step('Delete containers and check they were deleted'): with allure.step("Delete containers and check they were deleted"):
for cid in cids: for cid in cids:
delete_container(wallet, cid) delete_container(wallet, cid)
tick_epoch() tick_epoch()


@ -4,8 +4,11 @@ from time import sleep
import allure import allure
import pytest import pytest
from common import (STORAGE_NODE_SSH_PASSWORD, STORAGE_NODE_SSH_PRIVATE_KEY_PATH, from common import (
STORAGE_NODE_SSH_USER) STORAGE_NODE_SSH_PASSWORD,
STORAGE_NODE_SSH_PRIVATE_KEY_PATH,
STORAGE_NODE_SSH_USER,
)
from failover_utils import wait_all_storage_node_returned, wait_object_replication_on_nodes from failover_utils import wait_all_storage_node_returned, wait_object_replication_on_nodes
from iptables_helper import IpTablesHelper from iptables_helper import IpTablesHelper
from python_keywords.container import create_container from python_keywords.container import create_container
@ -14,24 +17,27 @@ from python_keywords.utility_keywords import generate_file, get_file_hash
from ssh_helper import HostClient from ssh_helper import HostClient
from wellknown_acl import PUBLIC_ACL from wellknown_acl import PUBLIC_ACL
logger = logging.getLogger('NeoLogger') logger = logging.getLogger("NeoLogger")
STORAGE_NODE_COMMUNICATION_PORT = '8080' STORAGE_NODE_COMMUNICATION_PORT = "8080"
STORAGE_NODE_COMMUNICATION_PORT_TLS = '8082' STORAGE_NODE_COMMUNICATION_PORT_TLS = "8082"
PORTS_TO_BLOCK = [STORAGE_NODE_COMMUNICATION_PORT, STORAGE_NODE_COMMUNICATION_PORT_TLS] PORTS_TO_BLOCK = [STORAGE_NODE_COMMUNICATION_PORT, STORAGE_NODE_COMMUNICATION_PORT_TLS]
blocked_hosts = [] blocked_hosts = []
@pytest.fixture(autouse=True) @pytest.fixture(autouse=True)
@allure.step('Restore network') @allure.step("Restore network")
def restore_network(): def restore_network():
yield yield
not_empty = len(blocked_hosts) != 0 not_empty = len(blocked_hosts) != 0
for host in list(blocked_hosts): for host in list(blocked_hosts):
with allure.step(f'Start storage node {host}'): with allure.step(f"Start storage node {host}"):
client = HostClient(ip=host, login=STORAGE_NODE_SSH_USER, client = HostClient(
password=STORAGE_NODE_SSH_PASSWORD, ip=host,
private_key_path=STORAGE_NODE_SSH_PRIVATE_KEY_PATH) login=STORAGE_NODE_SSH_USER,
password=STORAGE_NODE_SSH_PASSWORD,
private_key_path=STORAGE_NODE_SSH_PRIVATE_KEY_PATH,
)
with client.create_ssh_connection(): with client.create_ssh_connection():
IpTablesHelper.restore_input_traffic_to_port(client, PORTS_TO_BLOCK) IpTablesHelper.restore_input_traffic_to_port(client, PORTS_TO_BLOCK)
blocked_hosts.remove(host) blocked_hosts.remove(host)
@ -39,7 +45,7 @@ def restore_network():
wait_all_storage_node_returned() wait_all_storage_node_returned()
@allure.title('Block Storage node traffic') @allure.title("Block Storage node traffic")
@pytest.mark.failover @pytest.mark.failover
@pytest.mark.failover_net @pytest.mark.failover_net
def test_block_storage_node_traffic(prepare_wallet_and_deposit, cloud_infrastructure_check): def test_block_storage_node_traffic(prepare_wallet_and_deposit, cloud_infrastructure_check):
@ -47,7 +53,7 @@ def test_block_storage_node_traffic(prepare_wallet_and_deposit, cloud_infrastruc
Block storage nodes traffic using iptables and wait for replication for objects. Block storage nodes traffic using iptables and wait for replication for objects.
""" """
wallet = prepare_wallet_and_deposit wallet = prepare_wallet_and_deposit
placement_rule = 'REP 2 IN X CBF 2 SELECT 2 FROM * AS X' placement_rule = "REP 2 IN X CBF 2 SELECT 2 FROM * AS X"
excluded_nodes = [] excluded_nodes = []
wakeup_node_timeout = 10 # timeout to let nodes detect that traffic has blocked wakeup_node_timeout = 10 # timeout to let nodes detect that traffic has blocked
nodes_to_block_count = 2 nodes_to_block_count = 2
@ -57,24 +63,29 @@ def test_block_storage_node_traffic(prepare_wallet_and_deposit, cloud_infrastruc
oid = put_object(wallet, source_file_path, cid) oid = put_object(wallet, source_file_path, cid)
nodes = wait_object_replication_on_nodes(wallet, cid, oid, 2) nodes = wait_object_replication_on_nodes(wallet, cid, oid, 2)
logger.info(f'Nodes are {nodes}') logger.info(f"Nodes are {nodes}")
random_nodes = [(node, node.split(':')[0]) for node in nodes] random_nodes = [(node, node.split(":")[0]) for node in nodes]
if nodes_to_block_count > len(nodes): if nodes_to_block_count > len(nodes):
random_nodes = [(node, node.split(':')[0]) for node in choices(nodes, k=2)] random_nodes = [(node, node.split(":")[0]) for node in choices(nodes, k=2)]
for random_node, random_node_ip in random_nodes: for random_node, random_node_ip in random_nodes:
client = HostClient(ip=random_node_ip, login=STORAGE_NODE_SSH_USER, client = HostClient(
password=STORAGE_NODE_SSH_PASSWORD, ip=random_node_ip,
private_key_path=STORAGE_NODE_SSH_PRIVATE_KEY_PATH) login=STORAGE_NODE_SSH_USER,
password=STORAGE_NODE_SSH_PASSWORD,
private_key_path=STORAGE_NODE_SSH_PRIVATE_KEY_PATH,
)
with allure.step(f'Block incoming traffic for node {random_node} on port {PORTS_TO_BLOCK}'): with allure.step(f"Block incoming traffic for node {random_node} on port {PORTS_TO_BLOCK}"):
with client.create_ssh_connection(): with client.create_ssh_connection():
IpTablesHelper.drop_input_traffic_to_port(client, PORTS_TO_BLOCK) IpTablesHelper.drop_input_traffic_to_port(client, PORTS_TO_BLOCK)
blocked_hosts.append(random_node_ip) blocked_hosts.append(random_node_ip)
excluded_nodes.append(random_node) excluded_nodes.append(random_node)
sleep(wakeup_node_timeout) sleep(wakeup_node_timeout)
new_nodes = wait_object_replication_on_nodes(wallet, cid, oid, 2, excluded_nodes=excluded_nodes) new_nodes = wait_object_replication_on_nodes(
wallet, cid, oid, 2, excluded_nodes=excluded_nodes
)
assert random_node not in new_nodes assert random_node not in new_nodes
@ -82,11 +93,16 @@ def test_block_storage_node_traffic(prepare_wallet_and_deposit, cloud_infrastruc
assert get_file_hash(source_file_path) == get_file_hash(got_file_path) assert get_file_hash(source_file_path) == get_file_hash(got_file_path)
for random_node, random_node_ip in random_nodes: for random_node, random_node_ip in random_nodes:
client = HostClient(ip=random_node_ip, login=STORAGE_NODE_SSH_USER, client = HostClient(
password=STORAGE_NODE_SSH_PASSWORD, ip=random_node_ip,
private_key_path=STORAGE_NODE_SSH_PRIVATE_KEY_PATH) login=STORAGE_NODE_SSH_USER,
password=STORAGE_NODE_SSH_PASSWORD,
private_key_path=STORAGE_NODE_SSH_PRIVATE_KEY_PATH,
)
with allure.step(f'Unblock incoming traffic for node {random_node} on port {PORTS_TO_BLOCK}'): with allure.step(
f"Unblock incoming traffic for node {random_node} on port {PORTS_TO_BLOCK}"
):
with client.create_ssh_connection(): with client.create_ssh_connection():
IpTablesHelper.restore_input_traffic_to_port(client, PORTS_TO_BLOCK) IpTablesHelper.restore_input_traffic_to_port(client, PORTS_TO_BLOCK)
blocked_hosts.remove(random_node_ip) blocked_hosts.remove(random_node_ip)


@ -2,7 +2,11 @@ import logging
import allure import allure
import pytest import pytest
from common import STORAGE_NODE_SSH_PASSWORD, STORAGE_NODE_SSH_PRIVATE_KEY_PATH, STORAGE_NODE_SSH_USER from common import (
STORAGE_NODE_SSH_PASSWORD,
STORAGE_NODE_SSH_PRIVATE_KEY_PATH,
STORAGE_NODE_SSH_USER,
)
from failover_utils import wait_all_storage_node_returned, wait_object_replication_on_nodes from failover_utils import wait_all_storage_node_returned, wait_object_replication_on_nodes
from python_keywords.container import create_container from python_keywords.container import create_container
from python_keywords.neofs_verbs import get_object, put_object from python_keywords.neofs_verbs import get_object, put_object
@ -11,52 +15,61 @@ from sbercloud_helper import SberCloud, SberCloudConfig
from ssh_helper import HostClient from ssh_helper import HostClient
from wellknown_acl import PUBLIC_ACL from wellknown_acl import PUBLIC_ACL
logger = logging.getLogger('NeoLogger') logger = logging.getLogger("NeoLogger")
stopped_hosts = [] stopped_hosts = []
@pytest.fixture(scope='session') @pytest.fixture(scope="session")
def sbercloud_client(): def sbercloud_client():
with allure.step('Connect to SberCloud'): with allure.step("Connect to SberCloud"):
try: try:
config = SberCloudConfig.from_env() config = SberCloudConfig.from_env()
yield SberCloud(config) yield SberCloud(config)
except Exception as err: except Exception as err:
pytest.fail(f'SberCloud infrastructure not available. Error\n{err}') pytest.fail(f"SberCloud infrastructure not available. Error\n{err}")
@pytest.fixture(autouse=True) @pytest.fixture(autouse=True)
@allure.step('Return all storage nodes') @allure.step("Return all storage nodes")
def return_all_storage_nodes_fixture(sbercloud_client): def return_all_storage_nodes_fixture(sbercloud_client):
yield yield
return_all_storage_nodes(sbercloud_client) return_all_storage_nodes(sbercloud_client)
def panic_reboot_host(ip: str = None): def panic_reboot_host(ip: str = None):
ssh = HostClient(ip=ip, login=STORAGE_NODE_SSH_USER, ssh = HostClient(
password=STORAGE_NODE_SSH_PASSWORD, ip=ip,
private_key_path=STORAGE_NODE_SSH_PRIVATE_KEY_PATH) login=STORAGE_NODE_SSH_USER,
password=STORAGE_NODE_SSH_PASSWORD,
private_key_path=STORAGE_NODE_SSH_PRIVATE_KEY_PATH,
)
ssh.exec('sudo sh -c "echo 1 > /proc/sys/kernel/sysrq"') ssh.exec('sudo sh -c "echo 1 > /proc/sys/kernel/sysrq"')
ssh_stdin, _, _ = ssh.ssh_client.exec_command('sudo sh -c "echo b > /proc/sysrq-trigger"', timeout=1) ssh_stdin, _, _ = ssh.ssh_client.exec_command(
'sudo sh -c "echo b > /proc/sysrq-trigger"', timeout=1
)
ssh_stdin.close() ssh_stdin.close()
def return_all_storage_nodes(sbercloud_client: SberCloud) -> None: def return_all_storage_nodes(sbercloud_client: SberCloud) -> None:
for host in list(stopped_hosts): for host in list(stopped_hosts):
with allure.step(f'Start storage node {host}'): with allure.step(f"Start storage node {host}"):
sbercloud_client.start_node(node_ip=host.split(':')[-2]) sbercloud_client.start_node(node_ip=host.split(":")[-2])
stopped_hosts.remove(host) stopped_hosts.remove(host)
wait_all_storage_node_returned() wait_all_storage_node_returned()
@allure.title('Lost and returned nodes') @allure.title("Lost and returned nodes")
@pytest.mark.parametrize('hard_reboot', [True, False]) @pytest.mark.parametrize("hard_reboot", [True, False])
@pytest.mark.failover @pytest.mark.failover
def test_lost_storage_node(prepare_wallet_and_deposit, sbercloud_client: SberCloud, cloud_infrastructure_check, def test_lost_storage_node(
hard_reboot: bool): prepare_wallet_and_deposit,
sbercloud_client: SberCloud,
cloud_infrastructure_check,
hard_reboot: bool,
):
wallet = prepare_wallet_and_deposit wallet = prepare_wallet_and_deposit
placement_rule = 'REP 2 IN X CBF 2 SELECT 2 FROM * AS X' placement_rule = "REP 2 IN X CBF 2 SELECT 2 FROM * AS X"
source_file_path = generate_file() source_file_path = generate_file()
cid = create_container(wallet, rule=placement_rule, basic_acl=PUBLIC_ACL) cid = create_container(wallet, rule=placement_rule, basic_acl=PUBLIC_ACL)
oid = put_object(wallet, source_file_path, cid) oid = put_object(wallet, source_file_path, cid)
@ -65,15 +78,15 @@ def test_lost_storage_node(prepare_wallet_and_deposit, sbercloud_client: SberClo
new_nodes = [] new_nodes = []
for node in nodes: for node in nodes:
stopped_hosts.append(node) stopped_hosts.append(node)
with allure.step(f'Stop storage node {node}'): with allure.step(f"Stop storage node {node}"):
sbercloud_client.stop_node(node_ip=node.split(':')[-2], hard=hard_reboot) sbercloud_client.stop_node(node_ip=node.split(":")[-2], hard=hard_reboot)
new_nodes = wait_object_replication_on_nodes(wallet, cid, oid, 2, excluded_nodes=[node]) new_nodes = wait_object_replication_on_nodes(wallet, cid, oid, 2, excluded_nodes=[node])
assert not [node for node in nodes if node in new_nodes] assert not [node for node in nodes if node in new_nodes]
got_file_path = get_object(wallet, cid, oid, endpoint=new_nodes[0]) got_file_path = get_object(wallet, cid, oid, endpoint=new_nodes[0])
assert get_file_hash(source_file_path) == get_file_hash(got_file_path) assert get_file_hash(source_file_path) == get_file_hash(got_file_path)
with allure.step(f'Return storage nodes'): with allure.step(f"Return storage nodes"):
return_all_storage_nodes(sbercloud_client) return_all_storage_nodes(sbercloud_client)
new_nodes = wait_object_replication_on_nodes(wallet, cid, oid, 2) new_nodes = wait_object_replication_on_nodes(wallet, cid, oid, 2)
@ -82,36 +95,42 @@ def test_lost_storage_node(prepare_wallet_and_deposit, sbercloud_client: SberClo
assert get_file_hash(source_file_path) == get_file_hash(got_file_path) assert get_file_hash(source_file_path) == get_file_hash(got_file_path)
@allure.title('Panic storage node(s)') @allure.title("Panic storage node(s)")
@pytest.mark.parametrize('sequence', [True, False]) @pytest.mark.parametrize("sequence", [True, False])
@pytest.mark.failover_panic @pytest.mark.failover_panic
@pytest.mark.failover @pytest.mark.failover
def test_panic_storage_node(prepare_wallet_and_deposit, cloud_infrastructure_check, def test_panic_storage_node(prepare_wallet_and_deposit, cloud_infrastructure_check, sequence: bool):
sequence: bool):
wallet = prepare_wallet_and_deposit wallet = prepare_wallet_and_deposit
placement_rule = 'REP 2 IN X CBF 2 SELECT 2 FROM * AS X' placement_rule = "REP 2 IN X CBF 2 SELECT 2 FROM * AS X"
source_file_path = generate_file() source_file_path = generate_file()
cid = create_container(wallet, rule=placement_rule, basic_acl=PUBLIC_ACL) cid = create_container(wallet, rule=placement_rule, basic_acl=PUBLIC_ACL)
oid = put_object(wallet, source_file_path, cid) oid = put_object(wallet, source_file_path, cid)
nodes = wait_object_replication_on_nodes(wallet, cid, oid, 2) nodes = wait_object_replication_on_nodes(wallet, cid, oid, 2)
new_nodes: list[str] = [] new_nodes: list[str] = []
allure.attach('\n'.join(nodes), 'Current nodes with object', allure.attachment_type.TEXT) allure.attach("\n".join(nodes), "Current nodes with object", allure.attachment_type.TEXT)
for node in nodes: for node in nodes:
with allure.step(f'Hard reboot host {node} via magic SysRq option'): with allure.step(f"Hard reboot host {node} via magic SysRq option"):
panic_reboot_host(ip=node.split(':')[-2]) panic_reboot_host(ip=node.split(":")[-2])
if sequence: if sequence:
try: try:
new_nodes = wait_object_replication_on_nodes(wallet, cid, oid, 2, excluded_nodes=[node]) new_nodes = wait_object_replication_on_nodes(
wallet, cid, oid, 2, excluded_nodes=[node]
)
except AssertionError: except AssertionError:
new_nodes = wait_object_replication_on_nodes(wallet, cid, oid, 2) new_nodes = wait_object_replication_on_nodes(wallet, cid, oid, 2)
allure.attach('\n'.join(new_nodes), f'Nodes with object after {node} fail', allure.attach(
allure.attachment_type.TEXT) "\n".join(new_nodes),
f"Nodes with object after {node} fail",
allure.attachment_type.TEXT,
)
if not sequence: if not sequence:
new_nodes = wait_object_replication_on_nodes(wallet, cid, oid, 2) new_nodes = wait_object_replication_on_nodes(wallet, cid, oid, 2)
allure.attach('\n'.join(new_nodes), 'Nodes with object after nodes fail', allure.attachment_type.TEXT) allure.attach(
"\n".join(new_nodes), "Nodes with object after nodes fail", allure.attachment_type.TEXT
)
got_file_path = get_object(wallet, cid, oid, endpoint=new_nodes[0]) got_file_path = get_object(wallet, cid, oid, endpoint=new_nodes[0])
assert get_file_hash(source_file_path) == get_file_hash(got_file_path) assert get_file_hash(source_file_path) == get_file_hash(got_file_path)


@ -7,82 +7,98 @@ from common import COMPLEX_OBJ_SIZE, SIMPLE_OBJ_SIZE
from container import create_container from container import create_container
from epoch import get_epoch, tick_epoch from epoch import get_epoch, tick_epoch
from grpc_responses import OBJECT_ALREADY_REMOVED, OBJECT_NOT_FOUND, error_matches_status from grpc_responses import OBJECT_ALREADY_REMOVED, OBJECT_NOT_FOUND, error_matches_status
from python_keywords.neofs_verbs import (delete_object, get_object, get_range, get_range_hash, head_object, put_object, from python_keywords.neofs_verbs import (
search_object) delete_object,
get_object,
get_range,
get_range_hash,
head_object,
put_object,
search_object,
)
from python_keywords.storage_policy import get_simple_object_copies from python_keywords.storage_policy import get_simple_object_copies
from python_keywords.utility_keywords import generate_file, get_file_hash from python_keywords.utility_keywords import generate_file, get_file_hash
from tombstone import verify_head_tombstone from tombstone import verify_head_tombstone
from utility import get_file_content, wait_for_gc_pass_on_storage_nodes from utility import get_file_content, wait_for_gc_pass_on_storage_nodes
logger = logging.getLogger('NeoLogger') logger = logging.getLogger("NeoLogger")
CLEANUP_TIMEOUT = 10 CLEANUP_TIMEOUT = 10
@allure.title('Test native object API') @allure.title("Test native object API")
@pytest.mark.sanity @pytest.mark.sanity
@pytest.mark.grpc_api @pytest.mark.grpc_api
@pytest.mark.parametrize('object_size', [SIMPLE_OBJ_SIZE, COMPLEX_OBJ_SIZE], ids=['simple object', 'complex object']) @pytest.mark.parametrize(
"object_size", [SIMPLE_OBJ_SIZE, COMPLEX_OBJ_SIZE], ids=["simple object", "complex object"]
)
def test_object_api(prepare_wallet_and_deposit, request, object_size): def test_object_api(prepare_wallet_and_deposit, request, object_size):
""" """
Test common gRPC API for object (put/get/head/get_range_hash/get_range/search/delete). Test common gRPC API for object (put/get/head/get_range_hash/get_range/search/delete).
""" """
wallet = prepare_wallet_and_deposit wallet = prepare_wallet_and_deposit
cid = create_container(wallet) cid = create_container(wallet)
wallet_cid = {'wallet': wallet, 'cid': cid} wallet_cid = {"wallet": wallet, "cid": cid}
file_usr_header = {'key1': 1, 'key2': 'abc', 'common_key': 'common_value'} file_usr_header = {"key1": 1, "key2": "abc", "common_key": "common_value"}
file_usr_header_oth = {'key1': 2, 'common_key': 'common_value'} file_usr_header_oth = {"key1": 2, "common_key": "common_value"}
common_header = {'common_key': 'common_value'} common_header = {"common_key": "common_value"}
range_len = 10 range_len = 10
range_cut = f'0:{range_len}' range_cut = f"0:{range_len}"
oids = [] oids = []
allure.dynamic.title(f'Test native object API for {request.node.callspec.id}') allure.dynamic.title(f"Test native object API for {request.node.callspec.id}")
file_path = generate_file(object_size) file_path = generate_file(object_size)
file_hash = get_file_hash(file_path) file_hash = get_file_hash(file_path)
search_object(**wallet_cid, expected_objects_list=oids) search_object(**wallet_cid, expected_objects_list=oids)
with allure.step('Put objects'): with allure.step("Put objects"):
oids.append(put_object(wallet=wallet, path=file_path, cid=cid)) oids.append(put_object(wallet=wallet, path=file_path, cid=cid))
oids.append(put_object(wallet=wallet, path=file_path, cid=cid, attributes=file_usr_header)) oids.append(put_object(wallet=wallet, path=file_path, cid=cid, attributes=file_usr_header))
oids.append(put_object(wallet=wallet, path=file_path, cid=cid, attributes=file_usr_header_oth)) oids.append(
put_object(wallet=wallet, path=file_path, cid=cid, attributes=file_usr_header_oth)
)
with allure.step('Validate storage policy for objects'): with allure.step("Validate storage policy for objects"):
for oid_to_check in oids: for oid_to_check in oids:
assert get_simple_object_copies(wallet=wallet, cid=cid, oid=oid_to_check) == 2, 'Expected 2 copies' assert (
get_simple_object_copies(wallet=wallet, cid=cid, oid=oid_to_check) == 2
), "Expected 2 copies"
with allure.step('Get objects and compare hashes'): with allure.step("Get objects and compare hashes"):
for oid_to_check in oids: for oid_to_check in oids:
got_file_path = get_object(wallet=wallet, cid=cid, oid=oid_to_check) got_file_path = get_object(wallet=wallet, cid=cid, oid=oid_to_check)
got_file_hash = get_file_hash(got_file_path) got_file_hash = get_file_hash(got_file_path)
assert file_hash == got_file_hash assert file_hash == got_file_hash
with allure.step('Get range/range hash'): with allure.step("Get range/range hash"):
range_hash = get_range_hash(**wallet_cid, oid=oids[0], bearer_token='', range_cut=range_cut) range_hash = get_range_hash(**wallet_cid, oid=oids[0], bearer_token="", range_cut=range_cut)
assert get_file_hash(file_path, range_len) == range_hash, \ assert (
f'Expected range hash to match {range_cut} slice of file payload' get_file_hash(file_path, range_len) == range_hash
), f"Expected range hash to match {range_cut} slice of file payload"
range_hash = get_range_hash(**wallet_cid, oid=oids[1], bearer_token='', range_cut=range_cut) range_hash = get_range_hash(**wallet_cid, oid=oids[1], bearer_token="", range_cut=range_cut)
assert get_file_hash(file_path, range_len) == range_hash, \ assert (
f'Expected range hash to match {range_cut} slice of file payload' get_file_hash(file_path, range_len) == range_hash
), f"Expected range hash to match {range_cut} slice of file payload"
_, range_content = get_range(**wallet_cid, oid=oids[1], bearer='', range_cut=range_cut) _, range_content = get_range(**wallet_cid, oid=oids[1], bearer="", range_cut=range_cut)
assert get_file_content(file_path, content_len=range_len, mode='rb') == range_content, \ assert (
f'Expected range content to match {range_cut} slice of file payload' get_file_content(file_path, content_len=range_len, mode="rb") == range_content
), f"Expected range content to match {range_cut} slice of file payload"
with allure.step('Search objects'): with allure.step("Search objects"):
search_object(**wallet_cid, expected_objects_list=oids) search_object(**wallet_cid, expected_objects_list=oids)
search_object(**wallet_cid, filters=file_usr_header, expected_objects_list=oids[1:2]) search_object(**wallet_cid, filters=file_usr_header, expected_objects_list=oids[1:2])
search_object(**wallet_cid, filters=file_usr_header_oth, expected_objects_list=oids[2:3]) search_object(**wallet_cid, filters=file_usr_header_oth, expected_objects_list=oids[2:3])
search_object(**wallet_cid, filters=common_header, expected_objects_list=oids[1:3]) search_object(**wallet_cid, filters=common_header, expected_objects_list=oids[1:3])
with allure.step('Head object and validate'): with allure.step("Head object and validate"):
head_object(**wallet_cid, oid=oids[0]) head_object(**wallet_cid, oid=oids[0])
head_info = head_object(**wallet_cid, oid=oids[1]) head_info = head_object(**wallet_cid, oid=oids[1])
check_header_is_presented(head_info, file_usr_header) check_header_is_presented(head_info, file_usr_header)
with allure.step('Delete objects'): with allure.step("Delete objects"):
tombstone_s = delete_object(**wallet_cid, oid=oids[0]) tombstone_s = delete_object(**wallet_cid, oid=oids[0])
tombstone_h = delete_object(**wallet_cid, oid=oids[1]) tombstone_h = delete_object(**wallet_cid, oid=oids[1])
@ -92,15 +108,17 @@ def test_object_api(prepare_wallet_and_deposit, request, object_size):
tick_epoch() tick_epoch()
sleep(CLEANUP_TIMEOUT) sleep(CLEANUP_TIMEOUT)
with allure.step('Get objects and check errors'): with allure.step("Get objects and check errors"):
get_object_and_check_error(**wallet_cid, oid=oids[0], error_pattern=OBJECT_ALREADY_REMOVED) get_object_and_check_error(**wallet_cid, oid=oids[0], error_pattern=OBJECT_ALREADY_REMOVED)
get_object_and_check_error(**wallet_cid, oid=oids[1], error_pattern=OBJECT_ALREADY_REMOVED) get_object_and_check_error(**wallet_cid, oid=oids[1], error_pattern=OBJECT_ALREADY_REMOVED)
@allure.title('Test object life time') @allure.title("Test object life time")
@pytest.mark.sanity @pytest.mark.sanity
@pytest.mark.grpc_api @pytest.mark.grpc_api
@pytest.mark.parametrize('object_size', [SIMPLE_OBJ_SIZE, COMPLEX_OBJ_SIZE], ids=['simple object', 'complex object']) @pytest.mark.parametrize(
"object_size", [SIMPLE_OBJ_SIZE, COMPLEX_OBJ_SIZE], ids=["simple object", "complex object"]
)
def test_object_api_lifetime(prepare_wallet_and_deposit, request, object_size): def test_object_api_lifetime(prepare_wallet_and_deposit, request, object_size):
""" """
Test object deleted after expiration epoch. Test object deleted after expiration epoch.
@ -108,7 +126,7 @@ def test_object_api_lifetime(prepare_wallet_and_deposit, request, object_size):
wallet = prepare_wallet_and_deposit wallet = prepare_wallet_and_deposit
cid = create_container(wallet) cid = create_container(wallet)
allure.dynamic.title(f'Test object life time for {request.node.callspec.id}') allure.dynamic.title(f"Test object life time for {request.node.callspec.id}")
file_path = generate_file(object_size) file_path = generate_file(object_size)
file_hash = get_file_hash(file_path) file_hash = get_file_hash(file_path)
@ -118,14 +136,14 @@ def test_object_api_lifetime(prepare_wallet_and_deposit, request, object_size):
got_file = get_object(wallet, cid, oid) got_file = get_object(wallet, cid, oid)
assert get_file_hash(got_file) == file_hash assert get_file_hash(got_file) == file_hash
with allure.step('Tick two epochs'): with allure.step("Tick two epochs"):
for _ in range(2): for _ in range(2):
tick_epoch() tick_epoch()
# Wait for GC, because object with expiration is counted as alive until GC removes it # Wait for GC, because object with expiration is counted as alive until GC removes it
wait_for_gc_pass_on_storage_nodes() wait_for_gc_pass_on_storage_nodes()
with allure.step('Check object deleted because it expires-on epoch'): with allure.step("Check object deleted because it expires-on epoch"):
with pytest.raises(Exception, match=OBJECT_NOT_FOUND): with pytest.raises(Exception, match=OBJECT_NOT_FOUND):
get_object(wallet, cid, oid) get_object(wallet, cid, oid)
@ -133,14 +151,17 @@ def test_object_api_lifetime(prepare_wallet_and_deposit, request, object_size):
def get_object_and_check_error(wallet: str, cid: str, oid: str, error_pattern: str) -> None: def get_object_and_check_error(wallet: str, cid: str, oid: str, error_pattern: str) -> None:
try: try:
get_object(wallet=wallet, cid=cid, oid=oid) get_object(wallet=wallet, cid=cid, oid=oid)
raise AssertionError(f'Expected object {oid} removed, but it is not') raise AssertionError(f"Expected object {oid} removed, but it is not")
except Exception as err: except Exception as err:
logger.info(f'Error is {err}') logger.info(f"Error is {err}")
assert error_matches_status(err, error_pattern), f'Expected {err} to match {error_pattern}' assert error_matches_status(err, error_pattern), f"Expected {err} to match {error_pattern}"
def check_header_is_presented(head_info: dict, object_header: dict): def check_header_is_presented(head_info: dict, object_header: dict):
for key_to_check, val_to_check in object_header.items(): for key_to_check, val_to_check in object_header.items():
assert key_to_check in head_info['header']['attributes'], f'Key {key_to_check} is found in {head_object}' assert (
assert head_info['header']['attributes'].get(key_to_check) == str( key_to_check in head_info["header"]["attributes"]
val_to_check), f'Value {val_to_check} is equal' ), f"Key {key_to_check} is found in {head_object}"
assert head_info["header"]["attributes"].get(key_to_check) == str(
val_to_check
), f"Value {val_to_check} is equal"


@ -11,11 +11,12 @@ from python_keywords.container import list_containers
from python_keywords.payment_neogo import neofs_deposit, transfer_mainnet_gas from python_keywords.payment_neogo import neofs_deposit, transfer_mainnet_gas
from python_keywords.utility_keywords import concat_files, generate_file, get_file_hash from python_keywords.utility_keywords import concat_files, generate_file, get_file_hash
from s3_helper import check_objects_in_bucket, set_bucket_versioning from s3_helper import check_objects_in_bucket, set_bucket_versioning
from utility import create_file_with_content
from wallet import init_wallet
from steps import s3_gate_bucket, s3_gate_object from steps import s3_gate_bucket, s3_gate_object
from steps.aws_cli_client import AwsCliClient from steps.aws_cli_client import AwsCliClient
from steps.s3_gate_base import TestS3GateBase from steps.s3_gate_base import TestS3GateBase
from utility import create_file_with_content
from wallet import init_wallet
def pytest_generate_tests(metafunc): def pytest_generate_tests(metafunc):


@ -5,26 +5,25 @@ from re import match
import allure import allure
import pytest import pytest
import requests import requests
from common import BIN_VERSIONS_FILE from common import BIN_VERSIONS_FILE
from env_properties import read_env_properties, save_env_properties from env_properties import read_env_properties, save_env_properties
from service_helper import get_storage_service_helper from service_helper import get_storage_service_helper
logger = logging.getLogger('NeoLogger') logger = logging.getLogger("NeoLogger")
@allure.title('Check binaries versions') @allure.title("Check binaries versions")
@pytest.mark.check_binaries @pytest.mark.check_binaries
@pytest.mark.skip('Skipped due to https://j.yadro.com/browse/OBJECT-628') @pytest.mark.skip("Skipped due to https://j.yadro.com/browse/OBJECT-628")
def test_binaries_versions(request): def test_binaries_versions(request):
""" """
Compare binaries versions from external source (url) and deployed on servers. Compare binaries versions from external source (url) and deployed on servers.
""" """
if not BIN_VERSIONS_FILE: if not BIN_VERSIONS_FILE:
pytest.skip('File with binaries and versions was not provided') pytest.skip("File with binaries and versions was not provided")
binaries_to_check = download_versions_info(BIN_VERSIONS_FILE) binaries_to_check = download_versions_info(BIN_VERSIONS_FILE)
with allure.step('Get binaries versions from servers'): with allure.step("Get binaries versions from servers"):
helper = get_storage_service_helper() helper = get_storage_service_helper()
got_versions = helper.get_binaries_version(binaries=list(binaries_to_check.keys())) got_versions = helper.get_binaries_version(binaries=list(binaries_to_check.keys()))
@ -36,7 +35,7 @@ def test_binaries_versions(request):
for binary, version in binaries_to_check.items(): for binary, version in binaries_to_check.items():
actual_version = got_versions.get(binary) actual_version = got_versions.get(binary)
if actual_version != version: if actual_version != version:
failed_versions[binary] = f'Expected version {version}, found version {actual_version}' failed_versions[binary] = f"Expected version {version}, found version {actual_version}"
# If some binary was not listed in the env properties file, let's add it # If some binary was not listed in the env properties file, let's add it
# so that we have full information about versions in allure report # so that we have full information about versions in allure report
@ -48,26 +47,27 @@ def test_binaries_versions(request):
# create clear beautiful error with aggregation info # create clear beautiful error with aggregation info
if failed_versions: if failed_versions:
msg = '\n'.join({f'{binary}: {error}' for binary, error in failed_versions.items()}) msg = "\n".join({f"{binary}: {error}" for binary, error in failed_versions.items()})
raise AssertionError(f'Found binaries with unexpected versions:\n{msg}') raise AssertionError(f"Found binaries with unexpected versions:\n{msg}")
@allure.step('Download versions info from {url}') @allure.step("Download versions info from {url}")
def download_versions_info(url: str) -> dict: def download_versions_info(url: str) -> dict:
binaries_to_version = {} binaries_to_version = {}
response = requests.get(url) response = requests.get(url)
assert response.status_code == HTTPStatus.OK, \ assert (
f'Got {response.status_code} code. Content {response.json()}' response.status_code == HTTPStatus.OK
), f"Got {response.status_code} code. Content {response.json()}"
content = response.text content = response.text
assert content, f'Expected file with content, got {response}' assert content, f"Expected file with content, got {response}"
for line in content.split('\n'): for line in content.split("\n"):
m = match('(.*)=(.*)', line) m = match("(.*)=(.*)", line)
if not m: if not m:
logger.warning(f'Could not get binary/version from {line}') logger.warning(f"Could not get binary/version from {line}")
continue continue
bin_name, bin_version = m.group(1), m.group(2) bin_name, bin_version = m.group(1), m.group(2)
binaries_to_version[bin_name] = bin_version binaries_to_version[bin_name] = bin_version
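The match("(.*)=(.*)", line) parser above implies that the file behind BIN_VERSIONS_FILE is a plain name=version listing, one binary per line. A purely hypothetical example of such a file (binary names and version numbers invented for illustration):

neofs-node=0.30.0
neofs-cli=0.30.0
neofs-adm=0.30.0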


@ -1,5 +1,6 @@
import random import random
import allure
import pytest import pytest
from common import COMPLEX_OBJ_SIZE, NEOFS_NETMAP_DICT, SIMPLE_OBJ_SIZE from common import COMPLEX_OBJ_SIZE, NEOFS_NETMAP_DICT, SIMPLE_OBJ_SIZE
from grpc_responses import SESSION_NOT_FOUND from grpc_responses import SESSION_NOT_FOUND
@ -16,8 +17,6 @@ from python_keywords.neofs_verbs import (
from python_keywords.session_token import create_session_token from python_keywords.session_token import create_session_token
from python_keywords.utility_keywords import generate_file from python_keywords.utility_keywords import generate_file
import allure
@allure.title("Test Object Operations with Session Token") @allure.title("Test Object Operations with Session Token")
@pytest.mark.session_token @pytest.mark.session_token


@ -15,41 +15,41 @@ from cli_utils import NeofsCli
from common import ASSETS_DIR, NEOFS_CLI_EXEC, NEOFS_ENDPOINT, WALLET_CONFIG from common import ASSETS_DIR, NEOFS_CLI_EXEC, NEOFS_ENDPOINT, WALLET_CONFIG
from data_formatters import get_wallet_public_key from data_formatters import get_wallet_public_key
logger = logging.getLogger('NeoLogger') logger = logging.getLogger("NeoLogger")
EACL_LIFETIME = 100500 EACL_LIFETIME = 100500
NEOFS_CONTRACT_CACHE_TIMEOUT = 30 NEOFS_CONTRACT_CACHE_TIMEOUT = 30
class EACLOperation(Enum): class EACLOperation(Enum):
PUT = 'put' PUT = "put"
GET = 'get' GET = "get"
HEAD = 'head' HEAD = "head"
GET_RANGE = 'getrange' GET_RANGE = "getrange"
GET_RANGE_HASH = 'getrangehash' GET_RANGE_HASH = "getrangehash"
SEARCH = 'search' SEARCH = "search"
DELETE = 'delete' DELETE = "delete"
class EACLAccess(Enum): class EACLAccess(Enum):
ALLOW = 'allow' ALLOW = "allow"
DENY = 'deny' DENY = "deny"
class EACLRole(Enum): class EACLRole(Enum):
OTHERS = 'others' OTHERS = "others"
USER = 'user' USER = "user"
SYSTEM = 'system' SYSTEM = "system"
class EACLHeaderType(Enum): class EACLHeaderType(Enum):
REQUEST = 'req' # Filter request headers REQUEST = "req" # Filter request headers
OBJECT = 'obj' # Filter object headers OBJECT = "obj" # Filter object headers
SERVICE = 'SERVICE' # Filter service headers. These are not processed by NeoFS nodes and exist for service use only SERVICE = "SERVICE" # Filter service headers. These are not processed by NeoFS nodes and exist for service use only
class EACLMatchType(Enum): class EACLMatchType(Enum):
STRING_EQUAL = '=' # Return true if strings are equal STRING_EQUAL = "=" # Return true if strings are equal
STRING_NOT_EQUAL = '!=' # Return true if strings are different STRING_NOT_EQUAL = "!=" # Return true if strings are different
@dataclass @dataclass
@ -60,7 +60,12 @@ class EACLFilter:
value: Optional[str] = None value: Optional[str] = None
def to_dict(self) -> Dict[str, Any]: def to_dict(self) -> Dict[str, Any]:
return {'headerType': self.header_type, 'matchType': self.match_type, 'key': self.key, 'value': self.value} return {
"headerType": self.header_type,
"matchType": self.match_type,
"key": self.key,
"value": self.value,
}
@dataclass @dataclass
@ -68,10 +73,16 @@ class EACLFilters:
filters: Optional[List[EACLFilter]] = None filters: Optional[List[EACLFilter]] = None
def __str__(self): def __str__(self):
return ','.join( return (
[f'{filter.header_type.value}:{filter.key}{filter.match_type.value}{filter.value}' ",".join(
for filter in self.filters] [
) if self.filters else [] f"{filter.header_type.value}:{filter.key}{filter.match_type.value}{filter.value}"
for filter in self.filters
]
)
if self.filters
else []
)
@dataclass @dataclass
@ -87,15 +98,23 @@ class EACLRule:
filters: Optional[EACLFilters] = None filters: Optional[EACLFilters] = None
def to_dict(self) -> Dict[str, Any]: def to_dict(self) -> Dict[str, Any]:
return {'Operation': self.operation, 'Access': self.access, 'Role': self.role, return {
'Filters': self.filters or []} "Operation": self.operation,
"Access": self.access,
"Role": self.role,
"Filters": self.filters or [],
}
def __str__(self): def __str__(self):
role = self.role.value if isinstance(self.role, EACLRole) else f'pubkey:{get_wallet_public_key(self.role, "")}' role = (
self.role.value
if isinstance(self.role, EACLRole)
else f'pubkey:{get_wallet_public_key(self.role, "")}'
)
return f'{self.access.value} {self.operation.value} {self.filters or ""} {role}' return f'{self.access.value} {self.operation.value} {self.filters or ""} {role}'
@allure.title('Get extended ACL') @allure.title("Get extended ACL")
def get_eacl(wallet_path: str, cid: str) -> Optional[str]: def get_eacl(wallet_path: str, cid: str) -> Optional[str]:
cli = NeofsCli(config=WALLET_CONFIG) cli = NeofsCli(config=WALLET_CONFIG)
try: try:
@ -104,16 +123,21 @@ def get_eacl(wallet_path: str, cid: str) -> Optional[str]:
logger.info("Extended ACL table is not set for this container") logger.info("Extended ACL table is not set for this container")
logger.info(f"Got exception while getting eacl: {exc}") logger.info(f"Got exception while getting eacl: {exc}")
return None return None
if 'extended ACL table is not set for this container' in output: if "extended ACL table is not set for this container" in output:
return None return None
return output return output
@allure.title('Set extended ACL') @allure.title("Set extended ACL")
def set_eacl(wallet_path: str, cid: str, eacl_table_path: str) -> None: def set_eacl(wallet_path: str, cid: str, eacl_table_path: str) -> None:
cli = NeofsCli(config=WALLET_CONFIG, timeout=60) cli = NeofsCli(config=WALLET_CONFIG, timeout=60)
cli.container.set_eacl(wallet=wallet_path, rpc_endpoint=NEOFS_ENDPOINT, cid=cid, table=eacl_table_path, cli.container.set_eacl(
await_mode=True) wallet=wallet_path,
rpc_endpoint=NEOFS_ENDPOINT,
cid=cid,
table=eacl_table_path,
await_mode=True,
)
def _encode_cid_for_eacl(cid: str) -> str: def _encode_cid_for_eacl(cid: str) -> str:
@ -125,14 +149,16 @@ def create_eacl(cid: str, rules_list: List[EACLRule]) -> str:
table_file_path = f"{os.getcwd()}/{ASSETS_DIR}/eacl_table_{str(uuid.uuid4())}.json" table_file_path = f"{os.getcwd()}/{ASSETS_DIR}/eacl_table_{str(uuid.uuid4())}.json"
NeofsCli().acl.extended_create(cid=cid, out=table_file_path, rule=rules_list) NeofsCli().acl.extended_create(cid=cid, out=table_file_path, rule=rules_list)
with open(table_file_path, 'r') as file: with open(table_file_path, "r") as file:
table_data = file.read() table_data = file.read()
logger.info(f"Generated eACL:\n{table_data}") logger.info(f"Generated eACL:\n{table_data}")
return table_file_path return table_file_path
def form_bearertoken_file(wif: str, cid: str, eacl_rule_list: List[Union[EACLRule, EACLPubKey]]) -> str: def form_bearertoken_file(
wif: str, cid: str, eacl_rule_list: List[Union[EACLRule, EACLPubKey]]
) -> str:
""" """
This function fetches eACL for given <cid> on behalf of <wif>, This function fetches eACL for given <cid> on behalf of <wif>,
then extends it with filters taken from <eacl_rules>, signs then extends it with filters taken from <eacl_rules>, signs
@ -144,50 +170,29 @@ def form_bearertoken_file(wif: str, cid: str, eacl_rule_list: List[Union[EACLRul
eacl = get_eacl(wif, cid) eacl = get_eacl(wif, cid)
json_eacl = dict() json_eacl = dict()
if eacl: if eacl:
eacl = eacl.replace('eACL: ', '').split('Signature')[0] eacl = eacl.replace("eACL: ", "").split("Signature")[0]
json_eacl = json.loads(eacl) json_eacl = json.loads(eacl)
logger.info(json_eacl) logger.info(json_eacl)
eacl_result = { eacl_result = {
"body": "body": {
{ "eaclTable": {"containerID": {"value": enc_cid}, "records": []},
"eaclTable": "lifetime": {"exp": EACL_LIFETIME, "nbf": "1", "iat": "0"},
{ }
"containerID":
{
"value": enc_cid
},
"records": []
},
"lifetime":
{
"exp": EACL_LIFETIME,
"nbf": "1",
"iat": "0"
}
}
} }
assert eacl_rules, 'Got empty eacl_records list' assert eacl_rules, "Got empty eacl_records list"
for rule in eacl_rule_list: for rule in eacl_rule_list:
op_data = { op_data = {
"operation": rule.operation.value.upper(), "operation": rule.operation.value.upper(),
"action": rule.access.value.upper(), "action": rule.access.value.upper(),
"filters": rule.filters or [], "filters": rule.filters or [],
"targets": [] "targets": [],
} }
if isinstance(rule.role, EACLRole): if isinstance(rule.role, EACLRole):
op_data['targets'] = [ op_data["targets"] = [{"role": rule.role.value.upper()}]
{
"role": rule.role.value.upper()
}
]
elif isinstance(rule.role, EACLPubKey): elif isinstance(rule.role, EACLPubKey):
op_data['targets'] = [ op_data["targets"] = [{"keys": rule.role.keys}]
{
'keys': rule.role.keys
}
]
eacl_result["body"]["eaclTable"]["records"].append(op_data) eacl_result["body"]["eaclTable"]["records"].append(op_data)
@ -196,7 +201,7 @@ def form_bearertoken_file(wif: str, cid: str, eacl_rule_list: List[Union[EACLRul
for record in json_eacl["records"]: for record in json_eacl["records"]:
eacl_result["body"]["eaclTable"]["records"].append(record) eacl_result["body"]["eaclTable"]["records"].append(record)
with open(file_path, 'w', encoding='utf-8') as eacl_file: with open(file_path, "w", encoding="utf-8") as eacl_file:
json.dump(eacl_result, eacl_file, ensure_ascii=False, indent=4) json.dump(eacl_result, eacl_file, ensure_ascii=False, indent=4)
logger.info(f"Got these extended ACL records: {eacl_result}") logger.info(f"Got these extended ACL records: {eacl_result}")
@ -206,17 +211,17 @@ def form_bearertoken_file(wif: str, cid: str, eacl_rule_list: List[Union[EACLRul
def eacl_rules(access: str, verbs: list, user: str) -> list[str]: def eacl_rules(access: str, verbs: list, user: str) -> list[str]:
""" """
This function creates a list of eACL rules. This function creates a list of eACL rules.
Args: Args:
access (str): identifies if the following operation(s) access (str): identifies if the following operation(s)
is allowed or denied is allowed or denied
verbs (list): a list of operations to set rules for verbs (list): a list of operations to set rules for
user (str): a group of users (user/others) or a wallet of user (str): a group of users (user/others) or a wallet of
a certain user for whom rules are set a certain user for whom rules are set
Returns: Returns:
(list): a list of eACL rules (list): a list of eACL rules
""" """
if user not in ('others', 'user'): if user not in ("others", "user"):
pubkey = get_wallet_public_key(user, wallet_password="") pubkey = get_wallet_public_key(user, wallet_password="")
user = f"pubkey:{pubkey}" user = f"pubkey:{pubkey}"
@ -229,13 +234,13 @@ def eacl_rules(access: str, verbs: list, user: str) -> list[str]:
def sign_bearer_token(wallet_path: str, eacl_rules_file: str) -> None: def sign_bearer_token(wallet_path: str, eacl_rules_file: str) -> None:
cmd = ( cmd = (
f'{NEOFS_CLI_EXEC} util sign bearer-token --from {eacl_rules_file} ' f"{NEOFS_CLI_EXEC} util sign bearer-token --from {eacl_rules_file} "
f'--to {eacl_rules_file} --wallet {wallet_path} --config {WALLET_CONFIG} --json' f"--to {eacl_rules_file} --wallet {wallet_path} --config {WALLET_CONFIG} --json"
) )
_cmd_run(cmd) _cmd_run(cmd)
@allure.title('Wait for eACL cache expired') @allure.title("Wait for eACL cache expired")
def wait_for_cache_expired(): def wait_for_cache_expired():
sleep(NEOFS_CONTRACT_CACHE_TIMEOUT) sleep(NEOFS_CONTRACT_CACHE_TIMEOUT)
return return
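For reference, the token body assembled by form_bearertoken_file above has a small, predictable shape. A minimal standalone sketch of that structure for a single ALLOW/GET record targeting the OTHERS role (the container ID and lifetime below are placeholder values, not taken from the code above):

    import json

    enc_cid = "EXAMPLE_BASE64_CID"  # placeholder; in the helper this is the encoded container ID
    EACL_LIFETIME = 100500          # placeholder; in the helper this comes from common settings

    eacl_result = {
        "body": {
            "eaclTable": {"containerID": {"value": enc_cid}, "records": []},
            "lifetime": {"exp": EACL_LIFETIME, "nbf": "1", "iat": "0"},
        }
    }

    # One record, mirroring how op_data is built from an EACLRule in the helper.
    eacl_result["body"]["eaclTable"]["records"].append(
        {"operation": "GET", "action": "ALLOW", "filters": [], "targets": [{"role": "OTHERS"}]}
    )

    print(json.dumps(eacl_result, ensure_ascii=False, indent=4))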
View file
@ -6,8 +6,8 @@ from .completion import NeofsAdmCompletion
from .config import NeofsAdmConfig from .config import NeofsAdmConfig
from .gendoc import NeofsAdmGenDoc from .gendoc import NeofsAdmGenDoc
from .morph import NeofsAdmMorph from .morph import NeofsAdmMorph
from .subnet import NeofsAdmMorphSubnet
from .storage_config import NeofsAdmStorageConfig from .storage_config import NeofsAdmStorageConfig
from .subnet import NeofsAdmMorphSubnet
from .version import NeofsAdmVersion from .version import NeofsAdmVersion
@ -23,14 +23,27 @@ class NeofsAdm:
storage_config: Optional[NeofsAdmStorageConfig] = None storage_config: Optional[NeofsAdmStorageConfig] = None
version: Optional[NeofsAdmVersion] = None version: Optional[NeofsAdmVersion] = None
def __init__(self, neofs_adm_exec_path: Optional[str] = None, config_file: Optional[str] = None, timeout: int = 30): def __init__(
self,
neofs_adm_exec_path: Optional[str] = None,
config_file: Optional[str] = None,
timeout: int = 30,
):
self.config_file = config_file self.config_file = config_file
self.neofs_adm_exec_path = neofs_adm_exec_path or NEOFS_ADM_EXEC self.neofs_adm_exec_path = neofs_adm_exec_path or NEOFS_ADM_EXEC
self.completion = NeofsAdmCompletion(self.neofs_adm_exec_path, timeout=timeout, config=config_file) self.completion = NeofsAdmCompletion(
self.neofs_adm_exec_path, timeout=timeout, config=config_file
)
self.config = NeofsAdmConfig(self.neofs_adm_exec_path, timeout=timeout, config=config_file) self.config = NeofsAdmConfig(self.neofs_adm_exec_path, timeout=timeout, config=config_file)
self.gendoc = NeofsAdmGenDoc(self.neofs_adm_exec_path, timeout=timeout, config=config_file) self.gendoc = NeofsAdmGenDoc(self.neofs_adm_exec_path, timeout=timeout, config=config_file)
self.morph = NeofsAdmMorph(self.neofs_adm_exec_path, timeout=timeout, config=config_file) self.morph = NeofsAdmMorph(self.neofs_adm_exec_path, timeout=timeout, config=config_file)
self.subnet = NeofsAdmMorphSubnet(self.neofs_adm_exec_path, timeout=timeout, config=config_file) self.subnet = NeofsAdmMorphSubnet(
self.storage_config = NeofsAdmStorageConfig(self.neofs_adm_exec_path, timeout=timeout, config=config_file) self.neofs_adm_exec_path, timeout=timeout, config=config_file
self.version = NeofsAdmVersion(self.neofs_adm_exec_path, timeout=timeout, config=config_file) )
self.storage_config = NeofsAdmStorageConfig(
self.neofs_adm_exec_path, timeout=timeout, config=config_file
)
self.version = NeofsAdmVersion(
self.neofs_adm_exec_path, timeout=timeout, config=config_file
)
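With the constructor reflowed as above, a call site stays a one-liner per command. A usage sketch, assuming the facade is importable as shown and using placeholder endpoint/config values:

    from cli_utils import NeofsAdm  # assumed import path for the facade above

    neofs_adm = NeofsAdm(config_file="neofs-adm.yml", timeout=30)
    # Each sub-command object only assembles the corresponding
    # `neofs-adm morph ...` invocation from its keyword arguments and hands it to _execute.
    neofs_adm.morph.dump_config(rpc_endpoint="http://morph-chain.neofs.devenv:30333")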
View file
@ -27,4 +27,4 @@ class NeofsAdmCompletion(NeofsCliCommand):
str: Command string str: Command string
""" """
return self._execute('completion ' + completion_type.value) return self._execute("completion " + completion_type.value)
View file
@ -2,7 +2,7 @@ from enum import Enum
class CompletionType(Enum): class CompletionType(Enum):
BASH = 'bash' BASH = "bash"
ZHS = 'zsh' ZHS = "zsh"
FISH = 'fish' FISH = "fish"
POWERSHELL = 'powershell' POWERSHELL = "powershell"
View file
@ -2,7 +2,7 @@ from cli_utils.cli_command import NeofsCliCommand
class NeofsAdmConfig(NeofsCliCommand): class NeofsAdmConfig(NeofsCliCommand):
def init(self, path: str = '~/.neofs/adm/config.yml') -> str: def init(self, path: str = "~/.neofs/adm/config.yml") -> str:
"""Initialize basic neofs-adm configuration file. """Initialize basic neofs-adm configuration file.
Args: Args:
@ -14,6 +14,6 @@ class NeofsAdmConfig(NeofsCliCommand):
""" """
return self._execute( return self._execute(
'config init', "config init",
**{param: param_value for param, param_value in locals().items() if param not in ['self']} **{param: value for param, value in locals().items() if param not in ["self"]},
) )
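The `**{param: value for param, value in locals().items() if param not in ["self"]}` idiom used throughout these wrappers simply forwards every named parameter of the method as a keyword argument. A tiny standalone illustration (plain Python, no NeoFS code involved):

    def demo(path: str = "~/.neofs/adm/config.yml", timeout: int = 30):
        # locals() is evaluated in demo's scope before anything else is assigned,
        # so it holds exactly the arguments; the real methods additionally drop "self".
        return {param: value for param, value in locals().items() if param not in ["self"]}

    print(demo())  # -> {'path': '~/.neofs/adm/config.yml', 'timeout': 30}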
View file
@ -4,7 +4,9 @@ from cli_utils.cli_command import NeofsCliCommand
class NeofsAdmGenDoc(NeofsCliCommand): class NeofsAdmGenDoc(NeofsCliCommand):
def get(self, doc_file: str, depth: int = 1, doc_type: str = 'md', extension: Optional[str] = None) -> str: def get(
self, doc_file: str, depth: int = 1, doc_type: str = "md", extension: Optional[str] = None
) -> str:
"""Generate documentation for this command. If the template is not provided, """Generate documentation for this command. If the template is not provided,
builtin cobra generator is used and each subcommand is placed in builtin cobra generator is used and each subcommand is placed in
a separate file in the same directory. a separate file in the same directory.
@ -29,6 +31,10 @@ class NeofsAdmGenDoc(NeofsCliCommand):
""" """
return self._execute( return self._execute(
f'gendoc {doc_file}', f"gendoc {doc_file}",
**{param: param_value for param, param_value in locals().items() if param not in ['self', 'doc_file']} **{
param: value
for param, value in locals().items()
if param not in ["self", "doc_file"]
},
) )
View file
@ -4,9 +4,16 @@ from cli_utils.cli_command import NeofsCliCommand
class NeofsAdmMorph(NeofsCliCommand): class NeofsAdmMorph(NeofsCliCommand):
def deposit_notary(self, rpc_endpoint: str, account: str, gas: str, storage_wallet: Optional[str] = None, def deposit_notary(
till: Optional[str] = None) -> str: self,
"""Deposit GAS for notary service. rpc_endpoint: str,
account: str,
gas: str,
storage_wallet: Optional[str] = None,
till: Optional[str] = None,
) -> str:
"""
Deposit GAS for notary service.
Args: Args:
account (str): wallet account address account (str): wallet account address
@ -15,19 +22,24 @@ class NeofsAdmMorph(NeofsCliCommand):
storage_wallet (str): path to storage node wallet storage_wallet (str): path to storage node wallet
till (str): notary deposit duration in blocks till (str): notary deposit duration in blocks
Returns: Returns:
str: Command string str: Command string
""" """
return self._execute( return self._execute(
'morph deposit-notary', "morph deposit-notary",
**{param: param_value for param, param_value in locals().items() if param not in ['self']} **{param: value for param, value in locals().items() if param not in ["self"]},
) )
def dump_balances(self, rpc_endpoint: str, alphabet: Optional[str] = None, proxy: Optional[str] = None, def dump_balances(
script_hash: Optional[str] = None, storage: Optional[str] = None) -> str: self,
"""Dump GAS balances rpc_endpoint: str,
alphabet: Optional[str] = None,
proxy: Optional[str] = None,
script_hash: Optional[str] = None,
storage: Optional[str] = None,
) -> str:
"""
Dump GAS balances.
Args: Args:
alphabet (str): dump balances of alphabet contracts alphabet (str): dump balances of alphabet contracts
@ -36,35 +48,38 @@ class NeofsAdmMorph(NeofsCliCommand):
script_hash (str): use script-hash format for addresses script_hash (str): use script-hash format for addresses
storage (str): dump balances of storage nodes from the current netmap storage (str): dump balances of storage nodes from the current netmap
Returns: Returns:
str: Command string str: Command string
""" """
return self._execute( return self._execute(
'morph dump-balances', "morph dump-balances",
**{param: param_value for param, param_value in locals().items() if param not in ['self']} **{param: value for param, value in locals().items() if param not in ["self"]},
) )
def dump_config(self, rpc_endpoint: str) -> str: def dump_config(self, rpc_endpoint: str) -> str:
"""Section for morph network configuration commands. """
Dump NeoFS network config.
Args: Args:
rpc_endpoint (str): N3 RPC node endpoint rpc_endpoint (str): N3 RPC node endpoint
Returns: Returns:
str: Command string str: Command string
""" """
return self._execute( return self._execute(
'morph dump-config', "morph dump-config",
**{param: param_value for param, param_value in locals().items() if param not in ['self']} **{param: value for param, value in locals().items() if param not in ["self"]},
) )
def dump_containers(self, rpc_endpoint: str, cid: Optional[str] = None, container_contract: Optional[str] = None, def dump_containers(
dump: Optional[str] = None) -> str: self,
"""Dump NeoFS containers to file. rpc_endpoint: str,
cid: Optional[str] = None,
container_contract: Optional[str] = None,
dump: Optional[str] = None,
) -> str:
"""
Dump NeoFS containers to file.
Args: Args:
cid (str): containers to dump cid (str): containers to dump
@ -72,70 +87,73 @@ class NeofsAdmMorph(NeofsCliCommand):
dump (str): file where to save dumped containers dump (str): file where to save dumped containers
rpc_endpoint (str): N3 RPC node endpoint rpc_endpoint (str): N3 RPC node endpoint
Returns: Returns:
str: Command string str: Command string
""" """
return self._execute( return self._execute(
'morph dump-containers', "morph dump-containers",
**{param: param_value for param, param_value in locals().items() if param not in ['self']} **{param: value for param, value in locals().items() if param not in ["self"]},
) )
def dump_hashes(self, rpc_endpoint: str) -> str: def dump_hashes(self, rpc_endpoint: str) -> str:
"""Dump deployed contract hashes. """
Dump deployed contract hashes.
Args: Args:
rpc_endpoint (str): N3 RPC node endpoint rpc_endpoint (str): N3 RPC node endpoint
Returns: Returns:
str: Command string str: Command string
""" """
return self._execute( return self._execute(
'morph dump-hashes', "morph dump-hashes",
**{param: param_value for param, param_value in locals().items() if param not in ['self']} **{param: value for param, value in locals().items() if param not in ["self"]},
) )
def force_new_epoch(self, rpc_endpoint: Optional[str] = None, alphabet: Optional[str] = None) -> str: def force_new_epoch(
"""Create new NeoFS epoch event in the side chain self, rpc_endpoint: Optional[str] = None, alphabet: Optional[str] = None
) -> str:
"""
Create new NeoFS epoch event in the side chain
Args: Args:
alphabet (str): path to alphabet wallets dir alphabet (str): path to alphabet wallets dir
rpc_endpoint (str): N3 RPC node endpoint rpc_endpoint (str): N3 RPC node endpoint
Returns: Returns:
str: Command string str: Command string
""" """
return self._execute( return self._execute(
'morph force-new-epoch', "morph force-new-epoch",
**{param: param_value for param, param_value in locals().items() if param not in ['self']} **{param: value for param, value in locals().items() if param not in ["self"]},
) )
def generate_alphabet(self, rpc_endpoint: str, alphabet_wallets: str, size: int = 7) -> str: def generate_alphabet(self, rpc_endpoint: str, alphabet_wallets: str, size: int = 7) -> str:
"""Generate alphabet wallets for consensus nodes of the morph network """
Generate alphabet wallets for consensus nodes of the morph network.
Args: Args:
alphabet_wallets (str): path to alphabet wallets dir alphabet_wallets (str): path to alphabet wallets dir
size (int): amount of alphabet wallets to generate (default 7) size (int): amount of alphabet wallets to generate (default 7)
rpc_endpoint (str): N3 RPC node endpoint rpc_endpoint (str): N3 RPC node endpoint
Returns: Returns:
str: Command string str: Command string
""" """
return self._execute( return self._execute(
'morph generate-alphabet', "morph generate-alphabet",
**{param: param_value for param, param_value in locals().items() if param not in ['self']} **{param: value for param, value in locals().items() if param not in ["self"]},
) )
def generate_storage_wallet(self, rpc_endpoint: str, alphabet_wallets: str, storage_wallet: str, def generate_storage_wallet(
initial_gas: Optional[str] = None) -> str: self,
"""Generate storage node wallet for the morph network rpc_endpoint: str,
alphabet_wallets: str,
storage_wallet: str,
initial_gas: Optional[str] = None,
) -> str:
"""
Generate storage node wallet for the morph network.
Args: Args:
alphabet_wallets (str): path to alphabet wallets dir alphabet_wallets (str): path to alphabet wallets dir
@ -143,21 +161,29 @@ class NeofsAdmMorph(NeofsCliCommand):
rpc_endpoint (str): N3 RPC node endpoint rpc_endpoint (str): N3 RPC node endpoint
storage_wallet (str): path to new storage node wallet storage_wallet (str): path to new storage node wallet
Returns: Returns:
str: Command string str: Command string
""" """
return self._execute( return self._execute(
'morph generate-storage-wallet', "morph generate-storage-wallet",
**{param: param_value for param, param_value in locals().items() if param not in ['self']} **{param: value for param, value in locals().items() if param not in ["self"]},
) )
def init(self, rpc_endpoint: str, alphabet_wallets: str, contracts: str, protocol: str, def init(
container_alias_fee: int = 500, container_fee: int = 1000, epoch_duration: int = 240, self,
homomorphic_disabled: bool = False, local_dump: Optional[str] = None, max_object_size: int = 67108864 rpc_endpoint: str,
) -> str: alphabet_wallets: str,
"""Section for morph network configuration commands. contracts: str,
protocol: str,
container_alias_fee: int = 500,
container_fee: int = 1000,
epoch_duration: int = 240,
homomorphic_disabled: bool = False,
local_dump: Optional[str] = None,
max_object_size: int = 67108864,
) -> str:
"""
Initialize side chain network with smart-contracts and network settings.
Args: Args:
alphabet_wallets (str): path to alphabet wallets dir alphabet_wallets (str): path to alphabet wallets dir
@ -172,19 +198,23 @@ class NeofsAdmMorph(NeofsCliCommand):
protocol (str): path to the consensus node configuration protocol (str): path to the consensus node configuration
rpc_endpoint (str): N3 RPC node endpoint rpc_endpoint (str): N3 RPC node endpoint
Returns: Returns:
str: Command string str: Command string
""" """
return self._execute( return self._execute(
'morph init', "morph init",
**{param: param_value for param, param_value in locals().items() if param not in ['self']} **{param: value for param, value in locals().items() if param not in ["self"]},
) )
def refill_gas(self, rpc_endpoint: str, alphabet_wallets: str, storage_wallet: str, gas: Optional[str] = None def refill_gas(
) -> str: self,
"""Refill GAS of storage node's wallet in the morph network rpc_endpoint: str,
alphabet_wallets: str,
storage_wallet: str,
gas: Optional[str] = None,
) -> str:
"""
Refill GAS of storage node's wallet in the morph network.
Args: Args:
alphabet_wallets (str): path to alphabet wallets dir alphabet_wallets (str): path to alphabet wallets dir
@ -192,18 +222,19 @@ class NeofsAdmMorph(NeofsCliCommand):
rpc_endpoint (str): N3 RPC node endpoint rpc_endpoint (str): N3 RPC node endpoint
storage_wallet (str): path to new storage node wallet storage_wallet (str): path to new storage node wallet
Returns: Returns:
str: Command string str: Command string
""" """
return self._execute( return self._execute(
'morph refill-gas', "morph refill-gas",
**{param: param_value for param, param_value in locals().items() if param not in ['self']} **{param: value for param, value in locals().items() if param not in ["self"]},
) )
def restore_containers(self, rpc_endpoint: str, alphabet_wallets: str, cid: str, dump: str) -> str: def restore_containers(
"""Restore NeoFS containers from file. self, rpc_endpoint: str, alphabet_wallets: str, cid: str, dump: str
) -> str:
"""
Restore NeoFS containers from file.
Args: Args:
alphabet_wallets (str): path to alphabet wallets dir alphabet_wallets (str): path to alphabet wallets dir
@ -211,19 +242,24 @@ class NeofsAdmMorph(NeofsCliCommand):
dump (str): file to restore containers from dump (str): file to restore containers from
rpc_endpoint (str): N3 RPC node endpoint rpc_endpoint (str): N3 RPC node endpoint
Returns: Returns:
str: Command string str: Command string
""" """
return self._execute( return self._execute(
'morph restore-containers', "morph restore-containers",
**{param: param_value for param, param_value in locals().items() if param not in ['self']} **{param: value for param, value in locals().items() if param not in ["self"]},
) )
def set_policy(self, rpc_endpoint: str, alphabet_wallets: str, exec_fee_factor: Optional[int] = None, def set_policy(
storage_price: Optional[int] = None, fee_per_byte: Optional[int] = None) -> str: self,
"""Set global policy values rpc_endpoint: str,
alphabet_wallets: str,
exec_fee_factor: Optional[int] = None,
storage_price: Optional[int] = None,
fee_per_byte: Optional[int] = None,
) -> str:
"""
Set global policy values.
Args: Args:
alphabet_wallets (str): path to alphabet wallets dir alphabet_wallets (str): path to alphabet wallets dir
@ -232,28 +268,30 @@ class NeofsAdmMorph(NeofsCliCommand):
fee_per_byte (int): FeePerByte=<n3> fee_per_byte (int): FeePerByte=<n3>
rpc_endpoint (str): N3 RPC node endpoint rpc_endpoint (str): N3 RPC node endpoint
Returns: Returns:
str: Command string str: Command string
""" """
non_param_attribute = '' non_param_attribute = ""
if exec_fee_factor: if exec_fee_factor:
non_param_attribute += f'ExecFeeFactor={exec_fee_factor} ' non_param_attribute += f"ExecFeeFactor={exec_fee_factor} "
if storage_price: if storage_price:
non_param_attribute += f'StoragePrice={storage_price} ' non_param_attribute += f"StoragePrice={storage_price} "
if fee_per_byte: if fee_per_byte:
non_param_attribute += f'FeePerByte={fee_per_byte} ' non_param_attribute += f"FeePerByte={fee_per_byte} "
return self._execute( return self._execute(
f'morph restore-containers {non_param_attribute}', f"morph restore-containers {non_param_attribute}",
**{param: param_value for param, param_value in locals().items() if param not in [ **{
'self', 'exec_fee_factor', 'storage_price', 'fee_per_byte' param: value
]} for param, value in locals().items()
if param not in ["self", "exec_fee_factor", "storage_price", "fee_per_byte"]
},
) )
def update_contracts(self, rpc_endpoint: str, alphabet_wallets: str, contracts: Optional[str] = None def update_contracts(
) -> str: self, rpc_endpoint: str, alphabet_wallets: str, contracts: Optional[str] = None
"""Update NeoFS contracts. ) -> str:
"""
Update NeoFS contracts.
Args: Args:
alphabet_wallets (str): path to alphabet wallets dir alphabet_wallets (str): path to alphabet wallets dir
@ -261,12 +299,10 @@ class NeofsAdmMorph(NeofsCliCommand):
(default fetched from latest github release) (default fetched from latest github release)
rpc_endpoint (str): N3 RPC node endpoint rpc_endpoint (str): N3 RPC node endpoint
Returns: Returns:
str: Command string str: Command string
""" """
return self._execute( return self._execute(
'morph update-contracts', "morph update-contracts",
**{param: param_value for param, param_value in locals().items() if param not in ['self']} **{param: value for param, value in locals().items() if param not in ["self"]},
) )
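A short sketch of ticking an epoch through this wrapper, as a test might do (endpoint and wallet directory below are placeholders):

    neofs_adm = NeofsAdm(config_file="neofs-adm.yml")
    neofs_adm.morph.force_new_epoch(
        rpc_endpoint="http://morph-chain.neofs.devenv:30333",  # placeholder N3 RPC endpoint
        alphabet="/path/to/alphabet-wallets",                  # placeholder alphabet wallets dir
    )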
View file
@ -15,6 +15,6 @@ class NeofsAdmStorageConfig(NeofsCliCommand):
""" """
return self._execute( return self._execute(
'storage-config', "storage-config",
**{param: param_value for param, param_value in locals().items() if param not in ['self']} **{param: value for param, value in locals().items() if param not in ["self"]},
) )
View file
@ -19,29 +19,31 @@ class NeofsAdmMorphSubnet(NeofsCliCommand):
""" """
return self._execute( return self._execute(
'morph subnet create', "morph subnet create",
**{param: param_value for param, param_value in locals().items() if param not in ['self']} **{param: value for param, value in locals().items() if param not in ["self"]},
) )
def get(self, rpc_endpoint: str, subnet: str) -> str: def get(self, rpc_endpoint: str, subnet: str) -> str:
"""Read information about the NeoFS subnet. """
Read information about the NeoFS subnet.
Args: Args:
rpc_endpoint (str): N3 RPC node endpoint rpc_endpoint (str): N3 RPC node endpoint
subnet (str): ID of the subnet to read subnet (str): ID of the subnet to read
Returns: Returns:
str: Command string str: Command string
""" """
return self._execute( return self._execute(
'morph subnet get', "morph subnet get",
**{param: param_value for param, param_value in locals().items() if param not in ['self']} **{param: value for param, value in locals().items() if param not in ["self"]},
) )
def remove(self, rpc_endpoint: str, wallet: str, subnet: str, address: Optional[str] = None) -> str: def remove(
"""Remove NeoFS subnet. self, rpc_endpoint: str, wallet: str, subnet: str, address: Optional[str] = None
) -> str:
"""
Remove NeoFS subnet.
Args: Args:
address (str): Address in the wallet, optional address (str): Address in the wallet, optional
@ -49,19 +51,26 @@ class NeofsAdmMorphSubnet(NeofsCliCommand):
subnet (str): ID of the subnet to read subnet (str): ID of the subnet to read
wallet (str): Path to file with wallet wallet (str): Path to file with wallet
Returns: Returns:
str: Command string str: Command string
""" """
return self._execute( return self._execute(
'morph subnet remove', "morph subnet remove",
**{param: param_value for param, param_value in locals().items() if param not in ['self']} **{param: value for param, value in locals().items() if param not in ["self"]},
) )
def admin_add(self, rpc_endpoint: str, wallet: str, admin: str, subnet: str, client: Optional[str] = None, def admin_add(
group: Optional[str] = None, address: Optional[str] = None) -> str: self,
"""Add admin to the NeoFS subnet. rpc_endpoint: str,
wallet: str,
admin: str,
subnet: str,
client: Optional[str] = None,
group: Optional[str] = None,
address: Optional[str] = None,
) -> str:
"""
Add admin to the NeoFS subnet.
Args: Args:
address (str): Address in the wallet, optional address (str): Address in the wallet, optional
@ -72,19 +81,25 @@ class NeofsAdmMorphSubnet(NeofsCliCommand):
subnet (str): ID of the subnet to read subnet (str): ID of the subnet to read
wallet (str): Path to file with wallet wallet (str): Path to file with wallet
Returns: Returns:
str: Command string str: Command string
""" """
return self._execute( return self._execute(
'morph subnet admin add', "morph subnet admin add",
**{param: param_value for param, param_value in locals().items() if param not in ['self']} **{param: value for param, value in locals().items() if param not in ["self"]},
) )
def admin_remove(self, rpc_endpoint: str, wallet: str, admin: str, subnet: str, client: Optional[str] = None, def admin_remove(
address: Optional[str] = None) -> str: self,
"""Remove admin of the NeoFS subnet. rpc_endpoint: str,
wallet: str,
admin: str,
subnet: str,
client: Optional[str] = None,
address: Optional[str] = None,
) -> str:
"""
Remove admin of the NeoFS subnet.
Args: Args:
address (str): Address in the wallet, optional address (str): Address in the wallet, optional
@ -94,19 +109,25 @@ class NeofsAdmMorphSubnet(NeofsCliCommand):
subnet (str): ID of the subnet to read subnet (str): ID of the subnet to read
wallet (str): Path to file with wallet wallet (str): Path to file with wallet
Returns: Returns:
str: Command string str: Command string
""" """
return self._execute( return self._execute(
'morph subnet admin remove', "morph subnet admin remove",
**{param: param_value for param, param_value in locals().items() if param not in ['self']} **{param: value for param, value in locals().items() if param not in ["self"]},
) )
def client_add(self, rpc_endpoint: str, wallet: str, subnet: str, client: Optional[str] = None, def client_add(
group: Optional[str] = None, address: Optional[str] = None) -> str: self,
"""Add client to the NeoFS subnet. rpc_endpoint: str,
wallet: str,
subnet: str,
client: Optional[str] = None,
group: Optional[str] = None,
address: Optional[str] = None,
) -> str:
"""
Add client to the NeoFS subnet.
Args: Args:
address (str): Address in the wallet, optional address (str): Address in the wallet, optional
@ -116,19 +137,25 @@ class NeofsAdmMorphSubnet(NeofsCliCommand):
subnet (str): ID of the subnet to read subnet (str): ID of the subnet to read
wallet (str): Path to file with wallet wallet (str): Path to file with wallet
Returns: Returns:
str: Command string str: Command string
""" """
return self._execute( return self._execute(
'morph subnet client add', "morph subnet client add",
**{param: param_value for param, param_value in locals().items() if param not in ['self']} **{param: value for param, value in locals().items() if param not in ["self"]},
) )
def client_remove(self, rpc_endpoint: str, wallet: str, client: str, group: str, subnet: str, def client_remove(
address: Optional[str] = None) -> str: self,
"""Remove client of the NeoFS subnet. rpc_endpoint: str,
wallet: str,
client: str,
group: str,
subnet: str,
address: Optional[str] = None,
) -> str:
"""
Remove client of the NeoFS subnet.
Args: Args:
address (str): Address in the wallet, optional address (str): Address in the wallet, optional
@ -138,18 +165,17 @@ class NeofsAdmMorphSubnet(NeofsCliCommand):
subnet (str): ID of the subnet to read subnet (str): ID of the subnet to read
wallet (str): Path to file with wallet wallet (str): Path to file with wallet
Returns: Returns:
str: Command string str: Command string
""" """
return self._execute( return self._execute(
'morph subnet client remove', "morph subnet client remove",
**{param: param_value for param, param_value in locals().items() if param not in ['self']} **{param: value for param, value in locals().items() if param not in ["self"]},
) )
def node_add(self, rpc_endpoint: str, wallet: str, node: str, subnet: str) -> str: def node_add(self, rpc_endpoint: str, wallet: str, node: str, subnet: str) -> str:
"""Add node to the NeoFS subnet. """
Add node to the NeoFS subnet.
Args: Args:
node (str): Hex-encoded public key of the node node (str): Hex-encoded public key of the node
@ -157,18 +183,17 @@ class NeofsAdmMorphSubnet(NeofsCliCommand):
subnet (str): ID of the subnet to read subnet (str): ID of the subnet to read
wallet (str): Path to file with wallet wallet (str): Path to file with wallet
Returns: Returns:
str: Command string str: Command string
""" """
return self._execute( return self._execute(
'morph subnet node add', "morph subnet node add",
**{param: param_value for param, param_value in locals().items() if param not in ['self']} **{param: value for param, value in locals().items() if param not in ["self"]},
) )
def node_remove(self, rpc_endpoint: str, wallet: str, node: str, subnet: str) -> str: def node_remove(self, rpc_endpoint: str, wallet: str, node: str, subnet: str) -> str:
"""Remove node from the NeoFS subnet. """
Remove node from the NeoFS subnet.
Args: Args:
node (str): Hex-encoded public key of the node node (str): Hex-encoded public key of the node
@ -176,12 +201,10 @@ class NeofsAdmMorphSubnet(NeofsCliCommand):
subnet (str): ID of the subnet to read subnet (str): ID of the subnet to read
wallet (str): Path to file with wallet wallet (str): Path to file with wallet
Returns: Returns:
str: Command string str: Command string
""" """
return self._execute( return self._execute(
'morph subnet node remove', "morph subnet node remove",
**{param: param_value for param, param_value in locals().items() if param not in ['self']} **{param: value for param, value in locals().items() if param not in ["self"]},
) )
View file
@ -9,4 +9,4 @@ class NeofsAdmVersion(NeofsCliCommand):
str: Command string str: Command string
""" """
return self._execute('', version=True) return self._execute("", version=True)
View file
@ -25,9 +25,5 @@ class NeofsCliAccounting(NeofsCliCommand):
""" """
return self._execute( return self._execute(
"accounting balance", "accounting balance",
**{ **{param: value for param, value in locals().items() if param not in ["self"]},
param: param_value
for param, param_value in locals().items()
if param not in ["self"]
}
) )
View file
@ -4,7 +4,9 @@ from cli_utils.cli_command import NeofsCliCommand
class NeofsCliACL(NeofsCliCommand): class NeofsCliACL(NeofsCliCommand):
def extended_create(self, cid: str, out: str, file: Optional[str] = None, rule: Optional[list] = None) -> str: def extended_create(
self, cid: str, out: str, file: Optional[str] = None, rule: Optional[list] = None
) -> str:
"""Create extended ACL from the text representation. """Create extended ACL from the text representation.
@ -42,6 +44,6 @@ class NeofsCliACL(NeofsCliCommand):
""" """
return self._execute( return self._execute(
'acl extended create', "acl extended create",
**{param: param_value for param, param_value in locals().items() if param not in ['self']} **{param: value for param, value in locals().items() if param not in ["self"]},
) )
View file
@ -4,12 +4,25 @@ from cli_utils.cli_command import NeofsCliCommand
class NeofsCliContainer(NeofsCliCommand): class NeofsCliContainer(NeofsCliCommand):
def create(self, rpc_endpoint: str, wallet: str, address: Optional[str] = None, attributes: Optional[dict] = None, def create(
basic_acl: Optional[str] = None, await_mode: bool = False, disable_timestamp: bool = False, self,
name: Optional[str] = None, nonce: Optional[str] = None, policy: Optional[str] = None, rpc_endpoint: str,
session: Optional[str] = None, subnet: Optional[str] = None, ttl: Optional[int] = None, wallet: str,
xhdr: Optional[dict] = None) -> str: address: Optional[str] = None,
"""Create a new container and register it in the NeoFS. attributes: Optional[dict] = None,
basic_acl: Optional[str] = None,
await_mode: bool = False,
disable_timestamp: bool = False,
name: Optional[str] = None,
nonce: Optional[str] = None,
policy: Optional[str] = None,
session: Optional[str] = None,
subnet: Optional[str] = None,
ttl: Optional[int] = None,
xhdr: Optional[dict] = None,
) -> str:
"""
Create a new container and register it in the NeoFS.
It will be stored in the sidechain when the Inner Ring accepts it. It will be stored in the sidechain when the Inner Ring accepts it.
Args: Args:
@ -31,17 +44,26 @@ class NeofsCliContainer(NeofsCliCommand):
Returns: Returns:
str: Command string str: Command string
""" """
return self._execute( return self._execute(
'container create', "container create",
**{param: param_value for param, param_value in locals().items() if param not in ['self']} **{param: value for param, value in locals().items() if param not in ["self"]},
) )
def delete(self, rpc_endpoint: str, wallet: str, cid: str, address: Optional[str] = None, await_mode: bool = False, def delete(
session: Optional[str] = None, ttl: Optional[int] = None, xhdr: Optional[dict] = None, self,
force: bool = False) -> str: rpc_endpoint: str,
"""Delete an existing container. wallet: str,
cid: str,
address: Optional[str] = None,
await_mode: bool = False,
session: Optional[str] = None,
ttl: Optional[int] = None,
xhdr: Optional[dict] = None,
force: bool = False,
) -> str:
"""
Delete an existing container.
Only the owner of the container has permission to remove the container. Only the owner of the container has permission to remove the container.
Args: Args:
@ -55,20 +77,29 @@ class NeofsCliContainer(NeofsCliCommand):
wallet: WIF (NEP-2) string or path to the wallet or binary key wallet: WIF (NEP-2) string or path to the wallet or binary key
xhdr: Request X-Headers in form of Key=Value xhdr: Request X-Headers in form of Key=Value
Returns: Returns:
str: Command string str: Command string
""" """
return self._execute( return self._execute(
'container delete', "container delete",
**{param: param_value for param, param_value in locals().items() if param not in ['self']} **{param: value for param, value in locals().items() if param not in ["self"]},
) )
def get(self, rpc_endpoint: str, wallet: str, cid: str, address: Optional[str] = None, await_mode: bool = False, def get(
to: Optional[str] = None, json_mode: bool = False, ttl: Optional[int] = None, self,
xhdr: Optional[dict] = None) -> str: rpc_endpoint: str,
"""Get container field info wallet: str,
cid: str,
address: Optional[str] = None,
await_mode: bool = False,
to: Optional[str] = None,
json_mode: bool = False,
ttl: Optional[int] = None,
xhdr: Optional[dict] = None,
) -> str:
"""
Get container field info.
Args: Args:
address: address of wallet account address: address of wallet account
@ -83,18 +114,26 @@ class NeofsCliContainer(NeofsCliCommand):
Returns: Returns:
str: Command string str: Command string
""" """
return self._execute( return self._execute(
'container get', "container get",
**{param: param_value for param, param_value in locals().items() if param not in ['self']} **{param: value for param, value in locals().items() if param not in ["self"]},
) )
def get_eacl(self, rpc_endpoint: str, wallet: str, cid: str, address: Optional[str] = None, def get_eacl(
await_mode: bool = False, to: Optional[str] = None, session: Optional[str] = None, self,
ttl: Optional[int] = None, xhdr: Optional[dict] = None) -> str: rpc_endpoint: str,
"""Get extended ACL talbe of container wallet: str,
cid: str,
address: Optional[str] = None,
await_mode: bool = False,
to: Optional[str] = None,
session: Optional[str] = None,
ttl: Optional[int] = None,
xhdr: Optional[dict] = None,
) -> str:
"""
Get extended ACL table of container.
Args: Args:
address: address of wallet account address: address of wallet account
@ -112,13 +151,22 @@ class NeofsCliContainer(NeofsCliCommand):
""" """
return self._execute( return self._execute(
'container get-eacl', "container get-eacl",
**{param: param_value for param, param_value in locals().items() if param not in ['self']} **{param: value for param, value in locals().items() if param not in ["self"]},
) )
def list(self, rpc_endpoint: str, wallet: str, address: Optional[str] = None, def list(
owner: Optional[str] = None, ttl: Optional[int] = None, xhdr: Optional[dict] = None, **params) -> str: self,
"""List all created containers rpc_endpoint: str,
wallet: str,
address: Optional[str] = None,
owner: Optional[str] = None,
ttl: Optional[int] = None,
xhdr: Optional[dict] = None,
**params,
) -> str:
"""
List all created containers.
Args: Args:
address: address of wallet account address: address of wallet account
@ -130,16 +178,23 @@ class NeofsCliContainer(NeofsCliCommand):
Returns: Returns:
str: Command string str: Command string
""" """
return self._execute( return self._execute(
'container list', "container list",
**{param: param_value for param, param_value in locals().items() if param not in ['self']} **{param: value for param, value in locals().items() if param not in ["self"]},
) )
def list_objects(self, rpc_endpoint: str, wallet: str, cid: str, address: Optional[str] = None, def list_objects(
ttl: Optional[int] = None, xhdr: Optional[dict] = None) -> str: self,
"""List existing objects in container rpc_endpoint: str,
wallet: str,
cid: str,
address: Optional[str] = None,
ttl: Optional[int] = None,
xhdr: Optional[dict] = None,
) -> str:
"""
List existing objects in container.
Args: Args:
address: address of wallet account address: address of wallet account
@ -151,18 +206,26 @@ class NeofsCliContainer(NeofsCliCommand):
Returns: Returns:
str: Command string str: Command string
""" """
return self._execute( return self._execute(
'container list-objects', "container list-objects",
**{param: param_value for param, param_value in locals().items() if param not in ['self']} **{param: value for param, value in locals().items() if param not in ["self"]},
) )
def set_eacl(self, rpc_endpoint: str, wallet: str, cid: str, address: Optional[str] = None, def set_eacl(
await_mode: bool = False, table: Optional[str] = None, session: Optional[str] = None, self,
ttl: Optional[int] = None, xhdr: Optional[dict] = None) -> str: rpc_endpoint: str,
"""Set a new extended ACL table for the container. wallet: str,
cid: str,
address: Optional[str] = None,
await_mode: bool = False,
table: Optional[str] = None,
session: Optional[str] = None,
ttl: Optional[int] = None,
xhdr: Optional[dict] = None,
) -> str:
"""
Set a new extended ACL table for the container.
Container ID in the EACL table will be substituted with the ID from the CLI. Container ID in the EACL table will be substituted with the ID from the CLI.
Args: Args:
@ -178,9 +241,8 @@ class NeofsCliContainer(NeofsCliCommand):
Returns: Returns:
str: Command string str: Command string
""" """
return self._execute( return self._execute(
'container set-eacl', "container set-eacl",
**{param: param_value for param, param_value in locals().items() if param not in ['self']} **{param: value for param, value in locals().items() if param not in ["self"]},
) )
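A usage sketch for the container wrapper (endpoint, wallet path, placement policy and ACL below are illustrative values only):

    cli_container = NeofsCliContainer(neofs_cli_exec="neofs-cli", timeout=30, config="wallet-config.yml")
    cli_container.create(
        rpc_endpoint="s01.neofs.devenv:8080",
        wallet="wallets/user.json",
        policy="REP 2 IN X CBF 1 SELECT 4 FROM * AS X",
        basic_acl="public-read-write",
        await_mode=True,  # rendered as the bare --await flag via map_params
    )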
View file
@ -4,10 +4,20 @@ from cli_utils.cli_command import NeofsCliCommand
class NeofsCliObject(NeofsCliCommand): class NeofsCliObject(NeofsCliCommand):
def delete(self, rpc_endpoint: str, wallet: str, cid: str, oid: str, address: Optional[str] = None, def delete(
bearer: Optional[str] = None, session: Optional[str] = None, ttl: Optional[int] = None, self,
xhdr: Optional[dict] = None) -> str: rpc_endpoint: str,
"""Delete object from NeoFS wallet: str,
cid: str,
oid: str,
address: Optional[str] = None,
bearer: Optional[str] = None,
session: Optional[str] = None,
ttl: Optional[int] = None,
xhdr: Optional[dict] = None,
) -> str:
"""
Delete object from NeoFS.
Args: Args:
address: address of wallet account address: address of wallet account
@ -22,18 +32,30 @@ class NeofsCliObject(NeofsCliCommand):
Returns: Returns:
str: Command string str: Command string
""" """
return self._execute( return self._execute(
'object delete', "object delete",
**{param: param_value for param, param_value in locals().items() if param not in ['self']} **{param: value for param, value in locals().items() if param not in ["self"]},
) )
def get(self, rpc_endpoint: str, wallet: str, cid: str, oid: str, address: Optional[str] = None, def get(
bearer: Optional[str] = None, file: Optional[str] = None, self,
header: Optional[str] = None, no_progress: bool = False, raw: bool = False, rpc_endpoint: str,
session: Optional[str] = None, ttl: Optional[int] = None, xhdr: Optional[dict] = None) -> str: wallet: str,
"""Get object from NeoFS cid: str,
oid: str,
address: Optional[str] = None,
bearer: Optional[str] = None,
file: Optional[str] = None,
header: Optional[str] = None,
no_progress: bool = False,
raw: bool = False,
session: Optional[str] = None,
ttl: Optional[int] = None,
xhdr: Optional[dict] = None,
) -> str:
"""
Get object from NeoFS.
Args: Args:
address: address of wallet account address: address of wallet account
@ -52,17 +74,28 @@ class NeofsCliObject(NeofsCliCommand):
Returns: Returns:
str: Command string str: Command string
""" """
return self._execute( return self._execute(
'object get', "object get",
**{param: param_value for param, param_value in locals().items() if param not in ['self']} **{param: value for param, value in locals().items() if param not in ["self"]},
) )
def hash(self, rpc_endpoint: str, wallet: str, cid: str, oid: str, address: Optional[str] = None, def hash(
bearer: Optional[str] = None, range: Optional[str] = None, salt: Optional[str] = None, self,
ttl: Optional[int] = None, hash_type: Optional[str] = None, xhdr: Optional[dict] = None) -> str: rpc_endpoint: str,
"""Get object hash wallet: str,
cid: str,
oid: str,
address: Optional[str] = None,
bearer: Optional[str] = None,
range: Optional[str] = None,
salt: Optional[str] = None,
ttl: Optional[int] = None,
hash_type: Optional[str] = None,
xhdr: Optional[dict] = None,
) -> str:
"""
Get object hash.
Args: Args:
address: address of wallet account address: address of wallet account
@ -79,78 +112,114 @@ class NeofsCliObject(NeofsCliCommand):
Returns: Returns:
str: Command string str: Command string
""" """
return self._execute( return self._execute(
'object hash', "object hash",
**{param: param_value for param, param_value in locals().items() if param not in ['self', 'params']} **{
param: value for param, value in locals().items() if param not in ["self", "params"]
},
) )
def head(self, rpc_endpoint: str, wallet: str, cid: str, oid: str, address: Optional[str] = None, def head(
bearer: Optional[str] = None, file: Optional[str] = None, self,
json_mode: bool = False, main_only: bool = False, proto: bool = False, raw: bool = False, rpc_endpoint: str,
session: Optional[str] = None, ttl: Optional[int] = None, xhdr: Optional[dict] = None) -> str: wallet: str,
"""Get object header cid: str,
oid: str,
address: Optional[str] = None,
bearer: Optional[str] = None,
file: Optional[str] = None,
json_mode: bool = False,
main_only: bool = False,
proto: bool = False,
raw: bool = False,
session: Optional[str] = None,
ttl: Optional[int] = None,
xhdr: Optional[dict] = None,
) -> str:
"""
Get object header.
Args: Args:
address: address of wallet account address: address of wallet account
bearer: File with signed JSON or binary encoded bearer token bearer: File with signed JSON or binary encoded bearer token
cid: Container ID cid: Container ID
file: File to write object payload to. Default: stdout. file: File to write object payload to. Default: stdout.
json_mode: Marshal output in JSON json_mode: Marshal output in JSON
main_only: Return only main fields main_only: Return only main fields
oid: Object ID oid: Object ID
proto: Marshal output in Protobuf proto: Marshal output in Protobuf
raw: Set raw request option raw: Set raw request option
rpc_endpoint: remote node address (as 'multiaddr' or '<host>:<port>') rpc_endpoint: remote node address (as 'multiaddr' or '<host>:<port>')
session: path to a JSON-encoded container session token session: path to a JSON-encoded container session token
ttl: TTL value in request meta header (default 2) ttl: TTL value in request meta header (default 2)
wallet: WIF (NEP-2) string or path to the wallet or binary key wallet: WIF (NEP-2) string or path to the wallet or binary key
xhdr: Request X-Headers in form of Key=Value xhdr: Request X-Headers in form of Key=Value
Returns:
Returns: str: Command string
str: Command string """
"""
return self._execute( return self._execute(
'object head', "object head",
**{param: param_value for param, param_value in locals().items() if param not in ['self']} **{param: value for param, value in locals().items() if param not in ["self"]},
) )
def lock(self, rpc_endpoint: str, wallet: str, cid: str, oid: str, lifetime: int, address: Optional[str] = None, def lock(
bearer: Optional[str] = None, session: Optional[str] = None, ttl: Optional[int] = None, self,
xhdr: Optional[dict] = None) -> str: rpc_endpoint: str,
"""Lock object in container wallet: str,
cid: str,
oid: str,
lifetime: int,
address: Optional[str] = None,
bearer: Optional[str] = None,
session: Optional[str] = None,
ttl: Optional[int] = None,
xhdr: Optional[dict] = None,
) -> str:
"""
Lock object in container.
Args: Args:
address: address of wallet account address: address of wallet account
bearer: File with signed JSON or binary encoded bearer token bearer: File with signed JSON or binary encoded bearer token
cid: Container ID cid: Container ID
oid: Object ID oid: Object ID
lifetime: Object lifetime lifetime: Object lifetime
rpc_endpoint: remote node address (as 'multiaddr' or '<host>:<port>') rpc_endpoint: remote node address (as 'multiaddr' or '<host>:<port>')
session: path to a JSON-encoded container session token session: path to a JSON-encoded container session token
ttl: TTL value in request meta header (default 2) ttl: TTL value in request meta header (default 2)
wallet: WIF (NEP-2) string or path to the wallet or binary key wallet: WIF (NEP-2) string or path to the wallet or binary key
xhdr: Request X-Headers in form of Key=Value xhdr: Request X-Headers in form of Key=Value
Returns:
Returns: str: Command string
str: Command string """
"""
return self._execute( return self._execute(
'object lock', "object lock",
**{param: param_value for param, param_value in locals().items() if param not in ['self']} **{param: value for param, value in locals().items() if param not in ["self"]},
) )
def put(self, rpc_endpoint: str, wallet: str, cid: str, file: str, address: Optional[str] = None, def put(
attributes: Optional[dict] = None, bearer: Optional[str] = None, disable_filename: bool = False, self,
disable_timestamp: bool = False, expire_at: Optional[int] = None, no_progress: bool = False, rpc_endpoint: str,
notify: Optional[str] = None, session: Optional[str] = None, ttl: Optional[int] = None, wallet: str,
xhdr: Optional[dict] = None) -> str: cid: str,
"""Put object to NeoFS file: str,
address: Optional[str] = None,
attributes: Optional[dict] = None,
bearer: Optional[str] = None,
disable_filename: bool = False,
disable_timestamp: bool = False,
expire_at: Optional[int] = None,
no_progress: bool = False,
notify: Optional[str] = None,
session: Optional[str] = None,
ttl: Optional[int] = None,
xhdr: Optional[dict] = None,
) -> str:
"""
Put object to NeoFS.
Args: Args:
address: address of wallet account address: address of wallet account
@ -171,69 +240,90 @@ class NeofsCliObject(NeofsCliCommand):
Returns: Returns:
str: Command string str: Command string
""" """
return self._execute( return self._execute(
'object put', "object put",
**{param: param_value for param, param_value in locals().items() if param not in ['self']} **{param: value for param, value in locals().items() if param not in ["self"]},
) )
def range(self, rpc_endpoint: str, wallet: str, cid: str, oid: str, range: str, address: Optional[str] = None, def range(
bearer: Optional[str] = None, file: Optional[str] = None, json_mode: bool = False, raw: bool = False, self,
session: Optional[str] = None, ttl: Optional[int] = None, xhdr: Optional[dict] = None) -> str: rpc_endpoint: str,
"""Get payload range data of an object wallet: str,
cid: str,
oid: str,
range: str,
address: Optional[str] = None,
bearer: Optional[str] = None,
file: Optional[str] = None,
json_mode: bool = False,
raw: bool = False,
session: Optional[str] = None,
ttl: Optional[int] = None,
xhdr: Optional[dict] = None,
) -> str:
"""
Get payload range data of an object.
Args: Args:
address: address of wallet account address: address of wallet account
bearer: File with signed JSON or binary encoded bearer token bearer: File with signed JSON or binary encoded bearer token
cid: Container ID cid: Container ID
file: File to write object payload to. Default: stdout. file: File to write object payload to. Default: stdout.
json_mode: Marshal output in JSON json_mode: Marshal output in JSON
oid: Object ID oid: Object ID
range: Range to take data from in the form offset:length range: Range to take data from in the form offset:length
raw: Set raw request option raw: Set raw request option
rpc_endpoint: remote node address (as 'multiaddr' or '<host>:<port>') rpc_endpoint: remote node address (as 'multiaddr' or '<host>:<port>')
session: path to a JSON-encoded container session token session: path to a JSON-encoded container session token
ttl: TTL value in request meta header (default 2) ttl: TTL value in request meta header (default 2)
wallet: WIF (NEP-2) string or path to the wallet or binary key wallet: WIF (NEP-2) string or path to the wallet or binary key
xhdr: Request X-Headers in form of Key=Value xhdr: Request X-Headers in form of Key=Value
Returns:
Returns: str: Command string
str: Command string """
"""
return self._execute( return self._execute(
'object range', "object range",
**{param: param_value for param, param_value in locals().items() if param not in ['self']} **{param: value for param, value in locals().items() if param not in ["self"]},
) )
def search(self, rpc_endpoint: str, wallet: str, cid: str, address: Optional[str] = None, def search(
bearer: Optional[str] = None, filters: Optional[list] = None, oid: Optional[str] = None, self,
phy: bool = False, root: bool = False, session: Optional[str] = None, ttl: Optional[int] = None, rpc_endpoint: str,
xhdr: Optional[dict] = None) -> str: wallet: str,
"""Search object cid: str,
address: Optional[str] = None,
bearer: Optional[str] = None,
filters: Optional[list] = None,
oid: Optional[str] = None,
phy: bool = False,
root: bool = False,
session: Optional[str] = None,
ttl: Optional[int] = None,
xhdr: Optional[dict] = None,
) -> str:
"""
Search object.
Args: Args:
address: address of wallet account address: address of wallet account
bearer: File with signed JSON or binary encoded bearer token bearer: File with signed JSON or binary encoded bearer token
cid: Container ID cid: Container ID
filters: Repeated filter expressions or files with protobuf JSON filters: Repeated filter expressions or files with protobuf JSON
oid: Object ID oid: Object ID
phy: Search physically stored objects phy: Search physically stored objects
root: Search for user objects root: Search for user objects
rpc_endpoint: remote node address (as 'multiaddr' or '<host>:<port>') rpc_endpoint: remote node address (as 'multiaddr' or '<host>:<port>')
session: path to a JSON-encoded container session token session: path to a JSON-encoded container session token
ttl: TTL value in request meta header (default 2) ttl: TTL value in request meta header (default 2)
wallet: WIF (NEP-2) string or path to the wallet or binary key wallet: WIF (NEP-2) string or path to the wallet or binary key
xhdr: Request X-Headers in form of Key=Value xhdr: Request X-Headers in form of Key=Value
Returns:
Returns: str: Command string
str: Command string """
"""
return self._execute( return self._execute(
'object search', "object search",
**{param: param_value for param, param_value in locals().items() if param not in ['self']} **{param: value for param, value in locals().items() if param not in ["self"]},
) )
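The object wrapper composes the same way; note how dict-valued parameters such as attributes or xhdr end up as a single quoted key=value flag (all values below are placeholders):

    cli_object = NeofsCliObject(neofs_cli_exec="neofs-cli", timeout=30, config="wallet-config.yml")
    cli_object.put(
        rpc_endpoint="s01.neofs.devenv:8080",
        wallet="wallets/user.json",
        cid="EXAMPLE_CID",
        file="/tmp/payload.bin",
        attributes={"FileName": "payload.bin"},  # becomes --attributes 'FileName=payload.bin'
        no_progress=True,
    )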
View file
@ -3,10 +3,10 @@ from cli_utils.cli_command import NeofsCliCommand
class NeofsCliVersion(NeofsCliCommand): class NeofsCliVersion(NeofsCliCommand):
def get(self) -> str: def get(self) -> str:
"""Application version and NeoFS API compatibility """
Application version and NeoFS API compatibility.
Returns: Returns:
str: Command string str: Command string
""" """
return self._execute('', version=True) return self._execute("", version=True)
View file
@ -7,31 +7,40 @@ class NeofsCliCommand:
neofs_cli_exec: Optional[str] = None neofs_cli_exec: Optional[str] = None
timeout: Optional[int] = None timeout: Optional[int] = None
__base_params: Optional[str] = None __base_params: Optional[str] = None
map_params = {'json_mode': 'json', 'await_mode': 'await', 'hash_type': 'hash', 'doc_type': 'type'} map_params = {
"json_mode": "json",
"await_mode": "await",
"hash_type": "hash",
"doc_type": "type",
}
def __init__(self, neofs_cli_exec: str, timeout: int, **base_params): def __init__(self, neofs_cli_exec: str, timeout: int, **base_params):
self.neofs_cli_exec = neofs_cli_exec self.neofs_cli_exec = neofs_cli_exec
self.timeout = timeout self.timeout = timeout
self.__base_params = ' '.join([f'--{param} {value}' for param, value in base_params.items() if value]) self.__base_params = " ".join(
[f"--{param} {value}" for param, value in base_params.items() if value]
)
def _format_command(self, command: str, **params) -> str: def _format_command(self, command: str, **params) -> str:
param_str = [] param_str = []
for param, value in params.items(): for param, value in params.items():
if param in self.map_params.keys(): if param in self.map_params.keys():
param = self.map_params[param] param = self.map_params[param]
param = param.replace('_', '-') param = param.replace("_", "-")
if not value: if not value:
continue continue
if isinstance(value, bool): if isinstance(value, bool):
param_str.append(f'--{param}') param_str.append(f"--{param}")
elif isinstance(value, int): elif isinstance(value, int):
param_str.append(f'--{param} {value}') param_str.append(f"--{param} {value}")
elif isinstance(value, list): elif isinstance(value, list):
for value_item in value: for value_item in value:
val_str = str(value_item).replace("'", "\\'") val_str = str(value_item).replace("'", "\\'")
param_str.append(f"--{param} '{val_str}'") param_str.append(f"--{param} '{val_str}'")
elif isinstance(value, dict): elif isinstance(value, dict):
param_str.append(f'--{param} \'{",".join(f"{key}={val}" for key, val in value.items())}\'') param_str.append(
f"--{param} '{','.join(f'{key}={val}' for key, val in value.items())}'"
)
else: else:
if "'" in str(value): if "'" in str(value):
value_str = str(value).replace('"', '\\"') value_str = str(value).replace('"', '\\"')
@ -39,7 +48,7 @@ class NeofsCliCommand:
else: else:
param_str.append(f"--{param} '{value}'") param_str.append(f"--{param} '{value}'")
param_str = ' '.join(param_str) param_str = " ".join(param_str)
return f'{self.neofs_cli_exec} {self.__base_params} {command or ""} {param_str}' return f'{self.neofs_cli_exec} {self.__base_params} {command or ""} {param_str}'
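Concretely, the branches above map Python values onto CLI flags by type; a small sketch using the private formatter directly (executable path and parameter values are placeholders):

    cmd = NeofsCliCommand("neofs-cli", timeout=30, config="cfg.yml")
    print(
        cmd._format_command(
            "object put",
            rpc_endpoint="s01.neofs.devenv:8080",  # str  -> --rpc-endpoint 's01.neofs.devenv:8080'
            json_mode=True,                        # bool -> bare --json (renamed via map_params)
            ttl=2,                                 # int  -> --ttl 2
            xhdr={"key": "value"},                 # dict -> --xhdr 'key=value'
        )
    )
    # Roughly: neofs-cli --config cfg.yml object put --rpc-endpoint 's01.neofs.devenv:8080' --json --ttl 2 --xhdr 'key=value'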
View file
@ -1,13 +1,26 @@
from typing import List, Optional from typing import List, Optional
from acl import EACLOperation from acl import EACLOperation
from python_keywords.object_access import (can_get_object, can_put_object, can_delete_object, can_get_head_object, from python_keywords.object_access import (
can_get_range_hash_of_object, can_get_range_of_object, can_search_object) can_delete_object,
can_get_head_object,
can_get_object,
can_get_range_hash_of_object,
can_get_range_of_object,
can_put_object,
can_search_object,
)
def check_full_access_to_container(wallet: str, cid: str, oid: str, file_name: str, def check_full_access_to_container(
bearer: Optional[str] = None, wallet_config: Optional[str] = None, wallet: str,
xhdr: Optional[dict] = None): cid: str,
oid: str,
file_name: str,
bearer: Optional[str] = None,
wallet_config: Optional[str] = None,
xhdr: Optional[dict] = None,
):
assert can_put_object(wallet, cid, file_name, bearer, wallet_config, xhdr) assert can_put_object(wallet, cid, file_name, bearer, wallet_config, xhdr)
assert can_get_head_object(wallet, cid, oid, bearer, wallet_config, xhdr) assert can_get_head_object(wallet, cid, oid, bearer, wallet_config, xhdr)
assert can_get_range_of_object(wallet, cid, oid, bearer, wallet_config, xhdr) assert can_get_range_of_object(wallet, cid, oid, bearer, wallet_config, xhdr)
@ -17,9 +30,15 @@ def check_full_access_to_container(wallet: str, cid: str, oid: str, file_name: s
assert can_delete_object(wallet, cid, oid, bearer, wallet_config, xhdr) assert can_delete_object(wallet, cid, oid, bearer, wallet_config, xhdr)
def check_no_access_to_container(wallet: str, cid: str, oid: str, file_name: str, def check_no_access_to_container(
bearer: Optional[str] = None, wallet_config: Optional[str] = None, wallet: str,
xhdr: Optional[dict] = None): cid: str,
oid: str,
file_name: str,
bearer: Optional[str] = None,
wallet_config: Optional[str] = None,
xhdr: Optional[dict] = None,
):
assert not can_put_object(wallet, cid, file_name, bearer, wallet_config, xhdr) assert not can_put_object(wallet, cid, file_name, bearer, wallet_config, xhdr)
assert not can_get_head_object(wallet, cid, oid, bearer, wallet_config, xhdr) assert not can_get_head_object(wallet, cid, oid, bearer, wallet_config, xhdr)
assert not can_get_range_of_object(wallet, cid, oid, bearer, wallet_config, xhdr) assert not can_get_range_of_object(wallet, cid, oid, bearer, wallet_config, xhdr)
@ -29,42 +48,78 @@ def check_no_access_to_container(wallet: str, cid: str, oid: str, file_name: str
assert not can_delete_object(wallet, cid, oid, bearer, wallet_config, xhdr) assert not can_delete_object(wallet, cid, oid, bearer, wallet_config, xhdr)
def check_custom_access_to_container(wallet: str, cid: str, oid: str, file_name: str, def check_custom_access_to_container(
deny_operations: Optional[List[EACLOperation]] = None, wallet: str,
ignore_operations: Optional[List[EACLOperation]] = None, cid: str,
bearer: Optional[str] = None, wallet_config: Optional[str] = None, oid: str,
xhdr: Optional[dict] = None): file_name: str,
deny_operations: Optional[List[EACLOperation]] = None,
ignore_operations: Optional[List[EACLOperation]] = None,
bearer: Optional[str] = None,
wallet_config: Optional[str] = None,
xhdr: Optional[dict] = None,
):
deny_operations = [op.value for op in deny_operations or []] deny_operations = [op.value for op in deny_operations or []]
ignore_operations = [op.value for op in ignore_operations or []] ignore_operations = [op.value for op in ignore_operations or []]
checks: dict = {} checks: dict = {}
if EACLOperation.PUT.value not in ignore_operations: if EACLOperation.PUT.value not in ignore_operations:
checks[EACLOperation.PUT.value] = can_put_object(wallet, cid, file_name, bearer, wallet_config, xhdr) checks[EACLOperation.PUT.value] = can_put_object(
wallet, cid, file_name, bearer, wallet_config, xhdr
)
if EACLOperation.HEAD.value not in ignore_operations: if EACLOperation.HEAD.value not in ignore_operations:
checks[EACLOperation.HEAD.value] = can_get_head_object(wallet, cid, oid, bearer, wallet_config, xhdr) checks[EACLOperation.HEAD.value] = can_get_head_object(
wallet, cid, oid, bearer, wallet_config, xhdr
)
if EACLOperation.GET_RANGE.value not in ignore_operations: if EACLOperation.GET_RANGE.value not in ignore_operations:
checks[EACLOperation.GET_RANGE.value] = can_get_range_of_object(wallet, cid, oid, bearer, wallet_config, xhdr) checks[EACLOperation.GET_RANGE.value] = can_get_range_of_object(
wallet, cid, oid, bearer, wallet_config, xhdr
)
if EACLOperation.GET_RANGE_HASH.value not in ignore_operations: if EACLOperation.GET_RANGE_HASH.value not in ignore_operations:
checks[EACLOperation.GET_RANGE_HASH.value] = can_get_range_hash_of_object(wallet, cid, oid, bearer, checks[EACLOperation.GET_RANGE_HASH.value] = can_get_range_hash_of_object(
wallet_config, xhdr) wallet, cid, oid, bearer, wallet_config, xhdr
)
if EACLOperation.SEARCH.value not in ignore_operations: if EACLOperation.SEARCH.value not in ignore_operations:
checks[EACLOperation.SEARCH.value] = can_search_object(wallet, cid, oid, bearer, wallet_config, xhdr) checks[EACLOperation.SEARCH.value] = can_search_object(
wallet, cid, oid, bearer, wallet_config, xhdr
)
if EACLOperation.GET.value not in ignore_operations: if EACLOperation.GET.value not in ignore_operations:
checks[EACLOperation.GET.value] = can_get_object(wallet, cid, oid, file_name, bearer, wallet_config, xhdr) checks[EACLOperation.GET.value] = can_get_object(
wallet, cid, oid, file_name, bearer, wallet_config, xhdr
)
if EACLOperation.DELETE.value not in ignore_operations: if EACLOperation.DELETE.value not in ignore_operations:
checks[EACLOperation.DELETE.value] = can_delete_object(wallet, cid, oid, bearer, wallet_config, xhdr) checks[EACLOperation.DELETE.value] = can_delete_object(
wallet, cid, oid, bearer, wallet_config, xhdr
)
failed_checks = ( failed_checks = [
[f'allowed {action} failed' for action, success in checks.items() if f"allowed {action} failed"
not success and action not in deny_operations] + for action, success in checks.items()
[f'denied {action} succeeded' for action, success in checks.items() if if not success and action not in deny_operations
success and action in deny_operations]) ] + [
f"denied {action} succeeded"
for action, success in checks.items()
if success and action in deny_operations
]
assert not failed_checks, ", ".join(failed_checks) assert not failed_checks, ", ".join(failed_checks)
def check_read_only_container(wallet: str, cid: str, oid: str, file_name: str, def check_read_only_container(
bearer: Optional[str] = None, wallet_config: Optional[str] = None, wallet: str,
xhdr: Optional[dict] = None): cid: str,
return check_custom_access_to_container(wallet, cid, oid, file_name, oid: str,
deny_operations=[EACLOperation.PUT, EACLOperation.DELETE], file_name: str,
bearer=bearer, wallet_config=wallet_config, xhdr=xhdr) bearer: Optional[str] = None,
wallet_config: Optional[str] = None,
xhdr: Optional[dict] = None,
):
return check_custom_access_to_container(
wallet,
cid,
oid,
file_name,
deny_operations=[EACLOperation.PUT, EACLOperation.DELETE],
bearer=bearer,
wallet_config=wallet_config,
xhdr=xhdr,
)
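A minimal usage sketch of the helpers above (illustrative only, not part of this commit): a read-only scenario simply delegates to the custom-access check with PUT and DELETE denied. The user_wallet, cid, oid and file_name values are hypothetical placeholders.

def example_read_only_scenario(user_wallet: str, cid: str, oid: str, file_name: str):
    # Assumes an eACL denying PUT and DELETE for this wallet has already been applied.
    check_read_only_container(user_wallet, cid, oid, file_name)
    # Equivalent explicit call through the lower-level helper:
    check_custom_access_to_container(
        user_wallet,
        cid,
        oid,
        file_name,
        deny_operations=[EACLOperation.PUT, EACLOperation.DELETE],
    )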

View file

@@ -3,17 +3,21 @@ from time import sleep
from typing import Optional from typing import Optional
import allure import allure
from common import NEOFS_NETMAP_DICT from common import NEOFS_NETMAP_DICT
from python_keywords.node_management import node_healthcheck from python_keywords.node_management import node_healthcheck
from storage_policy import get_nodes_with_object from storage_policy import get_nodes_with_object
logger = logging.getLogger('NeoLogger') logger = logging.getLogger("NeoLogger")
@allure.step('Wait for object replication') @allure.step("Wait for object replication")
def wait_object_replication_on_nodes(wallet: str, cid: str, oid: str, expected_copies: int, def wait_object_replication_on_nodes(
excluded_nodes: Optional[list[str]] = None) -> list[str]: wallet: str,
cid: str,
oid: str,
expected_copies: int,
excluded_nodes: Optional[list[str]] = None,
) -> list[str]:
excluded_nodes = excluded_nodes or [] excluded_nodes = excluded_nodes or []
sleep_interval, attempts = 10, 18 sleep_interval, attempts = 10, 18
nodes = [] nodes = []
@@ -22,28 +26,30 @@ def wait_object_replication_on_nodes(wallet: str, cid: str, oid: str, expected_c
if len(nodes) == expected_copies: if len(nodes) == expected_copies:
return nodes return nodes
sleep(sleep_interval) sleep(sleep_interval)
raise AssertionError(f'Expected {expected_copies} copies of object, but found {len(nodes)}. ' raise AssertionError(
f'Waiting time {sleep_interval * attempts}') f"Expected {expected_copies} copies of object, but found {len(nodes)}. "
f"Waiting time {sleep_interval * attempts}"
)
@allure.step('Wait for storage node returned to cluster') @allure.step("Wait for storage node returned to cluster")
def wait_all_storage_node_returned(): def wait_all_storage_node_returned():
sleep_interval, attempts = 10, 12 sleep_interval, attempts = 10, 12
for __attempt in range(attempts): for __attempt in range(attempts):
if is_all_storage_node_returned(): if is_all_storage_node_returned():
return return
sleep(sleep_interval) sleep(sleep_interval)
raise AssertionError('Storage node(s) is broken') raise AssertionError("Storage node(s) is broken")
def is_all_storage_node_returned() -> bool: def is_all_storage_node_returned() -> bool:
with allure.step('Run health check for all storage nodes'): with allure.step("Run health check for all storage nodes"):
for node_name in NEOFS_NETMAP_DICT.keys(): for node_name in NEOFS_NETMAP_DICT.keys():
try: try:
health_check = node_healthcheck(node_name) health_check = node_healthcheck(node_name)
except Exception as err: except Exception as err:
logger.warning(f'Node healthcheck fails with error {err}') logger.warning(f"Node healthcheck fails with error {err}")
return False return False
if health_check.health_status != 'READY' or health_check.network_status != 'ONLINE': if health_check.health_status != "READY" or health_check.network_status != "ONLINE":
return False return False
return True return True
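A hedged sketch of how these waiters might be combined in a failover test (illustrative, not part of this commit); the wallet/cid/oid values and the expected copy count of 2 are assumptions.

def example_failover_recovery(wallet: str, cid: str, oid: str):
    # Wait until every storage node reports READY/ONLINE again...
    wait_all_storage_node_returned()
    # ...then confirm the object is back to the expected number of replicas.
    nodes = wait_object_replication_on_nodes(wallet, cid, oid, expected_copies=2)
    return nodes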

View file

@@ -1,99 +1,181 @@
from typing import Optional from typing import Optional
import allure import allure
from grpc_responses import OBJECT_ACCESS_DENIED, error_matches_status from grpc_responses import OBJECT_ACCESS_DENIED, error_matches_status
from python_keywords.neofs_verbs import (delete_object, get_object, get_range, get_range_hash, head_object, put_object, from python_keywords.neofs_verbs import (
search_object) delete_object,
get_object,
get_range,
get_range_hash,
head_object,
put_object,
search_object,
)
from python_keywords.utility_keywords import get_file_hash from python_keywords.utility_keywords import get_file_hash
OPERATION_ERROR_TYPE = RuntimeError OPERATION_ERROR_TYPE = RuntimeError
def can_get_object(wallet: str, cid: str, oid: str, file_name: str, bearer: Optional[str] = None, def can_get_object(
wallet_config: Optional[str] = None, xhdr: Optional[dict] = None wallet: str,
) -> bool: cid: str,
with allure.step('Try get object from container'): oid: str,
file_name: str,
bearer: Optional[str] = None,
wallet_config: Optional[str] = None,
xhdr: Optional[dict] = None,
) -> bool:
with allure.step("Try get object from container"):
try: try:
got_file_path = get_object(wallet, cid, oid, bearer_token=bearer, wallet_config=wallet_config, xhdr=xhdr) got_file_path = get_object(
wallet, cid, oid, bearer_token=bearer, wallet_config=wallet_config, xhdr=xhdr
)
except OPERATION_ERROR_TYPE as err: except OPERATION_ERROR_TYPE as err:
assert error_matches_status(err, OBJECT_ACCESS_DENIED), f'Expected {err} to match {OBJECT_ACCESS_DENIED}' assert error_matches_status(
err, OBJECT_ACCESS_DENIED
), f"Expected {err} to match {OBJECT_ACCESS_DENIED}"
return False return False
assert get_file_hash(file_name) == get_file_hash(got_file_path) assert get_file_hash(file_name) == get_file_hash(got_file_path)
return True return True
def can_put_object(wallet: str, cid: str, file_name: str, bearer: Optional[str] = None, def can_put_object(
wallet_config: Optional[str] = None, xhdr: Optional[dict] = None, attributes: Optional[dict] = None, wallet: str,
) -> bool: cid: str,
with allure.step('Try put object to container'): file_name: str,
bearer: Optional[str] = None,
wallet_config: Optional[str] = None,
xhdr: Optional[dict] = None,
attributes: Optional[dict] = None,
) -> bool:
with allure.step("Try put object to container"):
try: try:
put_object(wallet, file_name, cid, bearer=bearer, wallet_config=wallet_config, xhdr=xhdr, put_object(
attributes=attributes) wallet,
file_name,
cid,
bearer=bearer,
wallet_config=wallet_config,
xhdr=xhdr,
attributes=attributes,
)
except OPERATION_ERROR_TYPE as err: except OPERATION_ERROR_TYPE as err:
assert error_matches_status(err, OBJECT_ACCESS_DENIED), f'Expected {err} to match {OBJECT_ACCESS_DENIED}' assert error_matches_status(
err, OBJECT_ACCESS_DENIED
), f"Expected {err} to match {OBJECT_ACCESS_DENIED}"
return False return False
return True return True
def can_delete_object(wallet: str, cid: str, oid: str, bearer: Optional[str] = None, def can_delete_object(
wallet_config: Optional[str] = None, xhdr: Optional[dict] = None wallet: str,
) -> bool: cid: str,
with allure.step('Try delete object from container'): oid: str,
bearer: Optional[str] = None,
wallet_config: Optional[str] = None,
xhdr: Optional[dict] = None,
) -> bool:
with allure.step("Try delete object from container"):
try: try:
delete_object(wallet, cid, oid, bearer=bearer, wallet_config=wallet_config, xhdr=xhdr) delete_object(wallet, cid, oid, bearer=bearer, wallet_config=wallet_config, xhdr=xhdr)
except OPERATION_ERROR_TYPE as err: except OPERATION_ERROR_TYPE as err:
assert error_matches_status(err, OBJECT_ACCESS_DENIED), f'Expected {err} to match {OBJECT_ACCESS_DENIED}' assert error_matches_status(
err, OBJECT_ACCESS_DENIED
), f"Expected {err} to match {OBJECT_ACCESS_DENIED}"
return False return False
return True return True
def can_get_head_object(wallet: str, cid: str, oid: str, bearer: Optional[str] = None, def can_get_head_object(
wallet_config: Optional[str] = None, xhdr: Optional[dict] = None wallet: str,
) -> bool: cid: str,
with allure.step('Try get head of object'): oid: str,
bearer: Optional[str] = None,
wallet_config: Optional[str] = None,
xhdr: Optional[dict] = None,
) -> bool:
with allure.step("Try get head of object"):
try: try:
head_object(wallet, cid, oid, bearer_token=bearer, wallet_config=wallet_config, xhdr=xhdr) head_object(
wallet, cid, oid, bearer_token=bearer, wallet_config=wallet_config, xhdr=xhdr
)
except OPERATION_ERROR_TYPE as err: except OPERATION_ERROR_TYPE as err:
assert error_matches_status(err, OBJECT_ACCESS_DENIED), f'Expected {err} to match {OBJECT_ACCESS_DENIED}' assert error_matches_status(
err, OBJECT_ACCESS_DENIED
), f"Expected {err} to match {OBJECT_ACCESS_DENIED}"
return False return False
return True return True
def can_get_range_of_object(wallet: str, cid: str, oid: str, bearer: Optional[str] = None, def can_get_range_of_object(
wallet_config: Optional[str] = None, xhdr: Optional[dict] = None wallet: str,
) -> bool: cid: str,
with allure.step('Try get range of object'): oid: str,
bearer: Optional[str] = None,
wallet_config: Optional[str] = None,
xhdr: Optional[dict] = None,
) -> bool:
with allure.step("Try get range of object"):
try: try:
get_range(wallet, cid, oid, bearer=bearer, range_cut='0:10', wallet_config=wallet_config, get_range(
xhdr=xhdr) wallet,
cid,
oid,
bearer=bearer,
range_cut="0:10",
wallet_config=wallet_config,
xhdr=xhdr,
)
except OPERATION_ERROR_TYPE as err: except OPERATION_ERROR_TYPE as err:
assert error_matches_status(err, OBJECT_ACCESS_DENIED), f'Expected {err} to match {OBJECT_ACCESS_DENIED}' assert error_matches_status(
err, OBJECT_ACCESS_DENIED
), f"Expected {err} to match {OBJECT_ACCESS_DENIED}"
return False return False
return True return True
def can_get_range_hash_of_object(wallet: str, cid: str, oid: str, bearer: Optional[str] = None, def can_get_range_hash_of_object(
wallet_config: Optional[str] = None, xhdr: Optional[dict] = None wallet: str,
) -> bool: cid: str,
with allure.step('Try get range hash of object'): oid: str,
bearer: Optional[str] = None,
wallet_config: Optional[str] = None,
xhdr: Optional[dict] = None,
) -> bool:
with allure.step("Try get range hash of object"):
try: try:
get_range_hash(wallet, cid, oid, bearer_token=bearer, range_cut='0:10', wallet_config=wallet_config, get_range_hash(
xhdr=xhdr) wallet,
cid,
oid,
bearer_token=bearer,
range_cut="0:10",
wallet_config=wallet_config,
xhdr=xhdr,
)
except OPERATION_ERROR_TYPE as err: except OPERATION_ERROR_TYPE as err:
assert error_matches_status(err, OBJECT_ACCESS_DENIED), f'Expected {err} to match {OBJECT_ACCESS_DENIED}' assert error_matches_status(
err, OBJECT_ACCESS_DENIED
), f"Expected {err} to match {OBJECT_ACCESS_DENIED}"
return False return False
return True return True
def can_search_object(wallet: str, cid: str, oid: Optional[str] = None, bearer: Optional[str] = None, def can_search_object(
wallet_config: Optional[str] = None, xhdr: Optional[dict] = None wallet: str,
) -> bool: cid: str,
with allure.step('Try search object in container'): oid: Optional[str] = None,
bearer: Optional[str] = None,
wallet_config: Optional[str] = None,
xhdr: Optional[dict] = None,
) -> bool:
with allure.step("Try search object in container"):
try: try:
oids = search_object(wallet, cid, bearer=bearer, wallet_config=wallet_config, xhdr=xhdr) oids = search_object(wallet, cid, bearer=bearer, wallet_config=wallet_config, xhdr=xhdr)
except OPERATION_ERROR_TYPE as err: except OPERATION_ERROR_TYPE as err:
assert error_matches_status(err, OBJECT_ACCESS_DENIED), f'Expected {err} to match {OBJECT_ACCESS_DENIED}' assert error_matches_status(
err, OBJECT_ACCESS_DENIED
), f"Expected {err} to match {OBJECT_ACCESS_DENIED}"
return False return False
if oid: if oid:
return oid in oids return oid in oids
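All can_* helpers above share one shape: attempt the verb, and on RuntimeError assert that the message matches OBJECT_ACCESS_DENIED before returning False. A sketch of how a test might consume them (illustrative only, not part of this commit; the names are placeholders):

def example_expect_denied_get(wallet: str, cid: str, oid: str, file_name: str, bearer: str):
    # can_get_object already validates that any failure is an ACCESS_DENIED status,
    # so the caller only needs to check the boolean result.
    assert not can_get_object(wallet, cid, oid, file_name, bearer=bearer)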

View file

@@ -6,22 +6,16 @@
""" """
import logging import logging
import allure
from cli_helpers import _cmd_run from cli_helpers import _cmd_run
from common import ( from common import COMPLEX_OBJ_SIZE, NEOFS_CLI_EXEC, NEOFS_ENDPOINT, SIMPLE_OBJ_SIZE, WALLET_CONFIG
COMPLEX_OBJ_SIZE,
NEOFS_CLI_EXEC,
NEOFS_ENDPOINT,
SIMPLE_OBJ_SIZE,
WALLET_CONFIG,
)
from complex_object_actions import get_link_object from complex_object_actions import get_link_object
from neofs_verbs import head_object from neofs_verbs import head_object
import allure
logger = logging.getLogger("NeoLogger") logger = logging.getLogger("NeoLogger")
@allure.step('Put Storagegroup')
@allure.step("Put Storagegroup")
def put_storagegroup( def put_storagegroup(
wallet: str, wallet: str,
cid: str, cid: str,
@@ -55,7 +49,7 @@ def put_storagegroup(
return oid return oid
@allure.step('List Storagegroup') @allure.step("List Storagegroup")
def list_storagegroup( def list_storagegroup(
wallet: str, cid: str, bearer_token: str = "", wallet_config: str = WALLET_CONFIG wallet: str, cid: str, bearer_token: str = "", wallet_config: str = WALLET_CONFIG
): ):
@@ -82,7 +76,7 @@ def list_storagegroup(
return found_objects return found_objects
@allure.step('Get Storagegroup') @allure.step("Get Storagegroup")
def get_storagegroup( def get_storagegroup(
wallet: str, wallet: str,
cid: str, cid: str,
@@ -128,7 +122,7 @@ def get_storagegroup(
return sg_dict return sg_dict
@allure.step('Delete Storagegroup') @allure.step("Delete Storagegroup")
def delete_storagegroup( def delete_storagegroup(
wallet: str, wallet: str,
cid: str, cid: str,
@@ -159,7 +153,7 @@ def delete_storagegroup(
return tombstone_id return tombstone_id
@allure.step('Verify list operation over Storagegroup') @allure.step("Verify list operation over Storagegroup")
def verify_list_storage_group( def verify_list_storage_group(
wallet: str, wallet: str,
cid: str, cid: str,
@@ -173,7 +167,7 @@ def verify_list_storage_group(
assert storagegroup in storage_groups assert storagegroup in storage_groups
@allure.step('Verify get operation over Storagegroup') @allure.step("Verify get operation over Storagegroup")
def verify_get_storage_group( def verify_get_storage_group(
wallet: str, wallet: str,
cid: str, cid: str,

View file

@@ -9,9 +9,9 @@ CONTAINER_WAIT_INTERVAL = "1m"
SIMPLE_OBJ_SIZE = int(os.getenv("SIMPLE_OBJ_SIZE", "1000")) SIMPLE_OBJ_SIZE = int(os.getenv("SIMPLE_OBJ_SIZE", "1000"))
COMPLEX_OBJ_SIZE = int(os.getenv("COMPLEX_OBJ_SIZE", "2000")) COMPLEX_OBJ_SIZE = int(os.getenv("COMPLEX_OBJ_SIZE", "2000"))
MAINNET_BLOCK_TIME = os.getenv('MAINNET_BLOCK_TIME', "1s") MAINNET_BLOCK_TIME = os.getenv("MAINNET_BLOCK_TIME", "1s")
MAINNET_TIMEOUT = os.getenv('MAINNET_TIMEOUT', "1min") MAINNET_TIMEOUT = os.getenv("MAINNET_TIMEOUT", "1min")
MORPH_BLOCK_TIME = os.getenv("MORPH_BLOCK_TIME", '1s') MORPH_BLOCK_TIME = os.getenv("MORPH_BLOCK_TIME", "1s")
NEOFS_CONTRACT_CACHE_TIMEOUT = os.getenv("NEOFS_CONTRACT_CACHE_TIMEOUT", "30s") NEOFS_CONTRACT_CACHE_TIMEOUT = os.getenv("NEOFS_CONTRACT_CACHE_TIMEOUT", "30s")
# Time interval that allows a GC pass on storage node (this includes GC sleep interval # Time interval that allows a GC pass on storage node (this includes GC sleep interval
@@ -20,11 +20,11 @@ STORAGE_GC_TIME = os.getenv("STORAGE_GC_TIME", "75s")
NEOFS_ENDPOINT = os.getenv("NEOFS_ENDPOINT", "s01.neofs.devenv:8080") NEOFS_ENDPOINT = os.getenv("NEOFS_ENDPOINT", "s01.neofs.devenv:8080")
NEO_MAINNET_ENDPOINT = os.getenv("NEO_MAINNET_ENDPOINT", 'http://main-chain.neofs.devenv:30333') NEO_MAINNET_ENDPOINT = os.getenv("NEO_MAINNET_ENDPOINT", "http://main-chain.neofs.devenv:30333")
MORPH_ENDPOINT = os.getenv("MORPH_ENDPOINT", 'http://morph-chain.neofs.devenv:30333') MORPH_ENDPOINT = os.getenv("MORPH_ENDPOINT", "http://morph-chain.neofs.devenv:30333")
HTTP_GATE = os.getenv("HTTP_GATE", 'http://http.neofs.devenv') HTTP_GATE = os.getenv("HTTP_GATE", "http://http.neofs.devenv")
S3_GATE = os.getenv("S3_GATE", 'https://s3.neofs.devenv:8080') S3_GATE = os.getenv("S3_GATE", "https://s3.neofs.devenv:8080")
GAS_HASH = '0xd2a4cff31913016155e38e474a2c06d08be276cf' GAS_HASH = "0xd2a4cff31913016155e38e474a2c06d08be276cf"
NEOFS_CONTRACT = os.getenv("NEOFS_IR_CONTRACTS_NEOFS") NEOFS_CONTRACT = os.getenv("NEOFS_IR_CONTRACTS_NEOFS")
@@ -43,53 +43,67 @@ STORAGE_CONTROL_ENDPOINT_1 = os.getenv("STORAGE_CONTROL_ENDPOINT_1", "s01.neofs.
STORAGE_CONTROL_ENDPOINT_2 = os.getenv("STORAGE_CONTROL_ENDPOINT_2", "s02.neofs.devenv:8081") STORAGE_CONTROL_ENDPOINT_2 = os.getenv("STORAGE_CONTROL_ENDPOINT_2", "s02.neofs.devenv:8081")
STORAGE_CONTROL_ENDPOINT_3 = os.getenv("STORAGE_CONTROL_ENDPOINT_3", "s03.neofs.devenv:8081") STORAGE_CONTROL_ENDPOINT_3 = os.getenv("STORAGE_CONTROL_ENDPOINT_3", "s03.neofs.devenv:8081")
STORAGE_CONTROL_ENDPOINT_4 = os.getenv("STORAGE_CONTROL_ENDPOINT_4", "s04.neofs.devenv:8081") STORAGE_CONTROL_ENDPOINT_4 = os.getenv("STORAGE_CONTROL_ENDPOINT_4", "s04.neofs.devenv:8081")
STORAGE_CONTROL_ENDPOINT_PRIVATE = os.getenv("STORAGE_CONTROL_ENDPOINT_PRIVATE", "false").lower() == "true" STORAGE_CONTROL_ENDPOINT_PRIVATE = (
os.getenv("STORAGE_CONTROL_ENDPOINT_PRIVATE", "false").lower() == "true"
)
STORAGE_WALLET_PATH_1 = os.getenv("STORAGE_WALLET_PATH_1", f"{DEVENV_PATH}/services/storage/wallet01.json") STORAGE_WALLET_PATH_1 = os.getenv(
STORAGE_WALLET_PATH_2 = os.getenv("STORAGE_WALLET_PATH_2", f"{DEVENV_PATH}/services/storage/wallet02.json") "STORAGE_WALLET_PATH_1", f"{DEVENV_PATH}/services/storage/wallet01.json"
STORAGE_WALLET_PATH_3 = os.getenv("STORAGE_WALLET_PATH_3", f"{DEVENV_PATH}/services/storage/wallet03.json") )
STORAGE_WALLET_PATH_4 = os.getenv("STORAGE_WALLET_PATH_4", f"{DEVENV_PATH}/services/storage/wallet04.json") STORAGE_WALLET_PATH_2 = os.getenv(
"STORAGE_WALLET_PATH_2", f"{DEVENV_PATH}/services/storage/wallet02.json"
)
STORAGE_WALLET_PATH_3 = os.getenv(
"STORAGE_WALLET_PATH_3", f"{DEVENV_PATH}/services/storage/wallet03.json"
)
STORAGE_WALLET_PATH_4 = os.getenv(
"STORAGE_WALLET_PATH_4", f"{DEVENV_PATH}/services/storage/wallet04.json"
)
STORAGE_WALLET_PATH = STORAGE_WALLET_PATH_1 STORAGE_WALLET_PATH = STORAGE_WALLET_PATH_1
STORAGE_WALLET_PASS = os.getenv("STORAGE_WALLET_PASS", "") STORAGE_WALLET_PASS = os.getenv("STORAGE_WALLET_PASS", "")
STORAGE_WALLET_CONFIG = f"{CLI_CONFIGS_PATH}/empty_passwd.yml" STORAGE_WALLET_CONFIG = f"{CLI_CONFIGS_PATH}/empty_passwd.yml"
NEOFS_NETMAP_DICT = { NEOFS_NETMAP_DICT = {
's01': { "s01": {
'rpc': STORAGE_RPC_ENDPOINT_1, "rpc": STORAGE_RPC_ENDPOINT_1,
'control': STORAGE_CONTROL_ENDPOINT_1, "control": STORAGE_CONTROL_ENDPOINT_1,
'wallet_path': STORAGE_WALLET_PATH_1, "wallet_path": STORAGE_WALLET_PATH_1,
'UN-LOCODE': 'RU MOW' "UN-LOCODE": "RU MOW",
}, },
's02': { "s02": {
'rpc': STORAGE_RPC_ENDPOINT_2, "rpc": STORAGE_RPC_ENDPOINT_2,
'control': STORAGE_CONTROL_ENDPOINT_2, "control": STORAGE_CONTROL_ENDPOINT_2,
'wallet_path': STORAGE_WALLET_PATH_2, "wallet_path": STORAGE_WALLET_PATH_2,
'UN-LOCODE': 'RU LED' "UN-LOCODE": "RU LED",
}, },
's03': { "s03": {
'rpc': STORAGE_RPC_ENDPOINT_3, "rpc": STORAGE_RPC_ENDPOINT_3,
'control': STORAGE_CONTROL_ENDPOINT_3, "control": STORAGE_CONTROL_ENDPOINT_3,
'wallet_path': STORAGE_WALLET_PATH_3, "wallet_path": STORAGE_WALLET_PATH_3,
'UN-LOCODE': 'SE STO' "UN-LOCODE": "SE STO",
}, },
's04': { "s04": {
'rpc': STORAGE_RPC_ENDPOINT_4, "rpc": STORAGE_RPC_ENDPOINT_4,
'control': STORAGE_CONTROL_ENDPOINT_4, "control": STORAGE_CONTROL_ENDPOINT_4,
'wallet_path': STORAGE_WALLET_PATH_4, "wallet_path": STORAGE_WALLET_PATH_4,
'UN-LOCODE': 'FI HEL' "UN-LOCODE": "FI HEL",
}, },
} }
NEOFS_NETMAP = [i['rpc'] for i in NEOFS_NETMAP_DICT.values()] NEOFS_NETMAP = [i["rpc"] for i in NEOFS_NETMAP_DICT.values()]
NEOGO_EXECUTABLE = os.getenv('NEOGO_EXECUTABLE', 'neo-go') NEOGO_EXECUTABLE = os.getenv("NEOGO_EXECUTABLE", "neo-go")
NEOFS_CLI_EXEC = os.getenv('NEOFS_CLI_EXEC', 'neofs-cli') NEOFS_CLI_EXEC = os.getenv("NEOFS_CLI_EXEC", "neofs-cli")
# Config of wallet with empty password. We use it for: # Config of wallet with empty password. We use it for:
# - for wallet of user on behalf of whom we are running tests # - for wallet of user on behalf of whom we are running tests
# - for wallets of storage nodes # - for wallets of storage nodes
WALLET_CONFIG = f"{CLI_CONFIGS_PATH}/empty_passwd.yml" WALLET_CONFIG = f"{CLI_CONFIGS_PATH}/empty_passwd.yml"
MAINNET_WALLET_PATH = os.getenv("MAINNET_WALLET_PATH", f"{DEVENV_PATH}/services/chain/node-wallet.json") MAINNET_WALLET_PATH = os.getenv(
MAINNET_WALLET_CONFIG = os.getenv("MAINNET_WALLET_CONFIG", f"{CLI_CONFIGS_PATH}/one_wallet_password.yml") "MAINNET_WALLET_PATH", f"{DEVENV_PATH}/services/chain/node-wallet.json"
)
MAINNET_WALLET_CONFIG = os.getenv(
"MAINNET_WALLET_CONFIG", f"{CLI_CONFIGS_PATH}/one_wallet_password.yml"
)
MAINNET_SINGLE_ADDR = os.getenv("MAINNET_SINGLE_ADDR", "NfgHwwTi3wHAS8aFAN243C5vGbkYDpqLHP") MAINNET_SINGLE_ADDR = os.getenv("MAINNET_SINGLE_ADDR", "NfgHwwTi3wHAS8aFAN243C5vGbkYDpqLHP")
MAINNET_WALLET_PASS = os.getenv("MAINNET_WALLET_PASS", "one") MAINNET_WALLET_PASS = os.getenv("MAINNET_WALLET_PASS", "one")
@@ -97,7 +111,9 @@ IR_WALLET_PATH = os.getenv("IR_WALLET_PATH", f"{DEVENV_PATH}/services/ir/az.json
IR_WALLET_CONFIG = os.getenv("IR_WALLET_CONFIG", f"{CLI_CONFIGS_PATH}/one_wallet_password.yml") IR_WALLET_CONFIG = os.getenv("IR_WALLET_CONFIG", f"{CLI_CONFIGS_PATH}/one_wallet_password.yml")
IR_WALLET_PASS = os.getenv("IR_WALLET_PASS", "one") IR_WALLET_PASS = os.getenv("IR_WALLET_PASS", "one")
S3_GATE_WALLET_PATH = os.getenv("S3_GATE_WALLET_PATH", f"{DEVENV_PATH}/services/s3_gate/wallet.json") S3_GATE_WALLET_PATH = os.getenv(
"S3_GATE_WALLET_PATH", f"{DEVENV_PATH}/services/s3_gate/wallet.json"
)
S3_GATE_WALLET_PASS = os.getenv("S3_GATE_WALLET_PASS", "s3") S3_GATE_WALLET_PASS = os.getenv("S3_GATE_WALLET_PASS", "s3")
# Parameters that control SSH connection to storage node # Parameters that control SSH connection to storage node
@@ -109,7 +125,7 @@ STORAGE_NODE_SSH_PRIVATE_KEY_PATH = os.getenv("STORAGE_NODE_SSH_PRIVATE_KEY_PATH
STORAGE_NODE_BIN_PATH = os.getenv("STORAGE_NODE_BIN_PATH", f"{DEVENV_PATH}/vendor") STORAGE_NODE_BIN_PATH = os.getenv("STORAGE_NODE_BIN_PATH", f"{DEVENV_PATH}/vendor")
# Path and config for neofs-adm utility. Optional if tests are running against devenv # Path and config for neofs-adm utility. Optional if tests are running against devenv
NEOFS_ADM_EXEC = os.getenv('NEOFS_ADM_EXEC', 'neofs-adm') NEOFS_ADM_EXEC = os.getenv("NEOFS_ADM_EXEC", "neofs-adm")
NEOFS_ADM_CONFIG_PATH = os.getenv("NEOFS_ADM_CONFIG_PATH") NEOFS_ADM_CONFIG_PATH = os.getenv("NEOFS_ADM_CONFIG_PATH")
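A small sketch of how the devenv constants above are typically consumed (illustrative, not part of this commit); the node key "s01" is just one example entry from NEOFS_NETMAP_DICT.

node_info = NEOFS_NETMAP_DICT["s01"]
rpc_endpoint = node_info["rpc"]          # gRPC endpoint passed to neofs-cli calls
control_endpoint = node_info["control"]  # control endpoint used for node healthchecks
wallet_path = node_info["wallet_path"]   # node wallet file under the devenv services directory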

View file

@@ -1,21 +1,23 @@
EACL_OBJ_FILTERS = {'$Object:objectID': 'objectID', EACL_OBJ_FILTERS = {
'$Object:containerID': 'containerID', "$Object:objectID": "objectID",
'$Object:ownerID': 'ownerID', "$Object:containerID": "containerID",
'$Object:creationEpoch': 'creationEpoch', "$Object:ownerID": "ownerID",
'$Object:payloadLength': 'payloadLength', "$Object:creationEpoch": "creationEpoch",
'$Object:payloadHash': 'payloadHash', "$Object:payloadLength": "payloadLength",
'$Object:objectType': 'objectType', "$Object:payloadHash": "payloadHash",
'$Object:homomorphicHash': 'homomorphicHash', "$Object:objectType": "objectType",
'$Object:version': 'version'} "$Object:homomorphicHash": "homomorphicHash",
"$Object:version": "version",
}
VERB_FILTER_DEP = { VERB_FILTER_DEP = {
'$Object:objectID': ['GET', 'HEAD', 'DELETE', 'RANGE', 'RANGEHASH'], "$Object:objectID": ["GET", "HEAD", "DELETE", "RANGE", "RANGEHASH"],
'$Object:containerID': ['GET', 'PUT', 'HEAD', 'DELETE', 'SEARCH', 'RANGE', 'RANGEHASH'], "$Object:containerID": ["GET", "PUT", "HEAD", "DELETE", "SEARCH", "RANGE", "RANGEHASH"],
'$Object:ownerID': ['GET', 'HEAD'], "$Object:ownerID": ["GET", "HEAD"],
'$Object:creationEpoch': ['GET', 'PUT', 'HEAD'], "$Object:creationEpoch": ["GET", "PUT", "HEAD"],
'$Object:payloadLength': ['GET', 'PUT', 'HEAD'], "$Object:payloadLength": ["GET", "PUT", "HEAD"],
'$Object:payloadHash': ['GET', 'PUT', 'HEAD'], "$Object:payloadHash": ["GET", "PUT", "HEAD"],
'$Object:objectType': ['GET', 'PUT', 'HEAD'], "$Object:objectType": ["GET", "PUT", "HEAD"],
'$Object:homomorphicHash': ['GET', 'PUT', 'HEAD'], "$Object:homomorphicHash": ["GET", "PUT", "HEAD"],
'$Object:version': ['GET', 'PUT', 'HEAD'] "$Object:version": ["GET", "PUT", "HEAD"],
} }
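VERB_FILTER_DEP maps each $Object filter to the verbs it may be checked against; inverting it yields the filters applicable to a single verb. A small sketch (illustrative, not part of this commit):

def filters_for_verb(verb: str) -> list[str]:
    # Collect every object filter whose allowed-verb list contains the given verb.
    return [obj_filter for obj_filter, verbs in VERB_FILTER_DEP.items() if verb in verbs]

# Example: filters_for_verb("SEARCH") == ["$Object:containerID"]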