From 147cac0ebc863638392f97c7c2c15d74cff4da98 Mon Sep 17 00:00:00 2001 From: Vladimir Domnich Date: Wed, 28 Sep 2022 16:07:16 +0400 Subject: [PATCH] [#314] Format all files with black and isort Signed-off-by: Vladimir Domnich --- pyproject.toml | 2 +- pytest_tests/helpers/env_properties.py | 26 +- pytest_tests/helpers/grpc_responses.py | 1 - pytest_tests/helpers/iptables_helper.py | 5 +- pytest_tests/helpers/k6.py | 19 +- pytest_tests/helpers/remote_process.py | 9 +- pytest_tests/helpers/s3_helper.py | 2 +- pytest_tests/helpers/sbercloud_helper.py | 100 ++--- pytest_tests/helpers/service_helper.py | 115 +++--- pytest_tests/helpers/ssh_helper.py | 125 ++++--- pytest_tests/steps/s3_gate_base.py | 29 +- pytest_tests/testsuites/acl/conftest.py | 25 +- .../acl/storage_group/test_storagegroup.py | 43 +-- pytest_tests/testsuites/acl/test_acl.py | 33 +- pytest_tests/testsuites/acl/test_bearer.py | 42 +-- pytest_tests/testsuites/acl/test_eacl.py | 5 +- .../testsuites/acl/test_eacl_filters.py | 78 +--- .../testsuites/container/test_container.py | 58 +-- .../failovers/test_failover_network.py | 64 ++-- .../failovers/test_failover_storage.py | 83 +++-- .../testsuites/object/test_object_api.py | 103 ++++-- .../services/s3_gate/test_s3_object.py | 5 +- .../testsuites/services/test_binaries.py | 32 +- .../test_object_session_token.py | 3 +- robot/resources/lib/python_keywords/acl.py | 159 ++++---- .../lib/python_keywords/cli_utils/adm/adm.py | 25 +- .../cli_utils/adm/completion.py | 2 +- .../cli_utils/adm/completion_type.py | 8 +- .../python_keywords/cli_utils/adm/config.py | 6 +- .../python_keywords/cli_utils/adm/gendoc.py | 12 +- .../python_keywords/cli_utils/adm/morph.py | 218 ++++++----- .../cli_utils/adm/storage_config.py | 4 +- .../python_keywords/cli_utils/adm/subnet.py | 125 ++++--- .../python_keywords/cli_utils/adm/version.py | 2 +- .../cli_utils/cli/accounting.py | 6 +- .../lib/python_keywords/cli_utils/cli/acl.py | 8 +- .../cli_utils/cli/container.py | 162 +++++--- .../python_keywords/cli_utils/cli/object.py | 350 +++++++++++------- .../python_keywords/cli_utils/cli/version.py | 6 +- .../python_keywords/cli_utils/cli_command.py | 23 +- .../lib/python_keywords/container_access.py | 119 ++++-- .../lib/python_keywords/failover_utils.py | 30 +- .../lib/python_keywords/object_access.py | 174 ++++++--- .../lib/python_keywords/storage_group.py | 24 +- robot/variables/common.py | 96 +++-- robot/variables/eacl_object_filters.py | 40 +- 46 files changed, 1506 insertions(+), 1100 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index c8f7731d..13ca64c5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.isort] profile = "black" -src_paths = ["pytest_tests", "robot"] +src_paths = ["neofs-keywords", "pytest_tests", "robot"] line_length = 100 [tool.black] diff --git a/pytest_tests/helpers/env_properties.py b/pytest_tests/helpers/env_properties.py index a961546b..53e802a4 100644 --- a/pytest_tests/helpers/env_properties.py +++ b/pytest_tests/helpers/env_properties.py @@ -4,37 +4,37 @@ import re import allure from pytest import Config -logger = logging.getLogger('NeoLogger') +logger = logging.getLogger("NeoLogger") -@allure.step('Read environment.properties') +@allure.step("Read environment.properties") def read_env_properties(config: Config) -> dict: - environment_dir = config.getoption('--alluredir') + environment_dir = config.getoption("--alluredir") if not environment_dir: return None - file_path = f'{environment_dir}/environment.properties' - with open(file_path, 'r') as file: 
+ file_path = f"{environment_dir}/environment.properties" + with open(file_path, "r") as file: raw_content = file.read() env_properties = {} - for line in raw_content.split('\n'): - m = re.match('(.*?)=(.*)', line) + for line in raw_content.split("\n"): + m = re.match("(.*?)=(.*)", line) if not m: - logger.warning(f'Could not parse env property from {line}') + logger.warning(f"Could not parse env property from {line}") continue key, value = m.group(1), m.group(2) env_properties[key] = value return env_properties -@allure.step('Update data in environment.properties') +@allure.step("Update data in environment.properties") def save_env_properties(config: Config, env_data: dict) -> None: - environment_dir = config.getoption('--alluredir') + environment_dir = config.getoption("--alluredir") if not environment_dir: return None - file_path = f'{environment_dir}/environment.properties' - with open(file_path, 'a+') as env_file: + file_path = f"{environment_dir}/environment.properties" + with open(file_path, "a+") as env_file: for env, env_value in env_data.items(): - env_file.write(f'{env}={env_value}\n') + env_file.write(f"{env}={env_value}\n") diff --git a/pytest_tests/helpers/grpc_responses.py b/pytest_tests/helpers/grpc_responses.py index 9979b822..47a96f54 100644 --- a/pytest_tests/helpers/grpc_responses.py +++ b/pytest_tests/helpers/grpc_responses.py @@ -1,6 +1,5 @@ import re - # Regex patterns of status codes of Container service (https://github.com/nspcc-dev/neofs-spec/blob/98b154848116223e486ce8b43eaa35fec08b4a99/20-api-v2/container.md) CONTAINER_NOT_FOUND = "code = 3072.*message = container not found" diff --git a/pytest_tests/helpers/iptables_helper.py b/pytest_tests/helpers/iptables_helper.py index 551a6366..dc0a2579 100644 --- a/pytest_tests/helpers/iptables_helper.py +++ b/pytest_tests/helpers/iptables_helper.py @@ -2,15 +2,14 @@ from ssh_helper import HostClient class IpTablesHelper: - @staticmethod def drop_input_traffic_to_port(client: HostClient, ports: list[str]): for port in ports: - cmd_output = client.exec(cmd=f'sudo iptables -A INPUT -p tcp --dport {port} -j DROP') + cmd_output = client.exec(cmd=f"sudo iptables -A INPUT -p tcp --dport {port} -j DROP") assert cmd_output.rc == 0 @staticmethod def restore_input_traffic_to_port(client: HostClient, ports: list[str]): for port in ports: - cmd_output = client.exec(cmd=f'sudo iptables -D INPUT -p tcp --dport {port} -j DROP') + cmd_output = client.exec(cmd=f"sudo iptables -D INPUT -p tcp --dport {port} -j DROP") assert cmd_output.rc == 0 diff --git a/pytest_tests/helpers/k6.py b/pytest_tests/helpers/k6.py index 4afa9dd4..5f5a9f28 100644 --- a/pytest_tests/helpers/k6.py +++ b/pytest_tests/helpers/k6.py @@ -1,9 +1,11 @@ -from dataclasses import dataclass -import allure from contextlib import contextmanager +from dataclasses import dataclass from time import sleep -from pytest_tests.helpers.ssh_helper import HostClient + +import allure + from pytest_tests.helpers.remote_process import RemoteProcess +from pytest_tests.helpers.ssh_helper import HostClient EXIT_RESULT_CODE = 0 @@ -22,11 +24,7 @@ class LoadParams: class K6: - def __init__( - self, - load_params: LoadParams, - host_client: HostClient - ): + def __init__(self, load_params: LoadParams, host_client: HostClient): self.load_params = load_params self.host_client = host_client @@ -51,10 +49,7 @@ class K6: @allure.step("Prepare containers and objects") def prepare(self) -> str: self._k6_dir = self.k6_dir - if ( - self.load_params.load_type == "http" - or self.load_params.load_type == 
"grpc" - ): + if self.load_params.load_type == "http" or self.load_params.load_type == "grpc": command = ( f"{self.k6_dir}/scenarios/preset/preset_grpc.py " f"--size {self.load_params.obj_size} " diff --git a/pytest_tests/helpers/remote_process.py b/pytest_tests/helpers/remote_process.py index 3e6e6245..9caf8d01 100644 --- a/pytest_tests/helpers/remote_process.py +++ b/pytest_tests/helpers/remote_process.py @@ -1,8 +1,11 @@ from __future__ import annotations + import uuid +from typing import Optional + import allure from tenacity import retry, stop_after_attempt, wait_fixed -from typing import Optional + from pytest_tests.helpers.ssh_helper import HostClient @@ -179,8 +182,6 @@ class RemoteProcess: f"echo $? > {self.process_dir}/rc" ) - self.host_client.exec( - f'echo "{script}" > {self.process_dir}/command.sh' - ) + self.host_client.exec(f'echo "{script}" > {self.process_dir}/command.sh') self.host_client.exec(f"cat {self.process_dir}/command.sh") self.host_client.exec(f"chmod +x {self.process_dir}/command.sh") diff --git a/pytest_tests/helpers/s3_helper.py b/pytest_tests/helpers/s3_helper.py index 95ad2073..0c9ece90 100644 --- a/pytest_tests/helpers/s3_helper.py +++ b/pytest_tests/helpers/s3_helper.py @@ -1,7 +1,7 @@ from typing import Optional import allure -import pytest + from steps import s3_gate_bucket, s3_gate_object diff --git a/pytest_tests/helpers/sbercloud_helper.py b/pytest_tests/helpers/sbercloud_helper.py index 2441a7ea..2775bc5f 100644 --- a/pytest_tests/helpers/sbercloud_helper.py +++ b/pytest_tests/helpers/sbercloud_helper.py @@ -20,17 +20,17 @@ class SberCloudConfig: project_id: Optional[str] = None @staticmethod - def from_dict(config_dict: dict) -> 'SberCloudConfig': + def from_dict(config_dict: dict) -> "SberCloudConfig": return SberCloudConfig(**config_dict) @staticmethod - def from_yaml(config_path: str) -> 'SberCloudConfig': + def from_yaml(config_path: str) -> "SberCloudConfig": with open(config_path) as file: config_dict = yaml.load(file, Loader=yaml.FullLoader) return SberCloudConfig.from_dict(config_dict["sbercloud"]) @staticmethod - def from_env() -> 'SberCloudConfig': + def from_env() -> "SberCloudConfig": config_dict = { "access_key_id": os.getenv("SBERCLOUD_ACCESS_KEY_ID"), "secret_key": os.getenv("SBERCLOUD_SECRET_KEY"), @@ -53,7 +53,9 @@ class SberCloudAuthRequests: ALGORITHM = "SDK-HMAC-SHA256" TIMESTAMP_FORMAT = "%Y%m%dT%H%M%SZ" - def __init__(self, endpoint: str, access_key_id: str, secret_key: str, base_path: str = "") -> None: + def __init__( + self, endpoint: str, access_key_id: str, secret_key: str, base_path: str = "" + ) -> None: self.endpoint = endpoint self.base_path = base_path self.access_key_id = access_key_id @@ -62,12 +64,14 @@ class SberCloudAuthRequests: def get(self, path: str, query: Optional[dict] = None) -> requests.Response: return self._send_request("GET", path, query, data=None) - def post(self, path: str, query: Optional[dict] = None, - data: Optional[dict] = None) -> requests.Response: + def post( + self, path: str, query: Optional[dict] = None, data: Optional[dict] = None + ) -> requests.Response: return self._send_request("POST", path, query, data) - def _send_request(self, method: str, path: str, query: Optional[dict], - data: Optional[dict]) -> requests.Response: + def _send_request( + self, method: str, path: str, query: Optional[dict], data: Optional[dict] + ) -> requests.Response: if self.base_path: path = self.base_path + path @@ -82,8 +86,9 @@ class SberCloudAuthRequests: body = content.encode(self.ENCODING) 
signed_headers = self._build_signed_headers(headers) - canonical_request = self._build_canonical_request(method, path, query, body, headers, - signed_headers) + canonical_request = self._build_canonical_request( + method, path, query, body, headers, signed_headers + ) signature = self._build_signature(timestamp, canonical_request) headers["Authorization"] = self._build_authorization_header(signature, signed_headers) @@ -92,8 +97,10 @@ class SberCloudAuthRequests: response = requests.request(method, url, headers=headers, data=body) if response.status_code < 200 or response.status_code >= 300: - raise AssertionError(f"Request to url={url} failed: status={response.status_code} " - f"response={response.text})") + raise AssertionError( + f"Request to url={url} failed: status={response.status_code} " + f"response={response.text})" + ) return response def _build_original_headers(self, timestamp: str) -> dict[str, str]: @@ -105,21 +112,30 @@ class SberCloudAuthRequests: def _build_signed_headers(self, headers: dict[str, str]) -> list[str]: return sorted(header_name.lower() for header_name in headers) - def _build_canonical_request(self, method: str, path: str, query: Optional[dict], body: bytes, - headers: dict[str, str], signed_headers: list[str]) -> str: + def _build_canonical_request( + self, + method: str, + path: str, + query: Optional[dict], + body: bytes, + headers: dict[str, str], + signed_headers: list[str], + ) -> str: canonical_headers = self._build_canonical_headers(headers, signed_headers) body_hash = self._calc_sha256_hash(body) canonical_url = self._build_canonical_url(path) canonical_query_string = self._build_canonical_query_string(query) - return "\n".join([ - method.upper(), - canonical_url, - canonical_query_string, - canonical_headers, - ";".join(signed_headers), - body_hash - ]) + return "\n".join( + [ + method.upper(), + canonical_url, + canonical_query_string, + canonical_headers, + ";".join(signed_headers), + body_hash, + ] + ) def _build_canonical_headers(self, headers: dict[str, str], signed_headers: list[str]) -> str: normalized_headers = {} @@ -166,7 +182,7 @@ class SberCloudAuthRequests: hmac_digest = hmac.new( key=self.secret_key.encode(self.ENCODING), msg=string_to_sign.encode(self.ENCODING), - digestmod=hashlib.sha256 + digestmod=hashlib.sha256, ).digest() signature = binascii.hexlify(hmac_digest).decode() @@ -185,6 +201,7 @@ class SberCloud: https://docs.sbercloud.ru/terraform/ug/topics/quickstart.html https://support.hc.sbercloud.ru/en-us/api/ecs/en-us_topic_0020212668.html """ + def __init__(self, config: SberCloudConfig) -> None: self.ecs_requests = SberCloudAuthRequests( endpoint=config.ecs_endpoint, @@ -198,42 +215,29 @@ class SberCloud: if not self.ecs_nodes or no_cache: self.ecs_nodes = self.get_ecs_nodes() nodes_by_ip = [ - node for node in self.ecs_nodes - if ip in [ - node_ip['addr'] - for node_ips in node['addresses'].values() - for node_ip in node_ips - ] + node + for node in self.ecs_nodes + if ip + in [node_ip["addr"] for node_ips in node["addresses"].values() for node_ip in node_ips] ] assert len(nodes_by_ip) == 1 - return nodes_by_ip[0]['id'] + return nodes_by_ip[0]["id"] def get_ecs_nodes(self) -> list[dict]: response = self.ecs_requests.get("/detail", {"limit": "1000"}).json() return response["servers"] def start_node(self, node_id: Optional[str] = None, node_ip: Optional[str] = None) -> None: - data = { - 'os-start': { - 'servers': [ - { - 'id': node_id or self.find_ecs_node_by_ip(node_ip) - } - ] - } - } + data = {"os-start": {"servers": 
[{"id": node_id or self.find_ecs_node_by_ip(node_ip)}]}} self.ecs_requests.post("/action", data=data) - def stop_node(self, node_id: Optional[str] = None, node_ip: Optional[str] = None, - hard: bool = False) -> None: + def stop_node( + self, node_id: Optional[str] = None, node_ip: Optional[str] = None, hard: bool = False + ) -> None: data = { - 'os-stop': { - 'type': 'HARD' if hard else 'SOFT', - 'servers': [ - { - 'id': node_id or self.find_ecs_node_by_ip(node_ip) - } - ] + "os-stop": { + "type": "HARD" if hard else "SOFT", + "servers": [{"id": node_id or self.find_ecs_node_by_ip(node_ip)}], } } self.ecs_requests.post("/action", data=data) diff --git a/pytest_tests/helpers/service_helper.py b/pytest_tests/helpers/service_helper.py index 22e486c7..6df5a620 100644 --- a/pytest_tests/helpers/service_helper.py +++ b/pytest_tests/helpers/service_helper.py @@ -6,18 +6,23 @@ import time from contextlib import contextmanager from datetime import datetime from typing import Optional -from requests import HTTPError import docker - from cli_helpers import _cmd_run -from common import (INFRASTRUCTURE_TYPE, NEOFS_CLI_EXEC, NEOFS_NETMAP_DICT, STORAGE_NODE_BIN_PATH, - STORAGE_NODE_SSH_PASSWORD, STORAGE_NODE_SSH_PRIVATE_KEY_PATH, - STORAGE_NODE_SSH_USER, WALLET_CONFIG) +from common import ( + INFRASTRUCTURE_TYPE, + NEOFS_CLI_EXEC, + NEOFS_NETMAP_DICT, + STORAGE_NODE_BIN_PATH, + STORAGE_NODE_SSH_PASSWORD, + STORAGE_NODE_SSH_PRIVATE_KEY_PATH, + STORAGE_NODE_SSH_USER, + WALLET_CONFIG, +) +from requests import HTTPError from ssh_helper import HostClient - -logger = logging.getLogger('NeoLogger') +logger = logging.getLogger("NeoLogger") class LocalDevEnvStorageServiceHelper: @@ -59,8 +64,8 @@ class LocalDevEnvStorageServiceHelper: wallet_path = NEOFS_NETMAP_DICT[node_name]["wallet_path"] cmd = ( - f'{NEOFS_CLI_EXEC} {command} --endpoint {control_endpoint} ' - f'--wallet {wallet_path} --config {WALLET_CONFIG}' + f"{NEOFS_CLI_EXEC} {command} --endpoint {control_endpoint} " + f"--wallet {wallet_path} --config {WALLET_CONFIG}" ) output = _cmd_run(cmd) return output @@ -77,8 +82,9 @@ class LocalDevEnvStorageServiceHelper: def get_binaries_version(self) -> dict: return {} - def dump_logs(self, directory_path: str, since: Optional[datetime], - until: Optional[datetime]) -> None: + def dump_logs( + self, directory_path: str, since: Optional[datetime], until: Optional[datetime] + ) -> None: # All containers are running on the same host, so we can use 1st node to collect all logs first_node_name = next(iter(NEOFS_NETMAP_DICT)) client = self._get_docker_client(first_node_name) @@ -107,8 +113,9 @@ class LocalDevEnvStorageServiceHelper: return container return None - def _wait_for_container_to_be_in_state(self, node_name: str, container_name: str, - expected_state: str) -> None: + def _wait_for_container_to_be_in_state( + self, node_name: str, container_name: str, expected_state: str + ) -> None: for __attempt in range(10): container = self._get_container_by_name(node_name, container_name) logger.info(f"Container info:\n{json.dumps(container, indent=2)}") @@ -116,7 +123,7 @@ class LocalDevEnvStorageServiceHelper: return time.sleep(5) - raise AssertionError(f'Container {container_name} is not in {expected_state} state.') + raise AssertionError(f"Container {container_name} is not in {expected_state} state.") def _get_docker_client(self, node_name: str) -> docker.APIClient: # For local docker we use default docker client that talks to unix socket @@ -143,7 +150,9 @@ class CloudVmStorageServiceHelper: logger.info(f"Start 
command output: {output.stdout}") if wait: - self._wait_for_service_to_be_in_state(node_name, self.STORAGE_SERVICE, "active (running)") + self._wait_for_service_to_be_in_state( + node_name, self.STORAGE_SERVICE, "active (running)" + ) def run_control_command(self, node_name: str, command: str) -> str: control_endpoint = NEOFS_NETMAP_DICT[node_name]["control"] @@ -161,28 +170,31 @@ class CloudVmStorageServiceHelper: # Put config on storage node host remote_config_path = f"/tmp/{node_name}-config.yaml" remote_config = 'password: ""' - ssh_client.exec_with_confirmation(f"echo '{remote_config}' > {remote_config_path}", [""]) + ssh_client.exec_with_confirmation( + f"echo '{remote_config}' > {remote_config_path}", [""] + ) # Execute command cmd = ( - f'sudo {STORAGE_NODE_BIN_PATH}/neofs-cli {command} --endpoint {control_endpoint} ' - f'--wallet {remote_wallet_path} --config {remote_config_path}' + f"sudo {STORAGE_NODE_BIN_PATH}/neofs-cli {command} --endpoint {control_endpoint} " + f"--wallet {remote_wallet_path} --config {remote_config_path}" ) output = ssh_client.exec_with_confirmation(cmd, [""]) return output.stdout - def _wait_for_service_to_be_in_state(self, node_name: str, service_name: str, - expected_state: str) -> None: + def _wait_for_service_to_be_in_state( + self, node_name: str, service_name: str, expected_state: str + ) -> None: with _create_ssh_client(node_name) as ssh_client: for __attempt in range(10): # Run command to get service status (set --lines=0 to suppress logs output) # Also we don't verify return code, because for an inactive service return code will be 3 - command = f'sudo systemctl status {service_name} --lines=0' + command = f"sudo systemctl status {service_name} --lines=0" output = ssh_client.exec(command, verify=False) if expected_state in output.stdout: return time.sleep(3) - raise AssertionError(f'Service {service_name} is not in {expected_state} state') + raise AssertionError(f"Service {service_name} is not in {expected_state} state") def delete_node_data(self, node_name: str) -> None: with _create_ssh_client(node_name) as ssh_client: @@ -190,16 +202,16 @@ class CloudVmStorageServiceHelper: def get_binaries_version(self, binaries: list = None) -> dict: default_binaries = [ - 'neo-go', - 'neofs-adm', - 'neofs-cli', - 'neofs-http-gw', - 'neofs-ir', - 'neofs-lens', - 'neofs-node', - 'neofs-s3-authmate', - 'neofs-s3-gw', - 'neogo-morph-cn', + "neo-go", + "neofs-adm", + "neofs-cli", + "neofs-http-gw", + "neofs-ir", + "neofs-lens", + "neofs-node", + "neofs-s3-authmate", + "neofs-s3-gw", + "neogo-morph-cn", ] binaries = binaries or default_binaries @@ -208,31 +220,35 @@ class CloudVmStorageServiceHelper: with _create_ssh_client(node_name) as ssh_client: for binary in binaries: try: - out = ssh_client.exec(f'sudo {binary} --version').stdout + out = ssh_client.exec(f"sudo {binary} --version").stdout except AssertionError as err: - logger.error(f'Can not get version for {binary} because of\n{err}') - version_map[binary] = 'Can not get version' + logger.error(f"Can not get version for {binary} because of\n{err}") + version_map[binary] = "Can not get version" continue - version = re.search(r'version[:\s]*v?(.+)', out, re.IGNORECASE) - version = version.group(1).strip() if version else 'Unknown' + version = re.search(r"version[:\s]*v?(.+)", out, re.IGNORECASE) + version = version.group(1).strip() if version else "Unknown" if not version_map.get(binary): version_map[binary] = version else: - assert version_map[binary] == version, \ - f'Expected binary {binary} to have 
identical version on all nodes ' \ - f'(mismatch on node {node_name})' + assert version_map[binary] == version, ( + f"Expected binary {binary} to have identical version on all nodes " + f"(mismatch on node {node_name})" + ) return version_map - def dump_logs(self, directory_path: str, since: Optional[datetime], - until: Optional[datetime]) -> None: + def dump_logs( + self, directory_path: str, since: Optional[datetime], until: Optional[datetime] + ) -> None: for node_name, node_info in NEOFS_NETMAP_DICT.items(): with _create_ssh_client(node_name) as ssh_client: # We do not filter out logs of neofs services, because system logs might contain # information that is useful for troubleshooting - filters = " ".join([ - f"--since '{since:%Y-%m-%d %H:%M:%S}'" if since else "", - f"--until '{until:%Y-%m-%d %H:%M:%S}'" if until else "", - ]) + filters = " ".join( + [ + f"--since '{since:%Y-%m-%d %H:%M:%S}'" if since else "", + f"--until '{until:%Y-%m-%d %H:%M:%S}'" if until else "", + ] + ) result = ssh_client.exec(f"journalctl --no-pager {filters}") logs = result.stdout @@ -250,6 +266,7 @@ class RemoteDevEnvStorageServiceHelper(LocalDevEnvStorageServiceHelper): Most of operations are identical to local devenv, however, any interactions with host resources (files, etc.) require ssh into the remote host machine. """ + def _get_docker_client(self, node_name: str) -> docker.APIClient: # For remote devenv we use docker client that talks to tcp socket 2375: # https://docs.docker.com/engine/reference/commandline/dockerd/#daemon-socket-option @@ -299,12 +316,12 @@ def _create_ssh_client(node_name: str) -> HostClient: def _get_node_host(node_name: str) -> str: if node_name not in NEOFS_NETMAP_DICT: - raise AssertionError(f'Node {node_name} is not found!') + raise AssertionError(f"Node {node_name} is not found!") # We use rpc endpoint to determine host address, because control endpoint # (if it is private) will be a local address on the host machine node_config = NEOFS_NETMAP_DICT.get(node_name) - host = node_config.get('rpc').split(':')[0] + host = node_config.get("rpc").split(":")[0] return host @@ -313,7 +330,7 @@ def _get_storage_container_name(node_name: str) -> str: Converts name of storage node (as it is listed in netmap) into the name of docker container that runs instance of this storage node. """ - return node_name.split('.')[0] + return node_name.split(".")[0] def _get_storage_volume_name(node_name: str) -> str: diff --git a/pytest_tests/helpers/ssh_helper.py b/pytest_tests/helpers/ssh_helper.py index 6a99bf04..5eea5eb3 100644 --- a/pytest_tests/helpers/ssh_helper.py +++ b/pytest_tests/helpers/ssh_helper.py @@ -10,7 +10,7 @@ from time import sleep from typing import ClassVar, Optional import allure -from paramiko import AutoAddPolicy, SFTPClient, SSHClient, SSHException, ssh_exception, RSAKey +from paramiko import AutoAddPolicy, RSAKey, SFTPClient, SSHClient, SSHException, ssh_exception from paramiko.ssh_exception import AuthenticationException @@ -20,34 +20,37 @@ class HostIsNotAvailable(Exception): def __init__(self, ip: str = None, exc: Exception = None): msg = f'Host is not available{f" by ip: {ip}" if ip else ""}' if exc: - msg = f'{msg}. {exc}' + msg = f"{msg}. 
{exc}" super().__init__(msg) def log_command(func): @wraps(func) - def wrapper(host: 'HostClient', command: str, *args, **kwargs): + def wrapper(host: "HostClient", command: str, *args, **kwargs): display_length = 60 short = command.removeprefix("$ProgressPreference='SilentlyContinue'\n") short = short[:display_length] - short += '...' if short != command else '' - with allure.step(f'SSH: {short}'): + short += "..." if short != command else "" + with allure.step(f"SSH: {short}"): logging.info(f'Execute command "{command}" on "{host.ip}"') start_time = datetime.utcnow() cmd_result = func(host, command, *args, **kwargs) end_time = datetime.utcnow() - log_message = f'HOST: {host.ip}\n' \ - f'COMMAND:\n{textwrap.indent(command, " ")}\n' \ - f'RC:\n {cmd_result.rc}\n' \ - f'STDOUT:\n{textwrap.indent(cmd_result.stdout, " ")}\n' \ - f'STDERR:\n{textwrap.indent(cmd_result.stderr, " ")}\n' \ - f'Start / End / Elapsed\t {start_time.time()} / {end_time.time()} / {end_time - start_time}' + log_message = ( + f"HOST: {host.ip}\n" + f'COMMAND:\n{textwrap.indent(command, " ")}\n' + f"RC:\n {cmd_result.rc}\n" + f'STDOUT:\n{textwrap.indent(cmd_result.stdout, " ")}\n' + f'STDERR:\n{textwrap.indent(cmd_result.stderr, " ")}\n' + f"Start / End / Elapsed\t {start_time.time()} / {end_time.time()} / {end_time - start_time}" + ) logging.info(log_message) - allure.attach(log_message, 'SSH command', allure.attachment_type.TEXT) + allure.attach(log_message, "SSH command", allure.attachment_type.TEXT) return cmd_result + return wrapper @@ -65,9 +68,15 @@ class HostClient: TIMEOUT_RESTORE_CONNECTION = 10, 24 - def __init__(self, ip: str, login: str, password: Optional[str] = None, - private_key_path: Optional[str] = None, private_key_passphrase: Optional[str] = None, - init_ssh_client=True) -> None: + def __init__( + self, + ip: str, + login: str, + password: Optional[str] = None, + private_key_path: Optional[str] = None, + private_key_passphrase: Optional[str] = None, + init_ssh_client=True, + ) -> None: self.ip = ip self.login = login self.password = password @@ -83,21 +92,25 @@ class HostClient: return cmd_result @log_command - def exec_with_confirmation(self, cmd: str, confirmation: list, verify=True, timeout=90) -> SSHCommand: + def exec_with_confirmation( + self, cmd: str, confirmation: list, verify=True, timeout=90 + ) -> SSHCommand: ssh_stdin, ssh_stdout, ssh_stderr = self.ssh_client.exec_command(cmd, timeout=timeout) for line in confirmation: - if not line.endswith('\n'): - line = f'{line}\n' + if not line.endswith("\n"): + line = f"{line}\n" try: ssh_stdin.write(line) except OSError as err: - logging.error(f'Got error {err} executing command {cmd}') + logging.error(f"Got error {err} executing command {cmd}") ssh_stdin.close() - output = SSHCommand(stdout=ssh_stdout.read().decode(errors='ignore'), - stderr=ssh_stderr.read().decode(errors='ignore'), - rc=ssh_stdout.channel.recv_exit_status()) + output = SSHCommand( + stdout=ssh_stdout.read().decode(errors="ignore"), + stderr=ssh_stderr.read().decode(errors="ignore"), + rc=ssh_stdout.channel.recv_exit_status(), + ) if verify: - debug_info = f'\nSTDOUT: {output.stdout}\nSTDERR: {output.stderr}\nRC: {output.rc}' + debug_info = f"\nSTDOUT: {output.stdout}\nSTDERR: {output.stderr}\nRC: {output.rc}" assert output.rc == 0, f'Non zero rc from command: "{cmd}"{debug_info}' return output @@ -111,7 +124,7 @@ class HostClient: self.create_connection() @contextmanager - def create_ssh_connection(self) -> 'SSHClient': + def create_ssh_connection(self) -> "SSHClient": if not 
self.ssh_client: self.create_connection() try: @@ -119,38 +132,40 @@ class HostClient: finally: self.drop() - @allure.step('Restore connection') + @allure.step("Restore connection") def restore_ssh_connection(self): retry_time, retry_count = self.TIMEOUT_RESTORE_CONNECTION for _ in range(retry_count): try: self.create_connection() except AssertionError: - logging.warning(f'Host: Cant reach host: {self.ip}.') + logging.warning(f"Host: Cant reach host: {self.ip}.") sleep(retry_time) else: - logging.info(f'Host: Cant reach host: {self.ip}.') + logging.info(f"Host: Cant reach host: {self.ip}.") return - raise AssertionError(f'Host: Cant reach host: {self.ip} after 240 seconds..') + raise AssertionError(f"Host: Cant reach host: {self.ip} after 240 seconds..") - @allure.step('Copy file {host_path_to_file} to local file {path_to_file}') + @allure.step("Copy file {host_path_to_file} to local file {path_to_file}") def copy_file_from_host(self, host_path_to_file: str, path_to_file: str): with self._sftp_client() as sftp_client: sftp_client.get(host_path_to_file, path_to_file) def copy_file_to_host(self, path_to_file: str, host_path_to_file: str): - with allure.step(f'Copy local file {path_to_file} to remote file {host_path_to_file} on host {self.ip}'): + with allure.step( + f"Copy local file {path_to_file} to remote file {host_path_to_file} on host {self.ip}" + ): with self._sftp_client() as sftp_client: sftp_client.put(path_to_file, host_path_to_file) - @allure.step('Save string to remote file {host_path_to_file}') + @allure.step("Save string to remote file {host_path_to_file}") def copy_str_to_host_file(self, string: str, host_path_to_file: str): - with tempfile.NamedTemporaryFile(mode='r+') as temp: + with tempfile.NamedTemporaryFile(mode="r+") as temp: temp.writelines(string) temp.flush() with self._sftp_client() as client: client.put(temp.name, host_path_to_file) - self.exec(f'cat {host_path_to_file}', verify=False) + self.exec(f"cat {host_path_to_file}", verify=False) def create_connection(self, attempts=SSH_CONNECTION_ATTEMPTS): exc_err = None @@ -166,8 +181,10 @@ class HostClient: self.ssh_client.connect( hostname=self.ip, username=self.login, - pkey=RSAKey.from_private_key_file(self.private_key_path, self.private_key_passphrase), - timeout=self.CONNECTION_TIMEOUT + pkey=RSAKey.from_private_key_file( + self.private_key_path, self.private_key_passphrase + ), + timeout=self.CONNECTION_TIMEOUT, ) else: logging.info( @@ -178,25 +195,25 @@ class HostClient: hostname=self.ip, username=self.login, password=self.password, - timeout=self.CONNECTION_TIMEOUT + timeout=self.CONNECTION_TIMEOUT, ) return True except AuthenticationException as auth_err: - logging.error(f'Host: {self.ip}. {auth_err}') + logging.error(f"Host: {self.ip}. {auth_err}") self.drop() raise auth_err except ( - SSHException, - ssh_exception.NoValidConnectionsError, - AttributeError, - socket.timeout, - OSError + SSHException, + ssh_exception.NoValidConnectionsError, + AttributeError, + socket.timeout, + OSError, ) as ssh_err: exc_err = ssh_err self.drop() - logging.error(f'Host: {self.ip}, connection error. {exc_err}') + logging.error(f"Host: {self.ip}, connection error. 
{exc_err}") raise HostIsNotAvailable(self.ip, exc_err) @@ -211,21 +228,21 @@ class HostClient: try: _, stdout, stderr = self.ssh_client.exec_command(cmd, timeout=timeout) return SSHCommand( - stdout=stdout.read().decode(errors='ignore'), - stderr=stderr.read().decode(errors='ignore'), - rc=stdout.channel.recv_exit_status() + stdout=stdout.read().decode(errors="ignore"), + stderr=stderr.read().decode(errors="ignore"), + rc=stdout.channel.recv_exit_status(), ) except ( - SSHException, - TimeoutError, - ssh_exception.NoValidConnectionsError, - ConnectionResetError, - AttributeError, - socket.timeout, + SSHException, + TimeoutError, + ssh_exception.NoValidConnectionsError, + ConnectionResetError, + AttributeError, + socket.timeout, ) as ssh_err: - logging.error(f'Host: {self.ip}, exec command error {ssh_err}') + logging.error(f"Host: {self.ip}, exec command error {ssh_err}") self.create_connection() - raise HostIsNotAvailable(f'Host: {self.ip} is not reachable.') + raise HostIsNotAvailable(f"Host: {self.ip} is not reachable.") @contextmanager def _sftp_client(self) -> SFTPClient: diff --git a/pytest_tests/steps/s3_gate_base.py b/pytest_tests/steps/s3_gate_base.py index b0a0b293..4bdd430d 100644 --- a/pytest_tests/steps/s3_gate_base.py +++ b/pytest_tests/steps/s3_gate_base.py @@ -3,6 +3,7 @@ import os import allure import pytest from python_keywords.container import list_containers + from steps import s3_gate_bucket from steps.aws_cli_client import AwsCliClient @@ -10,29 +11,31 @@ from steps.aws_cli_client import AwsCliClient class TestS3GateBase: s3_client = None - @pytest.fixture(scope='class', autouse=True) - @allure.title('[Class/Autouse]: Create S3 client') + @pytest.fixture(scope="class", autouse=True) + @allure.title("[Class/Autouse]: Create S3 client") def s3_client(self, prepare_wallet_and_deposit, request): wallet = prepare_wallet_and_deposit s3_bearer_rules_file = f"{os.getcwd()}/robot/resources/files/s3_bearer_rules.json" - cid, bucket, access_key_id, secret_access_key, owner_private_key = \ - s3_gate_bucket.init_s3_credentials( - wallet, s3_bearer_rules_file=s3_bearer_rules_file) + ( + cid, + bucket, + access_key_id, + secret_access_key, + owner_private_key, + ) = s3_gate_bucket.init_s3_credentials(wallet, s3_bearer_rules_file=s3_bearer_rules_file) containers_list = list_containers(wallet) - assert cid in containers_list, f'Expected cid {cid} in {containers_list}' + assert cid in containers_list, f"Expected cid {cid} in {containers_list}" - if request.param == 'aws cli': + if request.param == "aws cli": try: client = AwsCliClient(access_key_id, secret_access_key) except Exception as err: - if 'command was not found or was not executable' in str(err): - pytest.skip('AWS CLI was not found') + if "command was not found or was not executable" in str(err): + pytest.skip("AWS CLI was not found") else: - raise RuntimeError( - 'Error on creating instance for AwsCliClient') from err + raise RuntimeError("Error on creating instance for AwsCliClient") from err else: - client = s3_gate_bucket.config_s3_client( - access_key_id, secret_access_key) + client = s3_gate_bucket.config_s3_client(access_key_id, secret_access_key) TestS3GateBase.s3_client = client TestS3GateBase.wallet = wallet diff --git a/pytest_tests/testsuites/acl/conftest.py b/pytest_tests/testsuites/acl/conftest.py index 2c48e260..1858b56b 100644 --- a/pytest_tests/testsuites/acl/conftest.py +++ b/pytest_tests/testsuites/acl/conftest.py @@ -3,9 +3,14 @@ from typing import Dict, List, Optional import allure import pytest - -from 
common import ASSETS_DIR, IR_WALLET_CONFIG, IR_WALLET_PATH, WALLET_CONFIG -from common import STORAGE_WALLET_PATH, STORAGE_WALLET_CONFIG +from common import ( + ASSETS_DIR, + IR_WALLET_CONFIG, + IR_WALLET_PATH, + STORAGE_WALLET_CONFIG, + STORAGE_WALLET_PATH, + WALLET_CONFIG, +) from python_keywords.acl import EACLRole from python_keywords.container import create_container from python_keywords.neofs_verbs import put_object @@ -38,21 +43,15 @@ def wallets(prepare_wallet_and_deposit): yield Wallets( wallets={ EACLRole.USER: [ - Wallet( - wallet_path=prepare_wallet_and_deposit, config_path=WALLET_CONFIG - ) + Wallet(wallet_path=prepare_wallet_and_deposit, config_path=WALLET_CONFIG) ], EACLRole.OTHERS: [ - Wallet( - wallet_path=init_wallet(ASSETS_DIR)[0], config_path=WALLET_CONFIG - ), - Wallet( - wallet_path=init_wallet(ASSETS_DIR)[0], config_path=WALLET_CONFIG - ), + Wallet(wallet_path=init_wallet(ASSETS_DIR)[0], config_path=WALLET_CONFIG), + Wallet(wallet_path=init_wallet(ASSETS_DIR)[0], config_path=WALLET_CONFIG), ], EACLRole.SYSTEM: [ Wallet(wallet_path=IR_WALLET_PATH, config_path=IR_WALLET_CONFIG), - Wallet(wallet_path=STORAGE_WALLET_PATH, config_path=STORAGE_WALLET_CONFIG) + Wallet(wallet_path=STORAGE_WALLET_PATH, config_path=STORAGE_WALLET_CONFIG), ], } ) diff --git a/pytest_tests/testsuites/acl/storage_group/test_storagegroup.py b/pytest_tests/testsuites/acl/storage_group/test_storagegroup.py index 910478f8..694ca2bd 100644 --- a/pytest_tests/testsuites/acl/storage_group/test_storagegroup.py +++ b/pytest_tests/testsuites/acl/storage_group/test_storagegroup.py @@ -1,17 +1,18 @@ import logging +from typing import Optional +import allure import pytest from common import ( + ASSETS_DIR, COMPLEX_OBJ_SIZE, + FREE_STORAGE, IR_WALLET_CONFIG, IR_WALLET_PASS, IR_WALLET_PATH, SIMPLE_OBJ_SIZE, - ASSETS_DIR, - FREE_STORAGE ) from epoch import tick_epoch -from typing import Optional from grpc_responses import OBJECT_ACCESS_DENIED, OBJECT_NOT_FOUND from python_keywords.acl import ( EACLAccess, @@ -36,8 +37,6 @@ from python_keywords.storage_group import ( from python_keywords.utility_keywords import generate_file from wallet import init_wallet -import allure - logger = logging.getLogger("NeoLogger") deposit = 30 @@ -65,9 +64,7 @@ class TestStorageGroup: objects = [oid] storage_group = put_storagegroup(self.main_wallet, cid, objects) - self.expect_success_for_storagegroup_operations( - self.main_wallet, cid, objects, object_size - ) + self.expect_success_for_storagegroup_operations(self.main_wallet, cid, objects, object_size) self.expect_failure_for_storagegroup_operations( self.other_wallet, cid, objects, storage_group ) @@ -81,9 +78,7 @@ class TestStorageGroup: file_path = generate_file(object_size) oid = put_object(self.main_wallet, file_path, cid) objects = [oid] - self.expect_success_for_storagegroup_operations( - self.main_wallet, cid, objects, object_size - ) + self.expect_success_for_storagegroup_operations(self.main_wallet, cid, objects, object_size) self.expect_success_for_storagegroup_operations( self.other_wallet, cid, objects, object_size ) @@ -97,9 +92,7 @@ class TestStorageGroup: file_path = generate_file(object_size) oid = put_object(self.main_wallet, file_path, cid) objects = [oid] - self.expect_success_for_storagegroup_operations( - self.main_wallet, cid, objects, object_size - ) + self.expect_success_for_storagegroup_operations(self.main_wallet, cid, objects, object_size) self.storagegroup_operations_by_other_ro_container( self.main_wallet, self.other_wallet, cid, objects, 
object_size ) @@ -113,14 +106,12 @@ class TestStorageGroup: file_path = generate_file(object_size) oid = put_object(self.main_wallet, file_path, cid) objects = [oid] - self.expect_success_for_storagegroup_operations( - self.main_wallet, cid, objects, object_size - ) + self.expect_success_for_storagegroup_operations(self.main_wallet, cid, objects, object_size) storage_group = put_storagegroup(self.main_wallet, cid, objects) eacl_deny = [ - EACLRule(access=EACLAccess.DENY, role=role, operation=op) - for op in EACLOperation - for role in EACLRole + EACLRule(access=EACLAccess.DENY, role=role, operation=op) + for op in EACLOperation + for role in EACLRole ] set_eacl(self.main_wallet, cid, create_eacl(cid, eacl_deny)) self.expect_failure_for_storagegroup_operations( @@ -208,15 +199,11 @@ class TestStorageGroup: that System key is granted to make all operations except PUT and DELETE. """ if not FREE_STORAGE: - transfer_mainnet_gas( - IR_WALLET_PATH, deposit + 1, wallet_password=IR_WALLET_PASS - ) + transfer_mainnet_gas(IR_WALLET_PATH, deposit + 1, wallet_password=IR_WALLET_PASS) neofs_deposit(IR_WALLET_PATH, deposit, wallet_password=IR_WALLET_PASS) storage_group = put_storagegroup(wallet, cid, obj_list) with pytest.raises(Exception, match=OBJECT_ACCESS_DENIED): - put_storagegroup( - IR_WALLET_PATH, cid, obj_list, wallet_config=IR_WALLET_CONFIG - ) + put_storagegroup(IR_WALLET_PATH, cid, obj_list, wallet_config=IR_WALLET_CONFIG) verify_list_storage_group( IR_WALLET_PATH, cid, storage_group, wallet_config=IR_WALLET_CONFIG ) @@ -229,6 +216,4 @@ class TestStorageGroup: wallet_config=IR_WALLET_CONFIG, ) with pytest.raises(Exception, match=OBJECT_ACCESS_DENIED): - delete_storagegroup( - IR_WALLET_PATH, cid, storage_group, wallet_config=IR_WALLET_CONFIG - ) + delete_storagegroup(IR_WALLET_PATH, cid, storage_group, wallet_config=IR_WALLET_CONFIG) diff --git a/pytest_tests/testsuites/acl/test_acl.py b/pytest_tests/testsuites/acl/test_acl.py index 62fc1bd6..06a7ab1e 100644 --- a/pytest_tests/testsuites/acl/test_acl.py +++ b/pytest_tests/testsuites/acl/test_acl.py @@ -1,6 +1,5 @@ import allure import pytest - from python_keywords.acl import EACLRole from python_keywords.container import create_container from python_keywords.container_access import ( @@ -31,9 +30,7 @@ class TestACLBasic: def private_container(self, wallets): user_wallet = wallets.get_wallet() with allure.step("Create private container"): - cid_private = create_container( - user_wallet.wallet_path, basic_acl=PRIVATE_ACL_F - ) + cid_private = create_container(user_wallet.wallet_path, basic_acl=PRIVATE_ACL_F) yield cid_private @@ -44,9 +41,7 @@ class TestACLBasic: def read_only_container(self, wallets): user_wallet = wallets.get_wallet() with allure.step("Create public readonly container"): - cid_read_only = create_container( - user_wallet.wallet_path, basic_acl=READONLY_ACL_F - ) + cid_read_only = create_container(user_wallet.wallet_path, basic_acl=READONLY_ACL_F) yield cid_read_only @@ -78,12 +73,8 @@ class TestACLBasic: attributes={"created": "other"}, ) with allure.step(f"Check {desc} has full access to public container"): - check_full_access_to_container( - wallet.wallet_path, cid, owner_object_oid, file_path - ) - check_full_access_to_container( - wallet.wallet_path, cid, other_object_oid, file_path - ) + check_full_access_to_container(wallet.wallet_path, cid, owner_object_oid, file_path) + check_full_access_to_container(wallet.wallet_path, cid, other_object_oid, file_path) @allure.title("Test basic ACL on private container") def 
test_basic_acl_private(self, wallets, private_container, file_path): @@ -97,9 +88,7 @@ class TestACLBasic: owner_object_oid = put_object(user_wallet.wallet_path, file_path, cid) with allure.step("Check only owner has full access to private container"): - with allure.step( - "Check no one except owner has access to operations with container" - ): + with allure.step("Check no one except owner has access to operations with container"): check_no_access_to_container( other_wallet.wallet_path, cid, owner_object_oid, file_path ) @@ -121,14 +110,8 @@ class TestACLBasic: with allure.step("Add test objects to container"): object_oid = put_object(user_wallet.wallet_path, file_path, cid) - with allure.step( - "Check other has read-only access to operations with container" - ): - check_read_only_container( - other_wallet.wallet_path, cid, object_oid, file_path - ) + with allure.step("Check other has read-only access to operations with container"): + check_read_only_container(other_wallet.wallet_path, cid, object_oid, file_path) with allure.step("Check owner has full access to public container"): - check_full_access_to_container( - user_wallet.wallet_path, cid, object_oid, file_path - ) + check_full_access_to_container(user_wallet.wallet_path, cid, object_oid, file_path) diff --git a/pytest_tests/testsuites/acl/test_bearer.py b/pytest_tests/testsuites/acl/test_bearer.py index 93e5636a..787b004f 100644 --- a/pytest_tests/testsuites/acl/test_bearer.py +++ b/pytest_tests/testsuites/acl/test_bearer.py @@ -1,6 +1,5 @@ import allure import pytest - from python_keywords.acl import ( EACLAccess, EACLOperation, @@ -24,16 +23,12 @@ from python_keywords.container_access import ( class TestACLBearer: @pytest.mark.parametrize("role", [EACLRole.USER, EACLRole.OTHERS]) def test_bearer_token_operations(self, wallets, eacl_container_with_objects, role): - allure.dynamic.title( - f"Testcase to validate NeoFS operations with {role.value} BearerToken" - ) + allure.dynamic.title(f"Testcase to validate NeoFS operations with {role.value} BearerToken") cid, objects_oids, file_path = eacl_container_with_objects user_wallet = wallets.get_wallet() deny_wallet = wallets.get_wallet(role) - with allure.step( - f"Check {role.value} has full access to container without bearer token" - ): + with allure.step(f"Check {role.value} has full access to container without bearer token"): check_full_access_to_container( deny_wallet.wallet_path, cid, @@ -44,16 +39,13 @@ class TestACLBearer: with allure.step(f"Set deny all operations for {role.value} via eACL"): eacl = [ - EACLRule(access=EACLAccess.DENY, role=role, operation=op) - for op in EACLOperation + EACLRule(access=EACLAccess.DENY, role=role, operation=op) for op in EACLOperation ] eacl_file = create_eacl(cid, eacl) set_eacl(user_wallet.wallet_path, cid, eacl_file) wait_for_cache_expired() - with allure.step( - f"Create bearer token for {role.value} with all operations allowed" - ): + with allure.step(f"Create bearer token for {role.value} with all operations allowed"): bearer_token = form_bearertoken_file( user_wallet.wallet_path, cid, @@ -88,8 +80,7 @@ class TestACLBearer: with allure.step(f"Set allow all operations for {role.value} via eACL"): eacl = [ - EACLRule(access=EACLAccess.ALLOW, role=role, operation=op) - for op in EACLOperation + EACLRule(access=EACLAccess.ALLOW, role=role, operation=op) for op in EACLOperation ] eacl_file = create_eacl(cid, eacl) set_eacl(user_wallet.wallet_path, cid, eacl_file) @@ -107,9 +98,7 @@ class TestACLBearer: ) @allure.title("BearerToken Operations 
for compound Operations") - def test_bearer_token_compound_operations( - self, wallets, eacl_container_with_objects - ): + def test_bearer_token_compound_operations(self, wallets, eacl_container_with_objects): cid, objects_oids, file_path = eacl_container_with_objects user_wallet = wallets.get_wallet() other_wallet = wallets.get_wallet(role=EACLRole.OTHERS) @@ -140,26 +129,19 @@ class TestACLBearer: deny_map_with_bearer = { EACLRole.USER: [ - op - for op in deny_map[EACLRole.USER] - if op not in bearer_map[EACLRole.USER] + op for op in deny_map[EACLRole.USER] if op not in bearer_map[EACLRole.USER] ], EACLRole.OTHERS: [ - op - for op in deny_map[EACLRole.OTHERS] - if op not in bearer_map[EACLRole.OTHERS] + op for op in deny_map[EACLRole.OTHERS] if op not in bearer_map[EACLRole.OTHERS] ], } eacl_deny = [] for role, operations in deny_map.items(): eacl_deny += [ - EACLRule(access=EACLAccess.DENY, role=role, operation=op) - for op in operations + EACLRule(access=EACLAccess.DENY, role=role, operation=op) for op in operations ] - set_eacl( - user_wallet.wallet_path, cid, eacl_table_path=create_eacl(cid, eacl_deny) - ) + set_eacl(user_wallet.wallet_path, cid, eacl_table_path=create_eacl(cid, eacl_deny)) wait_for_cache_expired() with allure.step("Check rule consistency without bearer"): @@ -194,9 +176,7 @@ class TestACLBearer: user_wallet.wallet_path, cid, [ - EACLRule( - operation=op, access=EACLAccess.ALLOW, role=EACLRole.OTHERS - ) + EACLRule(operation=op, access=EACLAccess.ALLOW, role=EACLRole.OTHERS) for op in bearer_map[EACLRole.OTHERS] ], ) diff --git a/pytest_tests/testsuites/acl/test_eacl.py b/pytest_tests/testsuites/acl/test_eacl.py index 2c4de11c..470cdd18 100644 --- a/pytest_tests/testsuites/acl/test_eacl.py +++ b/pytest_tests/testsuites/acl/test_eacl.py @@ -1,6 +1,5 @@ import allure import pytest - from common import NEOFS_NETMAP_DICT from failover_utils import wait_object_replication_on_nodes from python_keywords.acl import ( @@ -20,12 +19,12 @@ from python_keywords.container_access import ( from python_keywords.neofs_verbs import put_object from python_keywords.node_management import drop_object from python_keywords.object_access import ( - can_get_object, - can_put_object, can_delete_object, can_get_head_object, + can_get_object, can_get_range_hash_of_object, can_get_range_of_object, + can_put_object, can_search_object, ) from wellknown_acl import PUBLIC_ACL diff --git a/pytest_tests/testsuites/acl/test_eacl_filters.py b/pytest_tests/testsuites/acl/test_eacl_filters.py index 8d85ef34..c541bc35 100644 --- a/pytest_tests/testsuites/acl/test_eacl_filters.py +++ b/pytest_tests/testsuites/acl/test_eacl_filters.py @@ -1,6 +1,5 @@ import allure import pytest - from python_keywords.acl import ( EACLAccess, EACLFilter, @@ -21,11 +20,7 @@ from python_keywords.container_access import ( check_no_access_to_container, ) from python_keywords.neofs_verbs import put_object -from python_keywords.object_access import ( - can_get_head_object, - can_get_object, - can_put_object, -) +from python_keywords.object_access import can_get_head_object, can_get_object, can_put_object from wellknown_acl import PUBLIC_ACL @@ -112,12 +107,8 @@ class TestEACLFilters: @pytest.mark.parametrize( "match_type", [EACLMatchType.STRING_EQUAL, EACLMatchType.STRING_NOT_EQUAL] ) - def test_extended_acl_filters_request( - self, wallets, eacl_container_with_objects, match_type - ): - allure.dynamic.title( - f"Validate NeoFS operations with request filter: {match_type.name}" - ) + def test_extended_acl_filters_request(self, 
wallets, eacl_container_with_objects, match_type): + allure.dynamic.title(f"Validate NeoFS operations with request filter: {match_type.name}") user_wallet = wallets.get_wallet() other_wallet = wallets.get_wallet(EACLRole.OTHERS) ( @@ -147,14 +138,10 @@ class TestEACLFilters: # is STRING_EQUAL, then requests with "check_key=OTHER_ATTRIBUTE" will be allowed while # requests with "check_key=ATTRIBUTE" will be denied, and vice versa allow_headers = ( - self.OTHER_ATTRIBUTE - if match_type == EACLMatchType.STRING_EQUAL - else self.ATTRIBUTE + self.OTHER_ATTRIBUTE if match_type == EACLMatchType.STRING_EQUAL else self.ATTRIBUTE ) deny_headers = ( - self.ATTRIBUTE - if match_type == EACLMatchType.STRING_EQUAL - else self.OTHER_ATTRIBUTE + self.ATTRIBUTE if match_type == EACLMatchType.STRING_EQUAL else self.OTHER_ATTRIBUTE ) # We test on 3 groups of objects with various headers, # but eACL rule should ignore object headers and @@ -164,12 +151,8 @@ class TestEACLFilters: objects_with_other_header, objects_without_header, ): - with allure.step( - "Check other has full access when sending request without headers" - ): - check_full_access_to_container( - other_wallet.wallet_path, cid, oid.pop(), file_path - ) + with allure.step("Check other has full access when sending request without headers"): + check_full_access_to_container(other_wallet.wallet_path, cid, oid.pop(), file_path) with allure.step( "Check other has full access when sending request with allowed headers" @@ -182,9 +165,7 @@ class TestEACLFilters: xhdr=allow_headers, ) - with allure.step( - "Check other has no access when sending request with denied headers" - ): + with allure.step("Check other has no access when sending request with denied headers"): check_no_access_to_container( other_wallet.wallet_path, cid, @@ -201,9 +182,7 @@ class TestEACLFilters: user_wallet.wallet_path, cid, [ - EACLRule( - operation=op, access=EACLAccess.ALLOW, role=EACLRole.OTHERS - ) + EACLRule(operation=op, access=EACLAccess.ALLOW, role=EACLRole.OTHERS) for op in EACLOperation ], ) @@ -265,9 +244,7 @@ class TestEACLFilters: # but eACL rule should ignore request headers and validate # only object headers for xhdr in (self.ATTRIBUTE, self.OTHER_ATTRIBUTE, None): - with allure.step( - "Check other have full access to objects without attributes" - ): + with allure.step("Check other have full access to objects without attributes"): check_full_access_to_container( other_wallet.wallet_path, cid, @@ -276,9 +253,7 @@ class TestEACLFilters: xhdr=xhdr, ) - with allure.step( - "Check other have full access to objects without deny attribute" - ): + with allure.step("Check other have full access to objects without deny attribute"): check_full_access_to_container( other_wallet.wallet_path, cid, @@ -287,9 +262,7 @@ class TestEACLFilters: xhdr=xhdr, ) - with allure.step( - "Check other have no access to objects with deny attribute" - ): + with allure.step("Check other have no access to objects with deny attribute"): with pytest.raises(AssertionError): assert can_get_head_object( other_wallet.wallet_path, cid, deny_objects[0], xhdr=xhdr @@ -328,9 +301,7 @@ class TestEACLFilters: ) allow_attribute = ( - self.OTHER_ATTRIBUTE - if match_type == EACLMatchType.STRING_EQUAL - else self.ATTRIBUTE + self.OTHER_ATTRIBUTE if match_type == EACLMatchType.STRING_EQUAL else self.ATTRIBUTE ) with allure.step("Check other can PUT objects without denied attribute"): assert can_put_object( @@ -339,9 +310,7 @@ class TestEACLFilters: assert can_put_object(other_wallet.wallet_path, cid, file_path) 
deny_attribute = ( - self.ATTRIBUTE - if match_type == EACLMatchType.STRING_EQUAL - else self.OTHER_ATTRIBUTE + self.ATTRIBUTE if match_type == EACLMatchType.STRING_EQUAL else self.OTHER_ATTRIBUTE ) with allure.step("Check other can not PUT objects with denied attribute"): with pytest.raises(AssertionError): @@ -432,8 +401,7 @@ class TestEACLFilters: assert can_put_object(other_wallet.wallet_path, cid, file_path) with allure.step( - "Check other can get and put objects without attributes " - "and using bearer token" + "Check other can get and put objects without attributes and using bearer token" ): bearer_token_other = form_bearertoken_file( user_wallet.wallet_path, @@ -464,9 +432,7 @@ class TestEACLFilters: other_wallet.wallet_path, cid, file_path, bearer=bearer_token_other ) - with allure.step( - f"Check other can get objects with attributes matching the filter" - ): + with allure.step(f"Check other can get objects with attributes matching the filter"): oid = allow_objects.pop() assert can_get_head_object(other_wallet.wallet_path, cid, oid) assert can_get_object(other_wallet.wallet_path, cid, oid, file_path) @@ -474,17 +440,11 @@ class TestEACLFilters: other_wallet.wallet_path, cid, file_path, attributes=allow_attribute ) - with allure.step( - "Check other cannot get objects without attributes matching the filter" - ): + with allure.step("Check other cannot get objects without attributes matching the filter"): with pytest.raises(AssertionError): - assert can_get_head_object( - other_wallet.wallet_path, cid, deny_objects[0] - ) + assert can_get_head_object(other_wallet.wallet_path, cid, deny_objects[0]) with pytest.raises(AssertionError): - assert can_get_object( - other_wallet.wallet_path, cid, deny_objects[0], file_path - ) + assert can_get_object(other_wallet.wallet_path, cid, deny_objects[0], file_path) with pytest.raises(AssertionError): assert can_put_object( other_wallet.wallet_path, cid, file_path, attributes=deny_attribute diff --git a/pytest_tests/testsuites/container/test_container.py b/pytest_tests/testsuites/container/test_container.py index 73d57965..31afbd17 100644 --- a/pytest_tests/testsuites/container/test_container.py +++ b/pytest_tests/testsuites/container/test_container.py @@ -3,75 +3,87 @@ import json import allure import pytest from epoch import tick_epoch -from python_keywords.container import (create_container, delete_container, get_container, list_containers, - wait_for_container_creation, wait_for_container_deletion) +from python_keywords.container import ( + create_container, + delete_container, + get_container, + list_containers, + wait_for_container_creation, + wait_for_container_deletion, +) from utility import placement_policy_from_container from wellknown_acl import PRIVATE_ACL_F -@pytest.mark.parametrize('name', ['', 'test-container'], ids=['No name', 'Set particular name']) +@pytest.mark.parametrize("name", ["", "test-container"], ids=["No name", "Set particular name"]) @pytest.mark.sanity @pytest.mark.container def test_container_creation(prepare_wallet_and_deposit, name): - scenario_title = f'with name {name}' if name else 'without name' - allure.dynamic.title(f'User can create container {scenario_title}') + scenario_title = f"with name {name}" if name else "without name" + allure.dynamic.title(f"User can create container {scenario_title}") wallet = prepare_wallet_and_deposit with open(wallet) as file: json_wallet = json.load(file) - placement_rule = 'REP 2 IN X CBF 1 SELECT 2 FROM * AS X' + placement_rule = "REP 2 IN X CBF 1 SELECT 2 FROM * AS X" cid 
= create_container(wallet, rule=placement_rule, name=name) containers = list_containers(wallet) - assert cid in containers, f'Expected container {cid} in containers: {containers}' + assert cid in containers, f"Expected container {cid} in containers: {containers}" container_info: str = get_container(wallet, cid, json_mode=False) container_info = container_info.casefold() # To ignore case when comparing with expected values info_to_check = { - f'basic ACL: {PRIVATE_ACL_F} (private)', - f'owner ID: {json_wallet.get("accounts")[0].get("address")}', - f'container ID: {cid}', + f"basic ACL: {PRIVATE_ACL_F} (private)", + f"owner ID: {json_wallet.get('accounts')[0].get('address')}", + f"container ID: {cid}", } if name: - info_to_check.add(f'Name={name}') + info_to_check.add(f"Name={name}") - with allure.step('Check container has correct information'): + with allure.step("Check container has correct information"): expected_policy = placement_rule.casefold() actual_policy = placement_policy_from_container(container_info) - assert actual_policy == expected_policy, \ - f'Expected policy\n{expected_policy} but got policy\n{actual_policy}' + assert ( + actual_policy == expected_policy + ), f"Expected policy\n{expected_policy} but got policy\n{actual_policy}" for info in info_to_check: expected_info = info.casefold() - assert expected_info in container_info, \ - f'Expected {expected_info} in container info:\n{container_info}' + assert ( + expected_info in container_info + ), f"Expected {expected_info} in container info:\n{container_info}" - with allure.step('Delete container and check it was deleted'): + with allure.step("Delete container and check it was deleted"): delete_container(wallet, cid) tick_epoch() wait_for_container_deletion(wallet, cid) -@allure.title('Parallel container creation and deletion') +@allure.title("Parallel container creation and deletion") @pytest.mark.sanity @pytest.mark.container def test_container_creation_deletion_parallel(prepare_wallet_and_deposit): containers_count = 3 wallet = prepare_wallet_and_deposit - placement_rule = 'REP 2 IN X CBF 1 SELECT 2 FROM * AS X' + placement_rule = "REP 2 IN X CBF 1 SELECT 2 FROM * AS X" cids: list[str] = [] - with allure.step(f'Create {containers_count} containers'): + with allure.step(f"Create {containers_count} containers"): for _ in range(containers_count): - cids.append(create_container(wallet, rule=placement_rule, await_mode=False, wait_for_creation=False)) + cids.append( + create_container( + wallet, rule=placement_rule, await_mode=False, wait_for_creation=False + ) + ) - with allure.step(f'Wait for containers occur in container list'): + with allure.step(f"Wait for containers occur in container list"): for cid in cids: wait_for_container_creation(wallet, cid, sleep_interval=containers_count) - with allure.step('Delete containers and check they were deleted'): + with allure.step("Delete containers and check they were deleted"): for cid in cids: delete_container(wallet, cid) tick_epoch() diff --git a/pytest_tests/testsuites/failovers/test_failover_network.py b/pytest_tests/testsuites/failovers/test_failover_network.py index b8efebf7..f58b8966 100644 --- a/pytest_tests/testsuites/failovers/test_failover_network.py +++ b/pytest_tests/testsuites/failovers/test_failover_network.py @@ -4,8 +4,11 @@ from time import sleep import allure import pytest -from common import (STORAGE_NODE_SSH_PASSWORD, STORAGE_NODE_SSH_PRIVATE_KEY_PATH, - STORAGE_NODE_SSH_USER) +from common import ( + STORAGE_NODE_SSH_PASSWORD, + 
STORAGE_NODE_SSH_PRIVATE_KEY_PATH, + STORAGE_NODE_SSH_USER, +) from failover_utils import wait_all_storage_node_returned, wait_object_replication_on_nodes from iptables_helper import IpTablesHelper from python_keywords.container import create_container @@ -14,24 +17,27 @@ from python_keywords.utility_keywords import generate_file, get_file_hash from ssh_helper import HostClient from wellknown_acl import PUBLIC_ACL -logger = logging.getLogger('NeoLogger') -STORAGE_NODE_COMMUNICATION_PORT = '8080' -STORAGE_NODE_COMMUNICATION_PORT_TLS = '8082' +logger = logging.getLogger("NeoLogger") +STORAGE_NODE_COMMUNICATION_PORT = "8080" +STORAGE_NODE_COMMUNICATION_PORT_TLS = "8082" PORTS_TO_BLOCK = [STORAGE_NODE_COMMUNICATION_PORT, STORAGE_NODE_COMMUNICATION_PORT_TLS] blocked_hosts = [] @pytest.fixture(autouse=True) -@allure.step('Restore network') +@allure.step("Restore network") def restore_network(): yield not_empty = len(blocked_hosts) != 0 for host in list(blocked_hosts): - with allure.step(f'Start storage node {host}'): - client = HostClient(ip=host, login=STORAGE_NODE_SSH_USER, - password=STORAGE_NODE_SSH_PASSWORD, - private_key_path=STORAGE_NODE_SSH_PRIVATE_KEY_PATH) + with allure.step(f"Start storage node {host}"): + client = HostClient( + ip=host, + login=STORAGE_NODE_SSH_USER, + password=STORAGE_NODE_SSH_PASSWORD, + private_key_path=STORAGE_NODE_SSH_PRIVATE_KEY_PATH, + ) with client.create_ssh_connection(): IpTablesHelper.restore_input_traffic_to_port(client, PORTS_TO_BLOCK) blocked_hosts.remove(host) @@ -39,7 +45,7 @@ def restore_network(): wait_all_storage_node_returned() -@allure.title('Block Storage node traffic') +@allure.title("Block Storage node traffic") @pytest.mark.failover @pytest.mark.failover_net def test_block_storage_node_traffic(prepare_wallet_and_deposit, cloud_infrastructure_check): @@ -47,7 +53,7 @@ def test_block_storage_node_traffic(prepare_wallet_and_deposit, cloud_infrastruc Block storage nodes traffic using iptables and wait for replication for objects. 
""" wallet = prepare_wallet_and_deposit - placement_rule = 'REP 2 IN X CBF 2 SELECT 2 FROM * AS X' + placement_rule = "REP 2 IN X CBF 2 SELECT 2 FROM * AS X" excluded_nodes = [] wakeup_node_timeout = 10 # timeout to let nodes detect that traffic has blocked nodes_to_block_count = 2 @@ -57,24 +63,29 @@ def test_block_storage_node_traffic(prepare_wallet_and_deposit, cloud_infrastruc oid = put_object(wallet, source_file_path, cid) nodes = wait_object_replication_on_nodes(wallet, cid, oid, 2) - logger.info(f'Nodes are {nodes}') - random_nodes = [(node, node.split(':')[0]) for node in nodes] + logger.info(f"Nodes are {nodes}") + random_nodes = [(node, node.split(":")[0]) for node in nodes] if nodes_to_block_count > len(nodes): - random_nodes = [(node, node.split(':')[0]) for node in choices(nodes, k=2)] + random_nodes = [(node, node.split(":")[0]) for node in choices(nodes, k=2)] for random_node, random_node_ip in random_nodes: - client = HostClient(ip=random_node_ip, login=STORAGE_NODE_SSH_USER, - password=STORAGE_NODE_SSH_PASSWORD, - private_key_path=STORAGE_NODE_SSH_PRIVATE_KEY_PATH) + client = HostClient( + ip=random_node_ip, + login=STORAGE_NODE_SSH_USER, + password=STORAGE_NODE_SSH_PASSWORD, + private_key_path=STORAGE_NODE_SSH_PRIVATE_KEY_PATH, + ) - with allure.step(f'Block incoming traffic for node {random_node} on port {PORTS_TO_BLOCK}'): + with allure.step(f"Block incoming traffic for node {random_node} on port {PORTS_TO_BLOCK}"): with client.create_ssh_connection(): IpTablesHelper.drop_input_traffic_to_port(client, PORTS_TO_BLOCK) blocked_hosts.append(random_node_ip) excluded_nodes.append(random_node) sleep(wakeup_node_timeout) - new_nodes = wait_object_replication_on_nodes(wallet, cid, oid, 2, excluded_nodes=excluded_nodes) + new_nodes = wait_object_replication_on_nodes( + wallet, cid, oid, 2, excluded_nodes=excluded_nodes + ) assert random_node not in new_nodes @@ -82,11 +93,16 @@ def test_block_storage_node_traffic(prepare_wallet_and_deposit, cloud_infrastruc assert get_file_hash(source_file_path) == get_file_hash(got_file_path) for random_node, random_node_ip in random_nodes: - client = HostClient(ip=random_node_ip, login=STORAGE_NODE_SSH_USER, - password=STORAGE_NODE_SSH_PASSWORD, - private_key_path=STORAGE_NODE_SSH_PRIVATE_KEY_PATH) + client = HostClient( + ip=random_node_ip, + login=STORAGE_NODE_SSH_USER, + password=STORAGE_NODE_SSH_PASSWORD, + private_key_path=STORAGE_NODE_SSH_PRIVATE_KEY_PATH, + ) - with allure.step(f'Unblock incoming traffic for node {random_node} on port {PORTS_TO_BLOCK}'): + with allure.step( + f"Unblock incoming traffic for node {random_node} on port {PORTS_TO_BLOCK}" + ): with client.create_ssh_connection(): IpTablesHelper.restore_input_traffic_to_port(client, PORTS_TO_BLOCK) blocked_hosts.remove(random_node_ip) diff --git a/pytest_tests/testsuites/failovers/test_failover_storage.py b/pytest_tests/testsuites/failovers/test_failover_storage.py index e07492f5..80a042d3 100644 --- a/pytest_tests/testsuites/failovers/test_failover_storage.py +++ b/pytest_tests/testsuites/failovers/test_failover_storage.py @@ -2,7 +2,11 @@ import logging import allure import pytest -from common import STORAGE_NODE_SSH_PASSWORD, STORAGE_NODE_SSH_PRIVATE_KEY_PATH, STORAGE_NODE_SSH_USER +from common import ( + STORAGE_NODE_SSH_PASSWORD, + STORAGE_NODE_SSH_PRIVATE_KEY_PATH, + STORAGE_NODE_SSH_USER, +) from failover_utils import wait_all_storage_node_returned, wait_object_replication_on_nodes from python_keywords.container import create_container from python_keywords.neofs_verbs 
import get_object, put_object @@ -11,52 +15,61 @@ from sbercloud_helper import SberCloud, SberCloudConfig from ssh_helper import HostClient from wellknown_acl import PUBLIC_ACL -logger = logging.getLogger('NeoLogger') +logger = logging.getLogger("NeoLogger") stopped_hosts = [] -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def sbercloud_client(): - with allure.step('Connect to SberCloud'): + with allure.step("Connect to SberCloud"): try: config = SberCloudConfig.from_env() yield SberCloud(config) except Exception as err: - pytest.fail(f'SberCloud infrastructure not available. Error\n{err}') + pytest.fail(f"SberCloud infrastructure not available. Error\n{err}") @pytest.fixture(autouse=True) -@allure.step('Return all storage nodes') +@allure.step("Return all storage nodes") def return_all_storage_nodes_fixture(sbercloud_client): yield return_all_storage_nodes(sbercloud_client) def panic_reboot_host(ip: str = None): - ssh = HostClient(ip=ip, login=STORAGE_NODE_SSH_USER, - password=STORAGE_NODE_SSH_PASSWORD, - private_key_path=STORAGE_NODE_SSH_PRIVATE_KEY_PATH) + ssh = HostClient( + ip=ip, + login=STORAGE_NODE_SSH_USER, + password=STORAGE_NODE_SSH_PASSWORD, + private_key_path=STORAGE_NODE_SSH_PRIVATE_KEY_PATH, + ) ssh.exec('sudo sh -c "echo 1 > /proc/sys/kernel/sysrq"') - ssh_stdin, _, _ = ssh.ssh_client.exec_command('sudo sh -c "echo b > /proc/sysrq-trigger"', timeout=1) + ssh_stdin, _, _ = ssh.ssh_client.exec_command( + 'sudo sh -c "echo b > /proc/sysrq-trigger"', timeout=1 + ) ssh_stdin.close() def return_all_storage_nodes(sbercloud_client: SberCloud) -> None: for host in list(stopped_hosts): - with allure.step(f'Start storage node {host}'): - sbercloud_client.start_node(node_ip=host.split(':')[-2]) + with allure.step(f"Start storage node {host}"): + sbercloud_client.start_node(node_ip=host.split(":")[-2]) stopped_hosts.remove(host) wait_all_storage_node_returned() -@allure.title('Lost and returned nodes') -@pytest.mark.parametrize('hard_reboot', [True, False]) +@allure.title("Lost and returned nodes") +@pytest.mark.parametrize("hard_reboot", [True, False]) @pytest.mark.failover -def test_lost_storage_node(prepare_wallet_and_deposit, sbercloud_client: SberCloud, cloud_infrastructure_check, - hard_reboot: bool): +def test_lost_storage_node( + prepare_wallet_and_deposit, + sbercloud_client: SberCloud, + cloud_infrastructure_check, + hard_reboot: bool, +): wallet = prepare_wallet_and_deposit - placement_rule = 'REP 2 IN X CBF 2 SELECT 2 FROM * AS X' + placement_rule = "REP 2 IN X CBF 2 SELECT 2 FROM * AS X" source_file_path = generate_file() cid = create_container(wallet, rule=placement_rule, basic_acl=PUBLIC_ACL) oid = put_object(wallet, source_file_path, cid) @@ -65,15 +78,15 @@ def test_lost_storage_node(prepare_wallet_and_deposit, sbercloud_client: SberClo new_nodes = [] for node in nodes: stopped_hosts.append(node) - with allure.step(f'Stop storage node {node}'): - sbercloud_client.stop_node(node_ip=node.split(':')[-2], hard=hard_reboot) + with allure.step(f"Stop storage node {node}"): + sbercloud_client.stop_node(node_ip=node.split(":")[-2], hard=hard_reboot) new_nodes = wait_object_replication_on_nodes(wallet, cid, oid, 2, excluded_nodes=[node]) assert not [node for node in nodes if node in new_nodes] got_file_path = get_object(wallet, cid, oid, endpoint=new_nodes[0]) assert get_file_hash(source_file_path) == get_file_hash(got_file_path) - with allure.step(f'Return storage nodes'): + with allure.step(f"Return storage nodes"): 
return_all_storage_nodes(sbercloud_client) new_nodes = wait_object_replication_on_nodes(wallet, cid, oid, 2) @@ -82,36 +95,42 @@ def test_lost_storage_node(prepare_wallet_and_deposit, sbercloud_client: SberClo assert get_file_hash(source_file_path) == get_file_hash(got_file_path) -@allure.title('Panic storage node(s)') -@pytest.mark.parametrize('sequence', [True, False]) +@allure.title("Panic storage node(s)") +@pytest.mark.parametrize("sequence", [True, False]) @pytest.mark.failover_panic @pytest.mark.failover -def test_panic_storage_node(prepare_wallet_and_deposit, cloud_infrastructure_check, - sequence: bool): +def test_panic_storage_node(prepare_wallet_and_deposit, cloud_infrastructure_check, sequence: bool): wallet = prepare_wallet_and_deposit - placement_rule = 'REP 2 IN X CBF 2 SELECT 2 FROM * AS X' + placement_rule = "REP 2 IN X CBF 2 SELECT 2 FROM * AS X" source_file_path = generate_file() cid = create_container(wallet, rule=placement_rule, basic_acl=PUBLIC_ACL) oid = put_object(wallet, source_file_path, cid) nodes = wait_object_replication_on_nodes(wallet, cid, oid, 2) new_nodes: list[str] = [] - allure.attach('\n'.join(nodes), 'Current nodes with object', allure.attachment_type.TEXT) + allure.attach("\n".join(nodes), "Current nodes with object", allure.attachment_type.TEXT) for node in nodes: - with allure.step(f'Hard reboot host {node} via magic SysRq option'): - panic_reboot_host(ip=node.split(':')[-2]) + with allure.step(f"Hard reboot host {node} via magic SysRq option"): + panic_reboot_host(ip=node.split(":")[-2]) if sequence: try: - new_nodes = wait_object_replication_on_nodes(wallet, cid, oid, 2, excluded_nodes=[node]) + new_nodes = wait_object_replication_on_nodes( + wallet, cid, oid, 2, excluded_nodes=[node] + ) except AssertionError: new_nodes = wait_object_replication_on_nodes(wallet, cid, oid, 2) - allure.attach('\n'.join(new_nodes), f'Nodes with object after {node} fail', - allure.attachment_type.TEXT) + allure.attach( + "\n".join(new_nodes), + f"Nodes with object after {node} fail", + allure.attachment_type.TEXT, + ) if not sequence: new_nodes = wait_object_replication_on_nodes(wallet, cid, oid, 2) - allure.attach('\n'.join(new_nodes), 'Nodes with object after nodes fail', allure.attachment_type.TEXT) + allure.attach( + "\n".join(new_nodes), "Nodes with object after nodes fail", allure.attachment_type.TEXT + ) got_file_path = get_object(wallet, cid, oid, endpoint=new_nodes[0]) assert get_file_hash(source_file_path) == get_file_hash(got_file_path) diff --git a/pytest_tests/testsuites/object/test_object_api.py b/pytest_tests/testsuites/object/test_object_api.py index bbd512b3..894b881f 100644 --- a/pytest_tests/testsuites/object/test_object_api.py +++ b/pytest_tests/testsuites/object/test_object_api.py @@ -7,82 +7,98 @@ from common import COMPLEX_OBJ_SIZE, SIMPLE_OBJ_SIZE from container import create_container from epoch import get_epoch, tick_epoch from grpc_responses import OBJECT_ALREADY_REMOVED, OBJECT_NOT_FOUND, error_matches_status -from python_keywords.neofs_verbs import (delete_object, get_object, get_range, get_range_hash, head_object, put_object, - search_object) +from python_keywords.neofs_verbs import ( + delete_object, + get_object, + get_range, + get_range_hash, + head_object, + put_object, + search_object, +) from python_keywords.storage_policy import get_simple_object_copies from python_keywords.utility_keywords import generate_file, get_file_hash from tombstone import verify_head_tombstone from utility import get_file_content, 
wait_for_gc_pass_on_storage_nodes -logger = logging.getLogger('NeoLogger') +logger = logging.getLogger("NeoLogger") CLEANUP_TIMEOUT = 10 -@allure.title('Test native object API') +@allure.title("Test native object API") @pytest.mark.sanity @pytest.mark.grpc_api -@pytest.mark.parametrize('object_size', [SIMPLE_OBJ_SIZE, COMPLEX_OBJ_SIZE], ids=['simple object', 'complex object']) +@pytest.mark.parametrize( + "object_size", [SIMPLE_OBJ_SIZE, COMPLEX_OBJ_SIZE], ids=["simple object", "complex object"] +) def test_object_api(prepare_wallet_and_deposit, request, object_size): """ Test common gRPC API for object (put/get/head/get_range_hash/get_range/search/delete). """ wallet = prepare_wallet_and_deposit cid = create_container(wallet) - wallet_cid = {'wallet': wallet, 'cid': cid} - file_usr_header = {'key1': 1, 'key2': 'abc', 'common_key': 'common_value'} - file_usr_header_oth = {'key1': 2, 'common_key': 'common_value'} - common_header = {'common_key': 'common_value'} + wallet_cid = {"wallet": wallet, "cid": cid} + file_usr_header = {"key1": 1, "key2": "abc", "common_key": "common_value"} + file_usr_header_oth = {"key1": 2, "common_key": "common_value"} + common_header = {"common_key": "common_value"} range_len = 10 - range_cut = f'0:{range_len}' + range_cut = f"0:{range_len}" oids = [] - allure.dynamic.title(f'Test native object API for {request.node.callspec.id}') + allure.dynamic.title(f"Test native object API for {request.node.callspec.id}") file_path = generate_file(object_size) file_hash = get_file_hash(file_path) search_object(**wallet_cid, expected_objects_list=oids) - with allure.step('Put objects'): + with allure.step("Put objects"): oids.append(put_object(wallet=wallet, path=file_path, cid=cid)) oids.append(put_object(wallet=wallet, path=file_path, cid=cid, attributes=file_usr_header)) - oids.append(put_object(wallet=wallet, path=file_path, cid=cid, attributes=file_usr_header_oth)) + oids.append( + put_object(wallet=wallet, path=file_path, cid=cid, attributes=file_usr_header_oth) + ) - with allure.step('Validate storage policy for objects'): + with allure.step("Validate storage policy for objects"): for oid_to_check in oids: - assert get_simple_object_copies(wallet=wallet, cid=cid, oid=oid_to_check) == 2, 'Expected 2 copies' + assert ( + get_simple_object_copies(wallet=wallet, cid=cid, oid=oid_to_check) == 2 + ), "Expected 2 copies" - with allure.step('Get objects and compare hashes'): + with allure.step("Get objects and compare hashes"): for oid_to_check in oids: got_file_path = get_object(wallet=wallet, cid=cid, oid=oid_to_check) got_file_hash = get_file_hash(got_file_path) assert file_hash == got_file_hash - with allure.step('Get range/range hash'): - range_hash = get_range_hash(**wallet_cid, oid=oids[0], bearer_token='', range_cut=range_cut) - assert get_file_hash(file_path, range_len) == range_hash, \ - f'Expected range hash to match {range_cut} slice of file payload' + with allure.step("Get range/range hash"): + range_hash = get_range_hash(**wallet_cid, oid=oids[0], bearer_token="", range_cut=range_cut) + assert ( + get_file_hash(file_path, range_len) == range_hash + ), f"Expected range hash to match {range_cut} slice of file payload" - range_hash = get_range_hash(**wallet_cid, oid=oids[1], bearer_token='', range_cut=range_cut) - assert get_file_hash(file_path, range_len) == range_hash, \ - f'Expected range hash to match {range_cut} slice of file payload' + range_hash = get_range_hash(**wallet_cid, oid=oids[1], bearer_token="", range_cut=range_cut) + assert ( + 
get_file_hash(file_path, range_len) == range_hash + ), f"Expected range hash to match {range_cut} slice of file payload" - _, range_content = get_range(**wallet_cid, oid=oids[1], bearer='', range_cut=range_cut) - assert get_file_content(file_path, content_len=range_len, mode='rb') == range_content, \ - f'Expected range content to match {range_cut} slice of file payload' + _, range_content = get_range(**wallet_cid, oid=oids[1], bearer="", range_cut=range_cut) + assert ( + get_file_content(file_path, content_len=range_len, mode="rb") == range_content + ), f"Expected range content to match {range_cut} slice of file payload" - with allure.step('Search objects'): + with allure.step("Search objects"): search_object(**wallet_cid, expected_objects_list=oids) search_object(**wallet_cid, filters=file_usr_header, expected_objects_list=oids[1:2]) search_object(**wallet_cid, filters=file_usr_header_oth, expected_objects_list=oids[2:3]) search_object(**wallet_cid, filters=common_header, expected_objects_list=oids[1:3]) - with allure.step('Head object and validate'): + with allure.step("Head object and validate"): head_object(**wallet_cid, oid=oids[0]) head_info = head_object(**wallet_cid, oid=oids[1]) check_header_is_presented(head_info, file_usr_header) - with allure.step('Delete objects'): + with allure.step("Delete objects"): tombstone_s = delete_object(**wallet_cid, oid=oids[0]) tombstone_h = delete_object(**wallet_cid, oid=oids[1]) @@ -92,15 +108,17 @@ def test_object_api(prepare_wallet_and_deposit, request, object_size): tick_epoch() sleep(CLEANUP_TIMEOUT) - with allure.step('Get objects and check errors'): + with allure.step("Get objects and check errors"): get_object_and_check_error(**wallet_cid, oid=oids[0], error_pattern=OBJECT_ALREADY_REMOVED) get_object_and_check_error(**wallet_cid, oid=oids[1], error_pattern=OBJECT_ALREADY_REMOVED) -@allure.title('Test object life time') +@allure.title("Test object life time") @pytest.mark.sanity @pytest.mark.grpc_api -@pytest.mark.parametrize('object_size', [SIMPLE_OBJ_SIZE, COMPLEX_OBJ_SIZE], ids=['simple object', 'complex object']) +@pytest.mark.parametrize( + "object_size", [SIMPLE_OBJ_SIZE, COMPLEX_OBJ_SIZE], ids=["simple object", "complex object"] +) def test_object_api_lifetime(prepare_wallet_and_deposit, request, object_size): """ Test object deleted after expiration epoch. 
@@ -108,7 +126,7 @@ def test_object_api_lifetime(prepare_wallet_and_deposit, request, object_size): wallet = prepare_wallet_and_deposit cid = create_container(wallet) - allure.dynamic.title(f'Test object life time for {request.node.callspec.id}') + allure.dynamic.title(f"Test object life time for {request.node.callspec.id}") file_path = generate_file(object_size) file_hash = get_file_hash(file_path) @@ -118,14 +136,14 @@ def test_object_api_lifetime(prepare_wallet_and_deposit, request, object_size): got_file = get_object(wallet, cid, oid) assert get_file_hash(got_file) == file_hash - with allure.step('Tick two epochs'): + with allure.step("Tick two epochs"): for _ in range(2): tick_epoch() # Wait for GC, because object with expiration is counted as alive until GC removes it wait_for_gc_pass_on_storage_nodes() - with allure.step('Check object deleted because it expires-on epoch'): + with allure.step("Check object deleted because it expires-on epoch"): with pytest.raises(Exception, match=OBJECT_NOT_FOUND): get_object(wallet, cid, oid) @@ -133,14 +151,17 @@ def test_object_api_lifetime(prepare_wallet_and_deposit, request, object_size): def get_object_and_check_error(wallet: str, cid: str, oid: str, error_pattern: str) -> None: try: get_object(wallet=wallet, cid=cid, oid=oid) - raise AssertionError(f'Expected object {oid} removed, but it is not') + raise AssertionError(f"Expected object {oid} removed, but it is not") except Exception as err: - logger.info(f'Error is {err}') - assert error_matches_status(err, error_pattern), f'Expected {err} to match {error_pattern}' + logger.info(f"Error is {err}") + assert error_matches_status(err, error_pattern), f"Expected {err} to match {error_pattern}" def check_header_is_presented(head_info: dict, object_header: dict): for key_to_check, val_to_check in object_header.items(): - assert key_to_check in head_info['header']['attributes'], f'Key {key_to_check} is found in {head_object}' - assert head_info['header']['attributes'].get(key_to_check) == str( - val_to_check), f'Value {val_to_check} is equal' + assert ( + key_to_check in head_info["header"]["attributes"] + ), f"Key {key_to_check} is found in {head_object}" + assert head_info["header"]["attributes"].get(key_to_check) == str( + val_to_check + ), f"Value {val_to_check} is equal" diff --git a/pytest_tests/testsuites/services/s3_gate/test_s3_object.py b/pytest_tests/testsuites/services/s3_gate/test_s3_object.py index 81df70d4..cf6ea455 100644 --- a/pytest_tests/testsuites/services/s3_gate/test_s3_object.py +++ b/pytest_tests/testsuites/services/s3_gate/test_s3_object.py @@ -11,11 +11,12 @@ from python_keywords.container import list_containers from python_keywords.payment_neogo import neofs_deposit, transfer_mainnet_gas from python_keywords.utility_keywords import concat_files, generate_file, get_file_hash from s3_helper import check_objects_in_bucket, set_bucket_versioning +from utility import create_file_with_content +from wallet import init_wallet + from steps import s3_gate_bucket, s3_gate_object from steps.aws_cli_client import AwsCliClient from steps.s3_gate_base import TestS3GateBase -from utility import create_file_with_content -from wallet import init_wallet def pytest_generate_tests(metafunc): diff --git a/pytest_tests/testsuites/services/test_binaries.py b/pytest_tests/testsuites/services/test_binaries.py index eeda285b..2aa133bd 100644 --- a/pytest_tests/testsuites/services/test_binaries.py +++ b/pytest_tests/testsuites/services/test_binaries.py @@ -5,26 +5,25 @@ from re import match import 
allure import pytest import requests - from common import BIN_VERSIONS_FILE from env_properties import read_env_properties, save_env_properties from service_helper import get_storage_service_helper -logger = logging.getLogger('NeoLogger') +logger = logging.getLogger("NeoLogger") -@allure.title('Check binaries versions') +@allure.title("Check binaries versions") @pytest.mark.check_binaries -@pytest.mark.skip('Skipped due to https://j.yadro.com/browse/OBJECT-628') +@pytest.mark.skip("Skipped due to https://j.yadro.com/browse/OBJECT-628") def test_binaries_versions(request): """ Compare binaries versions from external source (url) and deployed on servers. """ if not BIN_VERSIONS_FILE: - pytest.skip('File with binaries and versions was not provided') + pytest.skip("File with binaries and versions was not provided") binaries_to_check = download_versions_info(BIN_VERSIONS_FILE) - with allure.step('Get binaries versions from servers'): + with allure.step("Get binaries versions from servers"): helper = get_storage_service_helper() got_versions = helper.get_binaries_version(binaries=list(binaries_to_check.keys())) @@ -36,7 +35,7 @@ def test_binaries_versions(request): for binary, version in binaries_to_check.items(): actual_version = got_versions.get(binary) if actual_version != version: - failed_versions[binary] = f'Expected version {version}, found version {actual_version}' + failed_versions[binary] = f"Expected version {version}, found version {actual_version}" # If some binary was not listed in the env properties file, let's add it # so that we have full information about versions in allure report @@ -48,26 +47,27 @@ def test_binaries_versions(request): # create clear beautiful error with aggregation info if failed_versions: - msg = '\n'.join({f'{binary}: {error}' for binary, error in failed_versions.items()}) - raise AssertionError(f'Found binaries with unexpected versions:\n{msg}') + msg = "\n".join({f"{binary}: {error}" for binary, error in failed_versions.items()}) + raise AssertionError(f"Found binaries with unexpected versions:\n{msg}") -@allure.step('Download versions info from {url}') +@allure.step("Download versions info from {url}") def download_versions_info(url: str) -> dict: binaries_to_version = {} response = requests.get(url) - assert response.status_code == HTTPStatus.OK, \ - f'Got {response.status_code} code. Content {response.json()}' + assert ( + response.status_code == HTTPStatus.OK + ), f"Got {response.status_code} code. 
Content {response.json()}" content = response.text - assert content, f'Expected file with content, got {response}' + assert content, f"Expected file with content, got {response}" - for line in content.split('\n'): - m = match('(.*)=(.*)', line) + for line in content.split("\n"): + m = match("(.*)=(.*)", line) if not m: - logger.warning(f'Could not get binary/version from {line}') + logger.warning(f"Could not get binary/version from {line}") continue bin_name, bin_version = m.group(1), m.group(2) binaries_to_version[bin_name] = bin_version diff --git a/pytest_tests/testsuites/session_token/test_object_session_token.py b/pytest_tests/testsuites/session_token/test_object_session_token.py index 6fabb4c7..8babc39d 100644 --- a/pytest_tests/testsuites/session_token/test_object_session_token.py +++ b/pytest_tests/testsuites/session_token/test_object_session_token.py @@ -1,5 +1,6 @@ import random +import allure import pytest from common import COMPLEX_OBJ_SIZE, NEOFS_NETMAP_DICT, SIMPLE_OBJ_SIZE from grpc_responses import SESSION_NOT_FOUND @@ -16,8 +17,6 @@ from python_keywords.neofs_verbs import ( from python_keywords.session_token import create_session_token from python_keywords.utility_keywords import generate_file -import allure - @allure.title("Test Object Operations with Session Token") @pytest.mark.session_token diff --git a/robot/resources/lib/python_keywords/acl.py b/robot/resources/lib/python_keywords/acl.py index 678401a9..14e74ed3 100644 --- a/robot/resources/lib/python_keywords/acl.py +++ b/robot/resources/lib/python_keywords/acl.py @@ -15,41 +15,41 @@ from cli_utils import NeofsCli from common import ASSETS_DIR, NEOFS_CLI_EXEC, NEOFS_ENDPOINT, WALLET_CONFIG from data_formatters import get_wallet_public_key -logger = logging.getLogger('NeoLogger') +logger = logging.getLogger("NeoLogger") EACL_LIFETIME = 100500 NEOFS_CONTRACT_CACHE_TIMEOUT = 30 class EACLOperation(Enum): - PUT = 'put' - GET = 'get' - HEAD = 'head' - GET_RANGE = 'getrange' - GET_RANGE_HASH = 'getrangehash' - SEARCH = 'search' - DELETE = 'delete' + PUT = "put" + GET = "get" + HEAD = "head" + GET_RANGE = "getrange" + GET_RANGE_HASH = "getrangehash" + SEARCH = "search" + DELETE = "delete" class EACLAccess(Enum): - ALLOW = 'allow' - DENY = 'deny' + ALLOW = "allow" + DENY = "deny" class EACLRole(Enum): - OTHERS = 'others' - USER = 'user' - SYSTEM = 'system' + OTHERS = "others" + USER = "user" + SYSTEM = "system" class EACLHeaderType(Enum): - REQUEST = 'req' # Filter request headers - OBJECT = 'obj' # Filter object headers - SERVICE = 'SERVICE' # Filter service headers. These are not processed by NeoFS nodes and exist for service use only + REQUEST = "req" # Filter request headers + OBJECT = "obj" # Filter object headers + SERVICE = "SERVICE" # Filter service headers. 
These are not processed by NeoFS nodes and exist for service use only class EACLMatchType(Enum): - STRING_EQUAL = '=' # Return true if strings are equal - STRING_NOT_EQUAL = '!=' # Return true if strings are different + STRING_EQUAL = "=" # Return true if strings are equal + STRING_NOT_EQUAL = "!=" # Return true if strings are different @dataclass @@ -60,7 +60,12 @@ class EACLFilter: value: Optional[str] = None def to_dict(self) -> Dict[str, Any]: - return {'headerType': self.header_type, 'matchType': self.match_type, 'key': self.key, 'value': self.value} + return { + "headerType": self.header_type, + "matchType": self.match_type, + "key": self.key, + "value": self.value, + } @dataclass @@ -68,10 +73,16 @@ class EACLFilters: filters: Optional[List[EACLFilter]] = None def __str__(self): - return ','.join( - [f'{filter.header_type.value}:{filter.key}{filter.match_type.value}{filter.value}' - for filter in self.filters] - ) if self.filters else [] + return ( + ",".join( + [ + f"{filter.header_type.value}:{filter.key}{filter.match_type.value}{filter.value}" + for filter in self.filters + ] + ) + if self.filters + else [] + ) @dataclass @@ -87,15 +98,23 @@ class EACLRule: filters: Optional[EACLFilters] = None def to_dict(self) -> Dict[str, Any]: - return {'Operation': self.operation, 'Access': self.access, 'Role': self.role, - 'Filters': self.filters or []} + return { + "Operation": self.operation, + "Access": self.access, + "Role": self.role, + "Filters": self.filters or [], + } def __str__(self): - role = self.role.value if isinstance(self.role, EACLRole) else f'pubkey:{get_wallet_public_key(self.role, "")}' + role = ( + self.role.value + if isinstance(self.role, EACLRole) + else f'pubkey:{get_wallet_public_key(self.role, "")}' + ) return f'{self.access.value} {self.operation.value} {self.filters or ""} {role}' -@allure.title('Get extended ACL') +@allure.title("Get extended ACL") def get_eacl(wallet_path: str, cid: str) -> Optional[str]: cli = NeofsCli(config=WALLET_CONFIG) try: @@ -104,16 +123,21 @@ def get_eacl(wallet_path: str, cid: str) -> Optional[str]: logger.info("Extended ACL table is not set for this container") logger.info(f"Got exception while getting eacl: {exc}") return None - if 'extended ACL table is not set for this container' in output: + if "extended ACL table is not set for this container" in output: return None return output -@allure.title('Set extended ACL') +@allure.title("Set extended ACL") def set_eacl(wallet_path: str, cid: str, eacl_table_path: str) -> None: cli = NeofsCli(config=WALLET_CONFIG, timeout=60) - cli.container.set_eacl(wallet=wallet_path, rpc_endpoint=NEOFS_ENDPOINT, cid=cid, table=eacl_table_path, - await_mode=True) + cli.container.set_eacl( + wallet=wallet_path, + rpc_endpoint=NEOFS_ENDPOINT, + cid=cid, + table=eacl_table_path, + await_mode=True, + ) def _encode_cid_for_eacl(cid: str) -> str: @@ -125,14 +149,16 @@ def create_eacl(cid: str, rules_list: List[EACLRule]) -> str: table_file_path = f"{os.getcwd()}/{ASSETS_DIR}/eacl_table_{str(uuid.uuid4())}.json" NeofsCli().acl.extended_create(cid=cid, out=table_file_path, rule=rules_list) - with open(table_file_path, 'r') as file: + with open(table_file_path, "r") as file: table_data = file.read() logger.info(f"Generated eACL:\n{table_data}") return table_file_path -def form_bearertoken_file(wif: str, cid: str, eacl_rule_list: List[Union[EACLRule, EACLPubKey]]) -> str: +def form_bearertoken_file( + wif: str, cid: str, eacl_rule_list: List[Union[EACLRule, EACLPubKey]] +) -> str: """ This function fetches eACL 
for given on behalf of , then extends it with filters taken from , signs @@ -144,50 +170,29 @@ def form_bearertoken_file(wif: str, cid: str, eacl_rule_list: List[Union[EACLRul eacl = get_eacl(wif, cid) json_eacl = dict() if eacl: - eacl = eacl.replace('eACL: ', '').split('Signature')[0] + eacl = eacl.replace("eACL: ", "").split("Signature")[0] json_eacl = json.loads(eacl) logger.info(json_eacl) eacl_result = { - "body": - { - "eaclTable": - { - "containerID": - { - "value": enc_cid - }, - "records": [] - }, - "lifetime": - { - "exp": EACL_LIFETIME, - "nbf": "1", - "iat": "0" - } - } + "body": { + "eaclTable": {"containerID": {"value": enc_cid}, "records": []}, + "lifetime": {"exp": EACL_LIFETIME, "nbf": "1", "iat": "0"}, + } } - assert eacl_rules, 'Got empty eacl_records list' + assert eacl_rules, "Got empty eacl_records list" for rule in eacl_rule_list: op_data = { "operation": rule.operation.value.upper(), "action": rule.access.value.upper(), "filters": rule.filters or [], - "targets": [] + "targets": [], } if isinstance(rule.role, EACLRole): - op_data['targets'] = [ - { - "role": rule.role.value.upper() - } - ] + op_data["targets"] = [{"role": rule.role.value.upper()}] elif isinstance(rule.role, EACLPubKey): - op_data['targets'] = [ - { - 'keys': rule.role.keys - } - ] + op_data["targets"] = [{"keys": rule.role.keys}] eacl_result["body"]["eaclTable"]["records"].append(op_data) @@ -196,7 +201,7 @@ def form_bearertoken_file(wif: str, cid: str, eacl_rule_list: List[Union[EACLRul for record in json_eacl["records"]: eacl_result["body"]["eaclTable"]["records"].append(record) - with open(file_path, 'w', encoding='utf-8') as eacl_file: + with open(file_path, "w", encoding="utf-8") as eacl_file: json.dump(eacl_result, eacl_file, ensure_ascii=False, indent=4) logger.info(f"Got these extended ACL records: {eacl_result}") @@ -206,17 +211,17 @@ def form_bearertoken_file(wif: str, cid: str, eacl_rule_list: List[Union[EACLRul def eacl_rules(access: str, verbs: list, user: str) -> list[str]: """ - This function creates a list of eACL rules. - Args: - access (str): identifies if the following operation(s) - is allowed or denied - verbs (list): a list of operations to set rules for - user (str): a group of users (user/others) or a wallet of - a certain user for whom rules are set - Returns: - (list): a list of eACL rules + This function creates a list of eACL rules. 
+ Args: + access (str): identifies if the following operation(s) + is allowed or denied + verbs (list): a list of operations to set rules for + user (str): a group of users (user/others) or a wallet of + a certain user for whom rules are set + Returns: + (list): a list of eACL rules """ - if user not in ('others', 'user'): + if user not in ("others", "user"): pubkey = get_wallet_public_key(user, wallet_password="") user = f"pubkey:{pubkey}" @@ -229,13 +234,13 @@ def eacl_rules(access: str, verbs: list, user: str) -> list[str]: def sign_bearer_token(wallet_path: str, eacl_rules_file: str) -> None: cmd = ( - f'{NEOFS_CLI_EXEC} util sign bearer-token --from {eacl_rules_file} ' - f'--to {eacl_rules_file} --wallet {wallet_path} --config {WALLET_CONFIG} --json' + f"{NEOFS_CLI_EXEC} util sign bearer-token --from {eacl_rules_file} " + f"--to {eacl_rules_file} --wallet {wallet_path} --config {WALLET_CONFIG} --json" ) _cmd_run(cmd) -@allure.title('Wait for eACL cache expired') +@allure.title("Wait for eACL cache expired") def wait_for_cache_expired(): sleep(NEOFS_CONTRACT_CACHE_TIMEOUT) return diff --git a/robot/resources/lib/python_keywords/cli_utils/adm/adm.py b/robot/resources/lib/python_keywords/cli_utils/adm/adm.py index a5914693..a6a3a8f6 100644 --- a/robot/resources/lib/python_keywords/cli_utils/adm/adm.py +++ b/robot/resources/lib/python_keywords/cli_utils/adm/adm.py @@ -6,8 +6,8 @@ from .completion import NeofsAdmCompletion from .config import NeofsAdmConfig from .gendoc import NeofsAdmGenDoc from .morph import NeofsAdmMorph -from .subnet import NeofsAdmMorphSubnet from .storage_config import NeofsAdmStorageConfig +from .subnet import NeofsAdmMorphSubnet from .version import NeofsAdmVersion @@ -23,14 +23,27 @@ class NeofsAdm: storage_config: Optional[NeofsAdmStorageConfig] = None version: Optional[NeofsAdmVersion] = None - def __init__(self, neofs_adm_exec_path: Optional[str] = None, config_file: Optional[str] = None, timeout: int = 30): + def __init__( + self, + neofs_adm_exec_path: Optional[str] = None, + config_file: Optional[str] = None, + timeout: int = 30, + ): self.config_file = config_file self.neofs_adm_exec_path = neofs_adm_exec_path or NEOFS_ADM_EXEC - self.completion = NeofsAdmCompletion(self.neofs_adm_exec_path, timeout=timeout, config=config_file) + self.completion = NeofsAdmCompletion( + self.neofs_adm_exec_path, timeout=timeout, config=config_file + ) self.config = NeofsAdmConfig(self.neofs_adm_exec_path, timeout=timeout, config=config_file) self.gendoc = NeofsAdmGenDoc(self.neofs_adm_exec_path, timeout=timeout, config=config_file) self.morph = NeofsAdmMorph(self.neofs_adm_exec_path, timeout=timeout, config=config_file) - self.subnet = NeofsAdmMorphSubnet(self.neofs_adm_exec_path, timeout=timeout, config=config_file) - self.storage_config = NeofsAdmStorageConfig(self.neofs_adm_exec_path, timeout=timeout, config=config_file) - self.version = NeofsAdmVersion(self.neofs_adm_exec_path, timeout=timeout, config=config_file) + self.subnet = NeofsAdmMorphSubnet( + self.neofs_adm_exec_path, timeout=timeout, config=config_file + ) + self.storage_config = NeofsAdmStorageConfig( + self.neofs_adm_exec_path, timeout=timeout, config=config_file + ) + self.version = NeofsAdmVersion( + self.neofs_adm_exec_path, timeout=timeout, config=config_file + ) diff --git a/robot/resources/lib/python_keywords/cli_utils/adm/completion.py b/robot/resources/lib/python_keywords/cli_utils/adm/completion.py index 9bd42d8a..11f0bc09 100644 --- a/robot/resources/lib/python_keywords/cli_utils/adm/completion.py 
+++ b/robot/resources/lib/python_keywords/cli_utils/adm/completion.py @@ -27,4 +27,4 @@ class NeofsAdmCompletion(NeofsCliCommand): str: Command string """ - return self._execute('completion ' + completion_type.value) + return self._execute("completion " + completion_type.value) diff --git a/robot/resources/lib/python_keywords/cli_utils/adm/completion_type.py b/robot/resources/lib/python_keywords/cli_utils/adm/completion_type.py index 9bf2ba8b..7cd07028 100644 --- a/robot/resources/lib/python_keywords/cli_utils/adm/completion_type.py +++ b/robot/resources/lib/python_keywords/cli_utils/adm/completion_type.py @@ -2,7 +2,7 @@ from enum import Enum class CompletionType(Enum): - BASH = 'bash' - ZHS = 'zsh' - FISH = 'fish' - POWERSHELL = 'powershell' + BASH = "bash" + ZHS = "zsh" + FISH = "fish" + POWERSHELL = "powershell" diff --git a/robot/resources/lib/python_keywords/cli_utils/adm/config.py b/robot/resources/lib/python_keywords/cli_utils/adm/config.py index 3c7af171..cf5b06a8 100644 --- a/robot/resources/lib/python_keywords/cli_utils/adm/config.py +++ b/robot/resources/lib/python_keywords/cli_utils/adm/config.py @@ -2,7 +2,7 @@ from cli_utils.cli_command import NeofsCliCommand class NeofsAdmConfig(NeofsCliCommand): - def init(self, path: str = '~/.neofs/adm/config.yml') -> str: + def init(self, path: str = "~/.neofs/adm/config.yml") -> str: """Initialize basic neofs-adm configuration file. Args: @@ -14,6 +14,6 @@ class NeofsAdmConfig(NeofsCliCommand): """ return self._execute( - 'config init', - **{param: param_value for param, param_value in locals().items() if param not in ['self']} + "config init", + **{param: value for param, value in locals().items() if param not in ["self"]}, ) diff --git a/robot/resources/lib/python_keywords/cli_utils/adm/gendoc.py b/robot/resources/lib/python_keywords/cli_utils/adm/gendoc.py index 28c52dac..9ed00402 100644 --- a/robot/resources/lib/python_keywords/cli_utils/adm/gendoc.py +++ b/robot/resources/lib/python_keywords/cli_utils/adm/gendoc.py @@ -4,7 +4,9 @@ from cli_utils.cli_command import NeofsCliCommand class NeofsAdmGenDoc(NeofsCliCommand): - def get(self, doc_file: str, depth: int = 1, doc_type: str = 'md', extension: Optional[str] = None) -> str: + def get( + self, doc_file: str, depth: int = 1, doc_type: str = "md", extension: Optional[str] = None + ) -> str: """Generate documentation for this command. If the template is not provided, builtin cobra generator is used and each subcommand is placed in a separate file in the same directory. @@ -29,6 +31,10 @@ class NeofsAdmGenDoc(NeofsCliCommand): """ return self._execute( - f'gendoc {doc_file}', - **{param: param_value for param, param_value in locals().items() if param not in ['self', 'doc_file']} + f"gendoc {doc_file}", + **{ + param: value + for param, value in locals().items() + if param not in ["self", "doc_file"] + }, ) diff --git a/robot/resources/lib/python_keywords/cli_utils/adm/morph.py b/robot/resources/lib/python_keywords/cli_utils/adm/morph.py index 458b0438..0deee522 100644 --- a/robot/resources/lib/python_keywords/cli_utils/adm/morph.py +++ b/robot/resources/lib/python_keywords/cli_utils/adm/morph.py @@ -4,9 +4,16 @@ from cli_utils.cli_command import NeofsCliCommand class NeofsAdmMorph(NeofsCliCommand): - def deposit_notary(self, rpc_endpoint: str, account: str, gas: str, storage_wallet: Optional[str] = None, - till: Optional[str] = None) -> str: - """Deposit GAS for notary service. 
+ def deposit_notary( + self, + rpc_endpoint: str, + account: str, + gas: str, + storage_wallet: Optional[str] = None, + till: Optional[str] = None, + ) -> str: + """ + Deposit GAS for notary service. Args: account (str): wallet account address @@ -15,19 +22,24 @@ class NeofsAdmMorph(NeofsCliCommand): storage_wallet (str): path to storage node wallet till (str): notary deposit duration in blocks - Returns: str: Command string - """ return self._execute( - 'morph deposit-notary', - **{param: param_value for param, param_value in locals().items() if param not in ['self']} + "morph deposit-notary", + **{param: value for param, value in locals().items() if param not in ["self"]}, ) - def dump_balances(self, rpc_endpoint: str, alphabet: Optional[str] = None, proxy: Optional[str] = None, - script_hash: Optional[str] = None, storage: Optional[str] = None) -> str: - """Dump GAS balances + def dump_balances( + self, + rpc_endpoint: str, + alphabet: Optional[str] = None, + proxy: Optional[str] = None, + script_hash: Optional[str] = None, + storage: Optional[str] = None, + ) -> str: + """ + Dump GAS balances. Args: alphabet (str): dump balances of alphabet contracts @@ -36,35 +48,38 @@ class NeofsAdmMorph(NeofsCliCommand): script_hash (str): use script-hash format for addresses storage (str): dump balances of storage nodes from the current netmap - Returns: str: Command string - """ return self._execute( - 'morph dump-balances', - **{param: param_value for param, param_value in locals().items() if param not in ['self']} + "morph dump-balances", + **{param: value for param, value in locals().items() if param not in ["self"]}, ) def dump_config(self, rpc_endpoint: str) -> str: - """Section for morph network configuration commands. + """ + Dump NeoFS network config. Args: rpc_endpoint (str): N3 RPC node endpoint - Returns: str: Command string - """ return self._execute( - 'morph dump-config', - **{param: param_value for param, param_value in locals().items() if param not in ['self']} + "morph dump-config", + **{param: value for param, value in locals().items() if param not in ["self"]}, ) - def dump_containers(self, rpc_endpoint: str, cid: Optional[str] = None, container_contract: Optional[str] = None, - dump: Optional[str] = None) -> str: - """Dump NeoFS containers to file. + def dump_containers( + self, + rpc_endpoint: str, + cid: Optional[str] = None, + container_contract: Optional[str] = None, + dump: Optional[str] = None, + ) -> str: + """ + Dump NeoFS containers to file. Args: cid (str): containers to dump @@ -72,70 +87,73 @@ class NeofsAdmMorph(NeofsCliCommand): dump (str): file where to save dumped containers rpc_endpoint (str): N3 RPC node endpoint - Returns: str: Command string - """ return self._execute( - 'morph dump-containers', - **{param: param_value for param, param_value in locals().items() if param not in ['self']} + "morph dump-containers", + **{param: value for param, value in locals().items() if param not in ["self"]}, ) def dump_hashes(self, rpc_endpoint: str) -> str: - """Dump deployed contract hashes. + """ + Dump deployed contract hashes. 
Args: rpc_endpoint (str): N3 RPC node endpoint - Returns: str: Command string - """ return self._execute( - 'morph dump-hashes', - **{param: param_value for param, param_value in locals().items() if param not in ['self']} + "morph dump-hashes", + **{param: value for param, value in locals().items() if param not in ["self"]}, ) - def force_new_epoch(self, rpc_endpoint: Optional[str] = None, alphabet: Optional[str] = None) -> str: - """Create new NeoFS epoch event in the side chain + def force_new_epoch( + self, rpc_endpoint: Optional[str] = None, alphabet: Optional[str] = None + ) -> str: + """ + Create new NeoFS epoch event in the side chain Args: alphabet (str): path to alphabet wallets dir rpc_endpoint (str): N3 RPC node endpoint - Returns: str: Command string - """ return self._execute( - 'morph force-new-epoch', - **{param: param_value for param, param_value in locals().items() if param not in ['self']} + "morph force-new-epoch", + **{param: value for param, value in locals().items() if param not in ["self"]}, ) def generate_alphabet(self, rpc_endpoint: str, alphabet_wallets: str, size: int = 7) -> str: - """Generate alphabet wallets for consensus nodes of the morph network + """ + Generate alphabet wallets for consensus nodes of the morph network. Args: alphabet_wallets (str): path to alphabet wallets dir size (int): amount of alphabet wallets to generate (default 7) rpc_endpoint (str): N3 RPC node endpoint - Returns: str: Command string - """ return self._execute( - 'morph generate-alphabet', - **{param: param_value for param, param_value in locals().items() if param not in ['self']} + "morph generate-alphabet", + **{param: value for param, value in locals().items() if param not in ["self"]}, ) - def generate_storage_wallet(self, rpc_endpoint: str, alphabet_wallets: str, storage_wallet: str, - initial_gas: Optional[str] = None) -> str: - """Generate storage node wallet for the morph network + def generate_storage_wallet( + self, + rpc_endpoint: str, + alphabet_wallets: str, + storage_wallet: str, + initial_gas: Optional[str] = None, + ) -> str: + """ + Generate storage node wallet for the morph network. Args: alphabet_wallets (str): path to alphabet wallets dir @@ -143,21 +161,29 @@ class NeofsAdmMorph(NeofsCliCommand): rpc_endpoint (str): N3 RPC node endpoint storage_wallet (str): path to new storage node wallet - Returns: str: Command string - """ return self._execute( - 'morph generate-storage-wallet', - **{param: param_value for param, param_value in locals().items() if param not in ['self']} + "morph generate-storage-wallet", + **{param: value for param, value in locals().items() if param not in ["self"]}, ) - def init(self, rpc_endpoint: str, alphabet_wallets: str, contracts: str, protocol: str, - container_alias_fee: int = 500, container_fee: int = 1000, epoch_duration: int = 240, - homomorphic_disabled: bool = False, local_dump: Optional[str] = None, max_object_size: int = 67108864 - ) -> str: - """Section for morph network configuration commands. + def init( + self, + rpc_endpoint: str, + alphabet_wallets: str, + contracts: str, + protocol: str, + container_alias_fee: int = 500, + container_fee: int = 1000, + epoch_duration: int = 240, + homomorphic_disabled: bool = False, + local_dump: Optional[str] = None, + max_object_size: int = 67108864, + ) -> str: + """ + Initialize side chain network with smart-contracts and network settings. 
Args: alphabet_wallets (str): path to alphabet wallets dir @@ -172,19 +198,23 @@ class NeofsAdmMorph(NeofsCliCommand): protocol (str): path to the consensus node configuration rpc_endpoint (str): N3 RPC node endpoint - Returns: str: Command string - """ return self._execute( - 'morph init', - **{param: param_value for param, param_value in locals().items() if param not in ['self']} + "morph init", + **{param: value for param, value in locals().items() if param not in ["self"]}, ) - def refill_gas(self, rpc_endpoint: str, alphabet_wallets: str, storage_wallet: str, gas: Optional[str] = None - ) -> str: - """Refill GAS of storage node's wallet in the morph network + def refill_gas( + self, + rpc_endpoint: str, + alphabet_wallets: str, + storage_wallet: str, + gas: Optional[str] = None, + ) -> str: + """ + Refill GAS of storage node's wallet in the morph network. Args: alphabet_wallets (str): path to alphabet wallets dir @@ -192,18 +222,19 @@ class NeofsAdmMorph(NeofsCliCommand): rpc_endpoint (str): N3 RPC node endpoint storage_wallet (str): path to new storage node wallet - Returns: str: Command string - """ return self._execute( - 'morph refill-gas', - **{param: param_value for param, param_value in locals().items() if param not in ['self']} + "morph refill-gas", + **{param: value for param, value in locals().items() if param not in ["self"]}, ) - def restore_containers(self, rpc_endpoint: str, alphabet_wallets: str, cid: str, dump: str) -> str: - """Restore NeoFS containers from file. + def restore_containers( + self, rpc_endpoint: str, alphabet_wallets: str, cid: str, dump: str + ) -> str: + """ + Restore NeoFS containers from file. Args: alphabet_wallets (str): path to alphabet wallets dir @@ -211,19 +242,24 @@ class NeofsAdmMorph(NeofsCliCommand): dump (str): file to restore containers from rpc_endpoint (str): N3 RPC node endpoint - Returns: str: Command string - """ return self._execute( - 'morph restore-containers', - **{param: param_value for param, param_value in locals().items() if param not in ['self']} + "morph restore-containers", + **{param: value for param, value in locals().items() if param not in ["self"]}, ) - def set_policy(self, rpc_endpoint: str, alphabet_wallets: str, exec_fee_factor: Optional[int] = None, - storage_price: Optional[int] = None, fee_per_byte: Optional[int] = None) -> str: - """Set global policy values + def set_policy( + self, + rpc_endpoint: str, + alphabet_wallets: str, + exec_fee_factor: Optional[int] = None, + storage_price: Optional[int] = None, + fee_per_byte: Optional[int] = None, + ) -> str: + """ + Set global policy values. 
Args: alphabet_wallets (str): path to alphabet wallets dir @@ -232,28 +268,30 @@ class NeofsAdmMorph(NeofsCliCommand): fee_per_byte (int): FeePerByte= rpc_endpoint (str): N3 RPC node endpoint - Returns: str: Command string - """ - non_param_attribute = '' + non_param_attribute = "" if exec_fee_factor: - non_param_attribute += f'ExecFeeFactor={exec_fee_factor} ' + non_param_attribute += f"ExecFeeFactor={exec_fee_factor} " if storage_price: - non_param_attribute += f'StoragePrice={storage_price} ' + non_param_attribute += f"StoragePrice={storage_price} " if fee_per_byte: - non_param_attribute += f'FeePerByte={fee_per_byte} ' + non_param_attribute += f"FeePerByte={fee_per_byte} " return self._execute( - f'morph restore-containers {non_param_attribute}', - **{param: param_value for param, param_value in locals().items() if param not in [ - 'self', 'exec_fee_factor', 'storage_price', 'fee_per_byte' - ]} + f"morph restore-containers {non_param_attribute}", + **{ + param: value + for param, value in locals().items() + if param not in ["self", "exec_fee_factor", "storage_price", "fee_per_byte"] + }, ) - def update_contracts(self, rpc_endpoint: str, alphabet_wallets: str, contracts: Optional[str] = None - ) -> str: - """Update NeoFS contracts. + def update_contracts( + self, rpc_endpoint: str, alphabet_wallets: str, contracts: Optional[str] = None + ) -> str: + """ + Update NeoFS contracts. Args: alphabet_wallets (str): path to alphabet wallets dir @@ -261,12 +299,10 @@ class NeofsAdmMorph(NeofsCliCommand): (default fetched from latest github release) rpc_endpoint (str): N3 RPC node endpoint - Returns: str: Command string - """ return self._execute( - 'morph update-contracts', - **{param: param_value for param, param_value in locals().items() if param not in ['self']} + "morph update-contracts", + **{param: value for param, value in locals().items() if param not in ["self"]}, ) diff --git a/robot/resources/lib/python_keywords/cli_utils/adm/storage_config.py b/robot/resources/lib/python_keywords/cli_utils/adm/storage_config.py index 502e21fe..b241ecbf 100644 --- a/robot/resources/lib/python_keywords/cli_utils/adm/storage_config.py +++ b/robot/resources/lib/python_keywords/cli_utils/adm/storage_config.py @@ -15,6 +15,6 @@ class NeofsAdmStorageConfig(NeofsCliCommand): """ return self._execute( - 'storage-config', - **{param: param_value for param, param_value in locals().items() if param not in ['self']} + "storage-config", + **{param: value for param, value in locals().items() if param not in ["self"]}, ) diff --git a/robot/resources/lib/python_keywords/cli_utils/adm/subnet.py b/robot/resources/lib/python_keywords/cli_utils/adm/subnet.py index bb7615ae..8ba1d843 100644 --- a/robot/resources/lib/python_keywords/cli_utils/adm/subnet.py +++ b/robot/resources/lib/python_keywords/cli_utils/adm/subnet.py @@ -19,29 +19,31 @@ class NeofsAdmMorphSubnet(NeofsCliCommand): """ return self._execute( - 'morph subnet create', - **{param: param_value for param, param_value in locals().items() if param not in ['self']} + "morph subnet create", + **{param: value for param, value in locals().items() if param not in ["self"]}, ) def get(self, rpc_endpoint: str, subnet: str) -> str: - """Read information about the NeoFS subnet. + """ + Read information about the NeoFS subnet. 
Args: rpc_endpoint (str): N3 RPC node endpoint subnet (str): ID of the subnet to read - Returns: str: Command string - """ return self._execute( - 'morph subnet get', - **{param: param_value for param, param_value in locals().items() if param not in ['self']} + "morph subnet get", + **{param: value for param, value in locals().items() if param not in ["self"]}, ) - def remove(self, rpc_endpoint: str, wallet: str, subnet: str, address: Optional[str] = None) -> str: - """Remove NeoFS subnet. + def remove( + self, rpc_endpoint: str, wallet: str, subnet: str, address: Optional[str] = None + ) -> str: + """ + Remove NeoFS subnet. Args: address (str): Address in the wallet, optional @@ -49,19 +51,26 @@ class NeofsAdmMorphSubnet(NeofsCliCommand): subnet (str): ID of the subnet to read wallet (str): Path to file with wallet - Returns: str: Command string - """ return self._execute( - 'morph subnet remove', - **{param: param_value for param, param_value in locals().items() if param not in ['self']} + "morph subnet remove", + **{param: value for param, value in locals().items() if param not in ["self"]}, ) - def admin_add(self, rpc_endpoint: str, wallet: str, admin: str, subnet: str, client: Optional[str] = None, - group: Optional[str] = None, address: Optional[str] = None) -> str: - """Add admin to the NeoFS subnet. + def admin_add( + self, + rpc_endpoint: str, + wallet: str, + admin: str, + subnet: str, + client: Optional[str] = None, + group: Optional[str] = None, + address: Optional[str] = None, + ) -> str: + """ + Add admin to the NeoFS subnet. Args: address (str): Address in the wallet, optional @@ -72,19 +81,25 @@ class NeofsAdmMorphSubnet(NeofsCliCommand): subnet (str): ID of the subnet to read wallet (str): Path to file with wallet - Returns: str: Command string - """ return self._execute( - 'morph subnet admin add', - **{param: param_value for param, param_value in locals().items() if param not in ['self']} + "morph subnet admin add", + **{param: value for param, value in locals().items() if param not in ["self"]}, ) - def admin_remove(self, rpc_endpoint: str, wallet: str, admin: str, subnet: str, client: Optional[str] = None, - address: Optional[str] = None) -> str: - """Remove admin of the NeoFS subnet. + def admin_remove( + self, + rpc_endpoint: str, + wallet: str, + admin: str, + subnet: str, + client: Optional[str] = None, + address: Optional[str] = None, + ) -> str: + """ + Remove admin of the NeoFS subnet. Args: address (str): Address in the wallet, optional @@ -94,19 +109,25 @@ class NeofsAdmMorphSubnet(NeofsCliCommand): subnet (str): ID of the subnet to read wallet (str): Path to file with wallet - Returns: str: Command string - """ return self._execute( - 'morph subnet admin remove', - **{param: param_value for param, param_value in locals().items() if param not in ['self']} + "morph subnet admin remove", + **{param: value for param, value in locals().items() if param not in ["self"]}, ) - def client_add(self, rpc_endpoint: str, wallet: str, subnet: str, client: Optional[str] = None, - group: Optional[str] = None, address: Optional[str] = None) -> str: - """Add client to the NeoFS subnet. + def client_add( + self, + rpc_endpoint: str, + wallet: str, + subnet: str, + client: Optional[str] = None, + group: Optional[str] = None, + address: Optional[str] = None, + ) -> str: + """ + Add client to the NeoFS subnet. 
Args: address (str): Address in the wallet, optional @@ -116,19 +137,25 @@ class NeofsAdmMorphSubnet(NeofsCliCommand): subnet (str): ID of the subnet to read wallet (str): Path to file with wallet - Returns: str: Command string - """ return self._execute( - 'morph subnet client add', - **{param: param_value for param, param_value in locals().items() if param not in ['self']} + "morph subnet client add", + **{param: value for param, value in locals().items() if param not in ["self"]}, ) - def client_remove(self, rpc_endpoint: str, wallet: str, client: str, group: str, subnet: str, - address: Optional[str] = None) -> str: - """Remove client of the NeoFS subnet. + def client_remove( + self, + rpc_endpoint: str, + wallet: str, + client: str, + group: str, + subnet: str, + address: Optional[str] = None, + ) -> str: + """ + Remove client of the NeoFS subnet. Args: address (str): Address in the wallet, optional @@ -138,18 +165,17 @@ class NeofsAdmMorphSubnet(NeofsCliCommand): subnet (str): ID of the subnet to read wallet (str): Path to file with wallet - Returns: str: Command string - """ return self._execute( - 'morph subnet client remove', - **{param: param_value for param, param_value in locals().items() if param not in ['self']} + "morph subnet client remove", + **{param: value for param, value in locals().items() if param not in ["self"]}, ) def node_add(self, rpc_endpoint: str, wallet: str, node: str, subnet: str) -> str: - """Add node to the NeoFS subnet. + """ + Add node to the NeoFS subnet. Args: node (str): Hex-encoded public key of the node @@ -157,18 +183,17 @@ class NeofsAdmMorphSubnet(NeofsCliCommand): subnet (str): ID of the subnet to read wallet (str): Path to file with wallet - Returns: str: Command string - """ return self._execute( - 'morph subnet node add', - **{param: param_value for param, param_value in locals().items() if param not in ['self']} + "morph subnet node add", + **{param: value for param, value in locals().items() if param not in ["self"]}, ) def node_remove(self, rpc_endpoint: str, wallet: str, node: str, subnet: str) -> str: - """Remove node from the NeoFS subnet. + """ + Remove node from the NeoFS subnet. 
Args: node (str): Hex-encoded public key of the node @@ -176,12 +201,10 @@ class NeofsAdmMorphSubnet(NeofsCliCommand): subnet (str): ID of the subnet to read wallet (str): Path to file with wallet - Returns: str: Command string - """ return self._execute( - 'morph subnet node remove', - **{param: param_value for param, param_value in locals().items() if param not in ['self']} + "morph subnet node remove", + **{param: value for param, value in locals().items() if param not in ["self"]}, ) diff --git a/robot/resources/lib/python_keywords/cli_utils/adm/version.py b/robot/resources/lib/python_keywords/cli_utils/adm/version.py index 593aa1fa..d307e7a8 100644 --- a/robot/resources/lib/python_keywords/cli_utils/adm/version.py +++ b/robot/resources/lib/python_keywords/cli_utils/adm/version.py @@ -9,4 +9,4 @@ class NeofsAdmVersion(NeofsCliCommand): str: Command string """ - return self._execute('', version=True) + return self._execute("", version=True) diff --git a/robot/resources/lib/python_keywords/cli_utils/cli/accounting.py b/robot/resources/lib/python_keywords/cli_utils/cli/accounting.py index 52b9f439..de2bb907 100644 --- a/robot/resources/lib/python_keywords/cli_utils/cli/accounting.py +++ b/robot/resources/lib/python_keywords/cli_utils/cli/accounting.py @@ -25,9 +25,5 @@ class NeofsCliAccounting(NeofsCliCommand): """ return self._execute( "accounting balance", - **{ - param: param_value - for param, param_value in locals().items() - if param not in ["self"] - } + **{param: value for param, value in locals().items() if param not in ["self"]}, ) diff --git a/robot/resources/lib/python_keywords/cli_utils/cli/acl.py b/robot/resources/lib/python_keywords/cli_utils/cli/acl.py index 4902d62e..eaa5cee6 100644 --- a/robot/resources/lib/python_keywords/cli_utils/cli/acl.py +++ b/robot/resources/lib/python_keywords/cli_utils/cli/acl.py @@ -4,7 +4,9 @@ from cli_utils.cli_command import NeofsCliCommand class NeofsCliACL(NeofsCliCommand): - def extended_create(self, cid: str, out: str, file: Optional[str] = None, rule: Optional[list] = None) -> str: + def extended_create( + self, cid: str, out: str, file: Optional[str] = None, rule: Optional[list] = None + ) -> str: """Create extended ACL from the text representation. @@ -42,6 +44,6 @@ class NeofsCliACL(NeofsCliCommand): """ return self._execute( - 'acl extended create', - **{param: param_value for param, param_value in locals().items() if param not in ['self']} + "acl extended create", + **{param: value for param, value in locals().items() if param not in ["self"]}, ) diff --git a/robot/resources/lib/python_keywords/cli_utils/cli/container.py b/robot/resources/lib/python_keywords/cli_utils/cli/container.py index 292f51c4..239aa8c4 100644 --- a/robot/resources/lib/python_keywords/cli_utils/cli/container.py +++ b/robot/resources/lib/python_keywords/cli_utils/cli/container.py @@ -4,12 +4,25 @@ from cli_utils.cli_command import NeofsCliCommand class NeofsCliContainer(NeofsCliCommand): - def create(self, rpc_endpoint: str, wallet: str, address: Optional[str] = None, attributes: Optional[dict] = None, - basic_acl: Optional[str] = None, await_mode: bool = False, disable_timestamp: bool = False, - name: Optional[str] = None, nonce: Optional[str] = None, policy: Optional[str] = None, - session: Optional[str] = None, subnet: Optional[str] = None, ttl: Optional[int] = None, - xhdr: Optional[dict] = None) -> str: - """Create a new container and register it in the NeoFS. 
+ def create( + self, + rpc_endpoint: str, + wallet: str, + address: Optional[str] = None, + attributes: Optional[dict] = None, + basic_acl: Optional[str] = None, + await_mode: bool = False, + disable_timestamp: bool = False, + name: Optional[str] = None, + nonce: Optional[str] = None, + policy: Optional[str] = None, + session: Optional[str] = None, + subnet: Optional[str] = None, + ttl: Optional[int] = None, + xhdr: Optional[dict] = None, + ) -> str: + """ + Create a new container and register it in the NeoFS. It will be stored in the sidechain when the Inner Ring accepts it. Args: @@ -31,17 +44,26 @@ class NeofsCliContainer(NeofsCliCommand): Returns: str: Command string - """ return self._execute( - 'container create', - **{param: param_value for param, param_value in locals().items() if param not in ['self']} + "container create", + **{param: value for param, value in locals().items() if param not in ["self"]}, ) - def delete(self, rpc_endpoint: str, wallet: str, cid: str, address: Optional[str] = None, await_mode: bool = False, - session: Optional[str] = None, ttl: Optional[int] = None, xhdr: Optional[dict] = None, - force: bool = False) -> str: - """Delete an existing container. + def delete( + self, + rpc_endpoint: str, + wallet: str, + cid: str, + address: Optional[str] = None, + await_mode: bool = False, + session: Optional[str] = None, + ttl: Optional[int] = None, + xhdr: Optional[dict] = None, + force: bool = False, + ) -> str: + """ + Delete an existing container. Only the owner of the container has permission to remove the container. Args: @@ -55,20 +77,29 @@ class NeofsCliContainer(NeofsCliCommand): wallet: WIF (NEP-2) string or path to the wallet or binary key xhdr: Request X-Headers in form of Key=Value - Returns: str: Command string """ return self._execute( - 'container delete', - **{param: param_value for param, param_value in locals().items() if param not in ['self']} + "container delete", + **{param: value for param, value in locals().items() if param not in ["self"]}, ) - def get(self, rpc_endpoint: str, wallet: str, cid: str, address: Optional[str] = None, await_mode: bool = False, - to: Optional[str] = None, json_mode: bool = False, ttl: Optional[int] = None, - xhdr: Optional[dict] = None) -> str: - """Get container field info + def get( + self, + rpc_endpoint: str, + wallet: str, + cid: str, + address: Optional[str] = None, + await_mode: bool = False, + to: Optional[str] = None, + json_mode: bool = False, + ttl: Optional[int] = None, + xhdr: Optional[dict] = None, + ) -> str: + """ + Get container field info. Args: address: address of wallet account @@ -83,18 +114,26 @@ class NeofsCliContainer(NeofsCliCommand): Returns: str: Command string - """ - return self._execute( - 'container get', - **{param: param_value for param, param_value in locals().items() if param not in ['self']} + "container get", + **{param: value for param, value in locals().items() if param not in ["self"]}, ) - def get_eacl(self, rpc_endpoint: str, wallet: str, cid: str, address: Optional[str] = None, - await_mode: bool = False, to: Optional[str] = None, session: Optional[str] = None, - ttl: Optional[int] = None, xhdr: Optional[dict] = None) -> str: - """Get extended ACL talbe of container + def get_eacl( + self, + rpc_endpoint: str, + wallet: str, + cid: str, + address: Optional[str] = None, + await_mode: bool = False, + to: Optional[str] = None, + session: Optional[str] = None, + ttl: Optional[int] = None, + xhdr: Optional[dict] = None, + ) -> str: + """ + Get extended ACL table of container. 
Args: address: address of wallet account @@ -112,13 +151,22 @@ class NeofsCliContainer(NeofsCliCommand): """ return self._execute( - 'container get-eacl', - **{param: param_value for param, param_value in locals().items() if param not in ['self']} + "container get-eacl", + **{param: value for param, value in locals().items() if param not in ["self"]}, ) - def list(self, rpc_endpoint: str, wallet: str, address: Optional[str] = None, - owner: Optional[str] = None, ttl: Optional[int] = None, xhdr: Optional[dict] = None, **params) -> str: - """List all created containers + def list( + self, + rpc_endpoint: str, + wallet: str, + address: Optional[str] = None, + owner: Optional[str] = None, + ttl: Optional[int] = None, + xhdr: Optional[dict] = None, + **params, + ) -> str: + """ + List all created containers. Args: address: address of wallet account @@ -130,16 +178,23 @@ class NeofsCliContainer(NeofsCliCommand): Returns: str: Command string - """ return self._execute( - 'container list', - **{param: param_value for param, param_value in locals().items() if param not in ['self']} + "container list", + **{param: value for param, value in locals().items() if param not in ["self"]}, ) - def list_objects(self, rpc_endpoint: str, wallet: str, cid: str, address: Optional[str] = None, - ttl: Optional[int] = None, xhdr: Optional[dict] = None) -> str: - """List existing objects in container + def list_objects( + self, + rpc_endpoint: str, + wallet: str, + cid: str, + address: Optional[str] = None, + ttl: Optional[int] = None, + xhdr: Optional[dict] = None, + ) -> str: + """ + List existing objects in container. Args: address: address of wallet account @@ -151,18 +206,26 @@ class NeofsCliContainer(NeofsCliCommand): Returns: str: Command string - """ - return self._execute( - 'container list-objects', - **{param: param_value for param, param_value in locals().items() if param not in ['self']} + "container list-objects", + **{param: value for param, value in locals().items() if param not in ["self"]}, ) - def set_eacl(self, rpc_endpoint: str, wallet: str, cid: str, address: Optional[str] = None, - await_mode: bool = False, table: Optional[str] = None, session: Optional[str] = None, - ttl: Optional[int] = None, xhdr: Optional[dict] = None) -> str: - """Set a new extended ACL table for the container. + def set_eacl( + self, + rpc_endpoint: str, + wallet: str, + cid: str, + address: Optional[str] = None, + await_mode: bool = False, + table: Optional[str] = None, + session: Optional[str] = None, + ttl: Optional[int] = None, + xhdr: Optional[dict] = None, + ) -> str: + """ + Set a new extended ACL table for the container. Container ID in the EACL table will be substituted with the ID from the CLI. 
Args: @@ -178,9 +241,8 @@ class NeofsCliContainer(NeofsCliCommand): Returns: str: Command string - """ return self._execute( - 'container set-eacl', - **{param: param_value for param, param_value in locals().items() if param not in ['self']} + "container set-eacl", + **{param: value for param, value in locals().items() if param not in ["self"]}, ) diff --git a/robot/resources/lib/python_keywords/cli_utils/cli/object.py b/robot/resources/lib/python_keywords/cli_utils/cli/object.py index 085f722e..03e679ff 100644 --- a/robot/resources/lib/python_keywords/cli_utils/cli/object.py +++ b/robot/resources/lib/python_keywords/cli_utils/cli/object.py @@ -4,10 +4,20 @@ from cli_utils.cli_command import NeofsCliCommand class NeofsCliObject(NeofsCliCommand): - def delete(self, rpc_endpoint: str, wallet: str, cid: str, oid: str, address: Optional[str] = None, - bearer: Optional[str] = None, session: Optional[str] = None, ttl: Optional[int] = None, - xhdr: Optional[dict] = None) -> str: - """Delete object from NeoFS + def delete( + self, + rpc_endpoint: str, + wallet: str, + cid: str, + oid: str, + address: Optional[str] = None, + bearer: Optional[str] = None, + session: Optional[str] = None, + ttl: Optional[int] = None, + xhdr: Optional[dict] = None, + ) -> str: + """ + Delete object from NeoFS. Args: address: address of wallet account @@ -22,18 +32,30 @@ class NeofsCliObject(NeofsCliCommand): Returns: str: Command string - """ return self._execute( - 'object delete', - **{param: param_value for param, param_value in locals().items() if param not in ['self']} + "object delete", + **{param: value for param, value in locals().items() if param not in ["self"]}, ) - def get(self, rpc_endpoint: str, wallet: str, cid: str, oid: str, address: Optional[str] = None, - bearer: Optional[str] = None, file: Optional[str] = None, - header: Optional[str] = None, no_progress: bool = False, raw: bool = False, - session: Optional[str] = None, ttl: Optional[int] = None, xhdr: Optional[dict] = None) -> str: - """Get object from NeoFS + def get( + self, + rpc_endpoint: str, + wallet: str, + cid: str, + oid: str, + address: Optional[str] = None, + bearer: Optional[str] = None, + file: Optional[str] = None, + header: Optional[str] = None, + no_progress: bool = False, + raw: bool = False, + session: Optional[str] = None, + ttl: Optional[int] = None, + xhdr: Optional[dict] = None, + ) -> str: + """ + Get object from NeoFS. Args: address: address of wallet account @@ -52,17 +74,28 @@ class NeofsCliObject(NeofsCliCommand): Returns: str: Command string - """ return self._execute( - 'object get', - **{param: param_value for param, param_value in locals().items() if param not in ['self']} + "object get", + **{param: value for param, value in locals().items() if param not in ["self"]}, ) - def hash(self, rpc_endpoint: str, wallet: str, cid: str, oid: str, address: Optional[str] = None, - bearer: Optional[str] = None, range: Optional[str] = None, salt: Optional[str] = None, - ttl: Optional[int] = None, hash_type: Optional[str] = None, xhdr: Optional[dict] = None) -> str: - """Get object hash + def hash( + self, + rpc_endpoint: str, + wallet: str, + cid: str, + oid: str, + address: Optional[str] = None, + bearer: Optional[str] = None, + range: Optional[str] = None, + salt: Optional[str] = None, + ttl: Optional[int] = None, + hash_type: Optional[str] = None, + xhdr: Optional[dict] = None, + ) -> str: + """ + Get object hash. 
Args: address: address of wallet account @@ -79,78 +112,114 @@ class NeofsCliObject(NeofsCliCommand): Returns: str: Command string - """ return self._execute( - 'object hash', - **{param: param_value for param, param_value in locals().items() if param not in ['self', 'params']} + "object hash", + **{ + param: value for param, value in locals().items() if param not in ["self", "params"] + }, ) - def head(self, rpc_endpoint: str, wallet: str, cid: str, oid: str, address: Optional[str] = None, - bearer: Optional[str] = None, file: Optional[str] = None, - json_mode: bool = False, main_only: bool = False, proto: bool = False, raw: bool = False, - session: Optional[str] = None, ttl: Optional[int] = None, xhdr: Optional[dict] = None) -> str: - """Get object header + def head( + self, + rpc_endpoint: str, + wallet: str, + cid: str, + oid: str, + address: Optional[str] = None, + bearer: Optional[str] = None, + file: Optional[str] = None, + json_mode: bool = False, + main_only: bool = False, + proto: bool = False, + raw: bool = False, + session: Optional[str] = None, + ttl: Optional[int] = None, + xhdr: Optional[dict] = None, + ) -> str: + """ + Get object header. - Args: - address: address of wallet account - bearer: File with signed JSON or binary encoded bearer token - cid: Container ID - file: File to write object payload to. Default: stdout. - json_mode: Marshal output in JSON - main_only: Return only main fields - oid: Object ID - proto: Marshal output in Protobuf - raw: Set raw request option - rpc_endpoint: remote node address (as 'multiaddr' or ':') - session: path to a JSON-encoded container session token - ttl: TTL value in request meta header (default 2) - wallet: WIF (NEP-2) string or path to the wallet or binary key - xhdr: Request X-Headers in form of Key=Value + Args: + address: address of wallet account + bearer: File with signed JSON or binary encoded bearer token + cid: Container ID + file: File to write object payload to. Default: stdout. + json_mode: Marshal output in JSON + main_only: Return only main fields + oid: Object ID + proto: Marshal output in Protobuf + raw: Set raw request option + rpc_endpoint: remote node address (as 'multiaddr' or ':') + session: path to a JSON-encoded container session token + ttl: TTL value in request meta header (default 2) + wallet: WIF (NEP-2) string or path to the wallet or binary key + xhdr: Request X-Headers in form of Key=Value - - Returns: - str: Command string - - """ + Returns: + str: Command string + """ return self._execute( - 'object head', - **{param: param_value for param, param_value in locals().items() if param not in ['self']} + "object head", + **{param: value for param, value in locals().items() if param not in ["self"]}, ) - def lock(self, rpc_endpoint: str, wallet: str, cid: str, oid: str, lifetime: int, address: Optional[str] = None, - bearer: Optional[str] = None, session: Optional[str] = None, ttl: Optional[int] = None, - xhdr: Optional[dict] = None) -> str: - """Lock object in container + def lock( + self, + rpc_endpoint: str, + wallet: str, + cid: str, + oid: str, + lifetime: int, + address: Optional[str] = None, + bearer: Optional[str] = None, + session: Optional[str] = None, + ttl: Optional[int] = None, + xhdr: Optional[dict] = None, + ) -> str: + """ + Lock object in container. 
- Args: - address: address of wallet account - bearer: File with signed JSON or binary encoded bearer token - cid: Container ID - oid: Object ID - lifetime: Object lifetime - rpc_endpoint: remote node address (as 'multiaddr' or ':') - session: path to a JSON-encoded container session token - ttl: TTL value in request meta header (default 2) - wallet: WIF (NEP-2) string or path to the wallet or binary key - xhdr: Request X-Headers in form of Key=Value + Args: + address: address of wallet account + bearer: File with signed JSON or binary encoded bearer token + cid: Container ID + oid: Object ID + lifetime: Object lifetime + rpc_endpoint: remote node address (as 'multiaddr' or ':') + session: path to a JSON-encoded container session token + ttl: TTL value in request meta header (default 2) + wallet: WIF (NEP-2) string or path to the wallet or binary key + xhdr: Request X-Headers in form of Key=Value - - Returns: - str: Command string - - """ + Returns: + str: Command string + """ return self._execute( - 'object lock', - **{param: param_value for param, param_value in locals().items() if param not in ['self']} + "object lock", + **{param: value for param, value in locals().items() if param not in ["self"]}, ) - def put(self, rpc_endpoint: str, wallet: str, cid: str, file: str, address: Optional[str] = None, - attributes: Optional[dict] = None, bearer: Optional[str] = None, disable_filename: bool = False, - disable_timestamp: bool = False, expire_at: Optional[int] = None, no_progress: bool = False, - notify: Optional[str] = None, session: Optional[str] = None, ttl: Optional[int] = None, - xhdr: Optional[dict] = None) -> str: - """Put object to NeoFS + def put( + self, + rpc_endpoint: str, + wallet: str, + cid: str, + file: str, + address: Optional[str] = None, + attributes: Optional[dict] = None, + bearer: Optional[str] = None, + disable_filename: bool = False, + disable_timestamp: bool = False, + expire_at: Optional[int] = None, + no_progress: bool = False, + notify: Optional[str] = None, + session: Optional[str] = None, + ttl: Optional[int] = None, + xhdr: Optional[dict] = None, + ) -> str: + """ + Put object to NeoFS. Args: address: address of wallet account @@ -171,69 +240,90 @@ class NeofsCliObject(NeofsCliCommand): Returns: str: Command string - """ return self._execute( - 'object put', - **{param: param_value for param, param_value in locals().items() if param not in ['self']} + "object put", + **{param: value for param, value in locals().items() if param not in ["self"]}, ) - def range(self, rpc_endpoint: str, wallet: str, cid: str, oid: str, range: str, address: Optional[str] = None, - bearer: Optional[str] = None, file: Optional[str] = None, json_mode: bool = False, raw: bool = False, - session: Optional[str] = None, ttl: Optional[int] = None, xhdr: Optional[dict] = None) -> str: - """Get payload range data of an object + def range( + self, + rpc_endpoint: str, + wallet: str, + cid: str, + oid: str, + range: str, + address: Optional[str] = None, + bearer: Optional[str] = None, + file: Optional[str] = None, + json_mode: bool = False, + raw: bool = False, + session: Optional[str] = None, + ttl: Optional[int] = None, + xhdr: Optional[dict] = None, + ) -> str: + """ + Get payload range data of an object. - Args: - address: address of wallet account - bearer: File with signed JSON or binary encoded bearer token - cid: Container ID - file: File to write object payload to. Default: stdout. 
- json_mode: Marshal output in JSON - oid: Object ID - range: Range to take data from in the form offset:length - raw: Set raw request option - rpc_endpoint: remote node address (as 'multiaddr' or ':') - session: path to a JSON-encoded container session token - ttl: TTL value in request meta header (default 2) - wallet: WIF (NEP-2) string or path to the wallet or binary key - xhdr: Request X-Headers in form of Key=Value + Args: + address: address of wallet account + bearer: File with signed JSON or binary encoded bearer token + cid: Container ID + file: File to write object payload to. Default: stdout. + json_mode: Marshal output in JSON + oid: Object ID + range: Range to take data from in the form offset:length + raw: Set raw request option + rpc_endpoint: remote node address (as 'multiaddr' or ':') + session: path to a JSON-encoded container session token + ttl: TTL value in request meta header (default 2) + wallet: WIF (NEP-2) string or path to the wallet or binary key + xhdr: Request X-Headers in form of Key=Value - - Returns: - str: Command string - - """ + Returns: + str: Command string + """ return self._execute( - 'object range', - **{param: param_value for param, param_value in locals().items() if param not in ['self']} + "object range", + **{param: value for param, value in locals().items() if param not in ["self"]}, ) - def search(self, rpc_endpoint: str, wallet: str, cid: str, address: Optional[str] = None, - bearer: Optional[str] = None, filters: Optional[list] = None, oid: Optional[str] = None, - phy: bool = False, root: bool = False, session: Optional[str] = None, ttl: Optional[int] = None, - xhdr: Optional[dict] = None) -> str: - """Search object + def search( + self, + rpc_endpoint: str, + wallet: str, + cid: str, + address: Optional[str] = None, + bearer: Optional[str] = None, + filters: Optional[list] = None, + oid: Optional[str] = None, + phy: bool = False, + root: bool = False, + session: Optional[str] = None, + ttl: Optional[int] = None, + xhdr: Optional[dict] = None, + ) -> str: + """ + Search object. 
- Args: - address: address of wallet account - bearer: File with signed JSON or binary encoded bearer token - cid: Container ID - filters: Repeated filter expressions or files with protobuf JSON - oid: Object ID - phy: Search physically stored objects - root: Search for user objects - rpc_endpoint: remote node address (as 'multiaddr' or ':') - session: path to a JSON-encoded container session token - ttl: TTL value in request meta header (default 2) - wallet: WIF (NEP-2) string or path to the wallet or binary key - xhdr: Request X-Headers in form of Key=Value + Args: + address: address of wallet account + bearer: File with signed JSON or binary encoded bearer token + cid: Container ID + filters: Repeated filter expressions or files with protobuf JSON + oid: Object ID + phy: Search physically stored objects + root: Search for user objects + rpc_endpoint: remote node address (as 'multiaddr' or ':') + session: path to a JSON-encoded container session token + ttl: TTL value in request meta header (default 2) + wallet: WIF (NEP-2) string or path to the wallet or binary key + xhdr: Request X-Headers in form of Key=Value - - Returns: - str: Command string - - """ + Returns: + str: Command string + """ return self._execute( - 'object search', - **{param: param_value for param, param_value in locals().items() if param not in ['self']} + "object search", + **{param: value for param, value in locals().items() if param not in ["self"]}, ) diff --git a/robot/resources/lib/python_keywords/cli_utils/cli/version.py b/robot/resources/lib/python_keywords/cli_utils/cli/version.py index 4889449d..f8af747c 100644 --- a/robot/resources/lib/python_keywords/cli_utils/cli/version.py +++ b/robot/resources/lib/python_keywords/cli_utils/cli/version.py @@ -3,10 +3,10 @@ from cli_utils.cli_command import NeofsCliCommand class NeofsCliVersion(NeofsCliCommand): def get(self) -> str: - """Application version and NeoFS API compatibility + """ + Application version and NeoFS API compatibility. 
Returns: str: Command string - """ - return self._execute('', version=True) + return self._execute("", version=True) diff --git a/robot/resources/lib/python_keywords/cli_utils/cli_command.py b/robot/resources/lib/python_keywords/cli_utils/cli_command.py index 21c7f095..5d72b6a0 100644 --- a/robot/resources/lib/python_keywords/cli_utils/cli_command.py +++ b/robot/resources/lib/python_keywords/cli_utils/cli_command.py @@ -7,31 +7,40 @@ class NeofsCliCommand: neofs_cli_exec: Optional[str] = None timeout: Optional[int] = None __base_params: Optional[str] = None - map_params = {'json_mode': 'json', 'await_mode': 'await', 'hash_type': 'hash', 'doc_type': 'type'} + map_params = { + "json_mode": "json", + "await_mode": "await", + "hash_type": "hash", + "doc_type": "type", + } def __init__(self, neofs_cli_exec: str, timeout: int, **base_params): self.neofs_cli_exec = neofs_cli_exec self.timeout = timeout - self.__base_params = ' '.join([f'--{param} {value}' for param, value in base_params.items() if value]) + self.__base_params = " ".join( + [f"--{param} {value}" for param, value in base_params.items() if value] + ) def _format_command(self, command: str, **params) -> str: param_str = [] for param, value in params.items(): if param in self.map_params.keys(): param = self.map_params[param] - param = param.replace('_', '-') + param = param.replace("_", "-") if not value: continue if isinstance(value, bool): - param_str.append(f'--{param}') + param_str.append(f"--{param}") elif isinstance(value, int): - param_str.append(f'--{param} {value}') + param_str.append(f"--{param} {value}") elif isinstance(value, list): for value_item in value: val_str = str(value_item).replace("'", "\\'") param_str.append(f"--{param} '{val_str}'") elif isinstance(value, dict): - param_str.append(f'--{param} \'{",".join(f"{key}={val}" for key, val in value.items())}\'') + param_str.append( + f"--{param} '{','.join(f'{key}={val}' for key, val in value.items())}'" + ) else: if "'" in str(value): value_str = str(value).replace('"', '\\"') @@ -39,7 +48,7 @@ class NeofsCliCommand: else: param_str.append(f"--{param} '{value}'") - param_str = ' '.join(param_str) + param_str = " ".join(param_str) return f'{self.neofs_cli_exec} {self.__base_params} {command or ""} {param_str}' diff --git a/robot/resources/lib/python_keywords/container_access.py b/robot/resources/lib/python_keywords/container_access.py index 3f777368..9db7ebec 100644 --- a/robot/resources/lib/python_keywords/container_access.py +++ b/robot/resources/lib/python_keywords/container_access.py @@ -1,13 +1,26 @@ from typing import List, Optional from acl import EACLOperation -from python_keywords.object_access import (can_get_object, can_put_object, can_delete_object, can_get_head_object, - can_get_range_hash_of_object, can_get_range_of_object, can_search_object) +from python_keywords.object_access import ( + can_delete_object, + can_get_head_object, + can_get_object, + can_get_range_hash_of_object, + can_get_range_of_object, + can_put_object, + can_search_object, +) -def check_full_access_to_container(wallet: str, cid: str, oid: str, file_name: str, - bearer: Optional[str] = None, wallet_config: Optional[str] = None, - xhdr: Optional[dict] = None): +def check_full_access_to_container( + wallet: str, + cid: str, + oid: str, + file_name: str, + bearer: Optional[str] = None, + wallet_config: Optional[str] = None, + xhdr: Optional[dict] = None, +): assert can_put_object(wallet, cid, file_name, bearer, wallet_config, xhdr) assert can_get_head_object(wallet, cid, oid, bearer, 
wallet_config, xhdr) assert can_get_range_of_object(wallet, cid, oid, bearer, wallet_config, xhdr) @@ -17,9 +30,15 @@ def check_full_access_to_container(wallet: str, cid: str, oid: str, file_name: s assert can_delete_object(wallet, cid, oid, bearer, wallet_config, xhdr) -def check_no_access_to_container(wallet: str, cid: str, oid: str, file_name: str, - bearer: Optional[str] = None, wallet_config: Optional[str] = None, - xhdr: Optional[dict] = None): +def check_no_access_to_container( + wallet: str, + cid: str, + oid: str, + file_name: str, + bearer: Optional[str] = None, + wallet_config: Optional[str] = None, + xhdr: Optional[dict] = None, +): assert not can_put_object(wallet, cid, file_name, bearer, wallet_config, xhdr) assert not can_get_head_object(wallet, cid, oid, bearer, wallet_config, xhdr) assert not can_get_range_of_object(wallet, cid, oid, bearer, wallet_config, xhdr) @@ -29,42 +48,78 @@ def check_no_access_to_container(wallet: str, cid: str, oid: str, file_name: str assert not can_delete_object(wallet, cid, oid, bearer, wallet_config, xhdr) -def check_custom_access_to_container(wallet: str, cid: str, oid: str, file_name: str, - deny_operations: Optional[List[EACLOperation]] = None, - ignore_operations: Optional[List[EACLOperation]] = None, - bearer: Optional[str] = None, wallet_config: Optional[str] = None, - xhdr: Optional[dict] = None): +def check_custom_access_to_container( + wallet: str, + cid: str, + oid: str, + file_name: str, + deny_operations: Optional[List[EACLOperation]] = None, + ignore_operations: Optional[List[EACLOperation]] = None, + bearer: Optional[str] = None, + wallet_config: Optional[str] = None, + xhdr: Optional[dict] = None, +): deny_operations = [op.value for op in deny_operations or []] ignore_operations = [op.value for op in ignore_operations or []] checks: dict = {} if EACLOperation.PUT.value not in ignore_operations: - checks[EACLOperation.PUT.value] = can_put_object(wallet, cid, file_name, bearer, wallet_config, xhdr) + checks[EACLOperation.PUT.value] = can_put_object( + wallet, cid, file_name, bearer, wallet_config, xhdr + ) if EACLOperation.HEAD.value not in ignore_operations: - checks[EACLOperation.HEAD.value] = can_get_head_object(wallet, cid, oid, bearer, wallet_config, xhdr) + checks[EACLOperation.HEAD.value] = can_get_head_object( + wallet, cid, oid, bearer, wallet_config, xhdr + ) if EACLOperation.GET_RANGE.value not in ignore_operations: - checks[EACLOperation.GET_RANGE.value] = can_get_range_of_object(wallet, cid, oid, bearer, wallet_config, xhdr) + checks[EACLOperation.GET_RANGE.value] = can_get_range_of_object( + wallet, cid, oid, bearer, wallet_config, xhdr + ) if EACLOperation.GET_RANGE_HASH.value not in ignore_operations: - checks[EACLOperation.GET_RANGE_HASH.value] = can_get_range_hash_of_object(wallet, cid, oid, bearer, - wallet_config, xhdr) + checks[EACLOperation.GET_RANGE_HASH.value] = can_get_range_hash_of_object( + wallet, cid, oid, bearer, wallet_config, xhdr + ) if EACLOperation.SEARCH.value not in ignore_operations: - checks[EACLOperation.SEARCH.value] = can_search_object(wallet, cid, oid, bearer, wallet_config, xhdr) + checks[EACLOperation.SEARCH.value] = can_search_object( + wallet, cid, oid, bearer, wallet_config, xhdr + ) if EACLOperation.GET.value not in ignore_operations: - checks[EACLOperation.GET.value] = can_get_object(wallet, cid, oid, file_name, bearer, wallet_config, xhdr) + checks[EACLOperation.GET.value] = can_get_object( + wallet, cid, oid, file_name, bearer, wallet_config, xhdr + ) if 
EACLOperation.DELETE.value not in ignore_operations: - checks[EACLOperation.DELETE.value] = can_delete_object(wallet, cid, oid, bearer, wallet_config, xhdr) + checks[EACLOperation.DELETE.value] = can_delete_object( + wallet, cid, oid, bearer, wallet_config, xhdr + ) - failed_checks = ( - [f'allowed {action} failed' for action, success in checks.items() if - not success and action not in deny_operations] + - [f'denied {action} succeeded' for action, success in checks.items() if - success and action in deny_operations]) + failed_checks = [ + f"allowed {action} failed" + for action, success in checks.items() + if not success and action not in deny_operations + ] + [ + f"denied {action} succeeded" + for action, success in checks.items() + if success and action in deny_operations + ] assert not failed_checks, ", ".join(failed_checks) -def check_read_only_container(wallet: str, cid: str, oid: str, file_name: str, - bearer: Optional[str] = None, wallet_config: Optional[str] = None, - xhdr: Optional[dict] = None): - return check_custom_access_to_container(wallet, cid, oid, file_name, - deny_operations=[EACLOperation.PUT, EACLOperation.DELETE], - bearer=bearer, wallet_config=wallet_config, xhdr=xhdr) +def check_read_only_container( + wallet: str, + cid: str, + oid: str, + file_name: str, + bearer: Optional[str] = None, + wallet_config: Optional[str] = None, + xhdr: Optional[dict] = None, +): + return check_custom_access_to_container( + wallet, + cid, + oid, + file_name, + deny_operations=[EACLOperation.PUT, EACLOperation.DELETE], + bearer=bearer, + wallet_config=wallet_config, + xhdr=xhdr, + ) diff --git a/robot/resources/lib/python_keywords/failover_utils.py b/robot/resources/lib/python_keywords/failover_utils.py index 90d523a8..ef3e6283 100644 --- a/robot/resources/lib/python_keywords/failover_utils.py +++ b/robot/resources/lib/python_keywords/failover_utils.py @@ -3,17 +3,21 @@ from time import sleep from typing import Optional import allure - from common import NEOFS_NETMAP_DICT from python_keywords.node_management import node_healthcheck from storage_policy import get_nodes_with_object -logger = logging.getLogger('NeoLogger') +logger = logging.getLogger("NeoLogger") -@allure.step('Wait for object replication') -def wait_object_replication_on_nodes(wallet: str, cid: str, oid: str, expected_copies: int, - excluded_nodes: Optional[list[str]] = None) -> list[str]: +@allure.step("Wait for object replication") +def wait_object_replication_on_nodes( + wallet: str, + cid: str, + oid: str, + expected_copies: int, + excluded_nodes: Optional[list[str]] = None, +) -> list[str]: excluded_nodes = excluded_nodes or [] sleep_interval, attempts = 10, 18 nodes = [] @@ -22,28 +26,30 @@ def wait_object_replication_on_nodes(wallet: str, cid: str, oid: str, expected_c if len(nodes) == expected_copies: return nodes sleep(sleep_interval) - raise AssertionError(f'Expected {expected_copies} copies of object, but found {len(nodes)}. ' - f'Waiting time {sleep_interval * attempts}') + raise AssertionError( + f"Expected {expected_copies} copies of object, but found {len(nodes)}. 
" + f"Waiting time {sleep_interval * attempts}" + ) -@allure.step('Wait for storage node returned to cluster') +@allure.step("Wait for storage node returned to cluster") def wait_all_storage_node_returned(): sleep_interval, attempts = 10, 12 for __attempt in range(attempts): if is_all_storage_node_returned(): return sleep(sleep_interval) - raise AssertionError('Storage node(s) is broken') + raise AssertionError("Storage node(s) is broken") def is_all_storage_node_returned() -> bool: - with allure.step('Run health check for all storage nodes'): + with allure.step("Run health check for all storage nodes"): for node_name in NEOFS_NETMAP_DICT.keys(): try: health_check = node_healthcheck(node_name) except Exception as err: - logger.warning(f'Node healthcheck fails with error {err}') + logger.warning(f"Node healthcheck fails with error {err}") return False - if health_check.health_status != 'READY' or health_check.network_status != 'ONLINE': + if health_check.health_status != "READY" or health_check.network_status != "ONLINE": return False return True diff --git a/robot/resources/lib/python_keywords/object_access.py b/robot/resources/lib/python_keywords/object_access.py index 754f8f37..7398ec5a 100644 --- a/robot/resources/lib/python_keywords/object_access.py +++ b/robot/resources/lib/python_keywords/object_access.py @@ -1,99 +1,181 @@ from typing import Optional import allure - from grpc_responses import OBJECT_ACCESS_DENIED, error_matches_status -from python_keywords.neofs_verbs import (delete_object, get_object, get_range, get_range_hash, head_object, put_object, - search_object) +from python_keywords.neofs_verbs import ( + delete_object, + get_object, + get_range, + get_range_hash, + head_object, + put_object, + search_object, +) from python_keywords.utility_keywords import get_file_hash OPERATION_ERROR_TYPE = RuntimeError -def can_get_object(wallet: str, cid: str, oid: str, file_name: str, bearer: Optional[str] = None, - wallet_config: Optional[str] = None, xhdr: Optional[dict] = None - ) -> bool: - with allure.step('Try get object from container'): +def can_get_object( + wallet: str, + cid: str, + oid: str, + file_name: str, + bearer: Optional[str] = None, + wallet_config: Optional[str] = None, + xhdr: Optional[dict] = None, +) -> bool: + with allure.step("Try get object from container"): try: - got_file_path = get_object(wallet, cid, oid, bearer_token=bearer, wallet_config=wallet_config, xhdr=xhdr) + got_file_path = get_object( + wallet, cid, oid, bearer_token=bearer, wallet_config=wallet_config, xhdr=xhdr + ) except OPERATION_ERROR_TYPE as err: - assert error_matches_status(err, OBJECT_ACCESS_DENIED), f'Expected {err} to match {OBJECT_ACCESS_DENIED}' + assert error_matches_status( + err, OBJECT_ACCESS_DENIED + ), f"Expected {err} to match {OBJECT_ACCESS_DENIED}" return False assert get_file_hash(file_name) == get_file_hash(got_file_path) return True -def can_put_object(wallet: str, cid: str, file_name: str, bearer: Optional[str] = None, - wallet_config: Optional[str] = None, xhdr: Optional[dict] = None, attributes: Optional[dict] = None, - ) -> bool: - with allure.step('Try put object to container'): +def can_put_object( + wallet: str, + cid: str, + file_name: str, + bearer: Optional[str] = None, + wallet_config: Optional[str] = None, + xhdr: Optional[dict] = None, + attributes: Optional[dict] = None, +) -> bool: + with allure.step("Try put object to container"): try: - put_object(wallet, file_name, cid, bearer=bearer, wallet_config=wallet_config, xhdr=xhdr, - attributes=attributes) + 
put_object( + wallet, + file_name, + cid, + bearer=bearer, + wallet_config=wallet_config, + xhdr=xhdr, + attributes=attributes, + ) except OPERATION_ERROR_TYPE as err: - assert error_matches_status(err, OBJECT_ACCESS_DENIED), f'Expected {err} to match {OBJECT_ACCESS_DENIED}' + assert error_matches_status( + err, OBJECT_ACCESS_DENIED + ), f"Expected {err} to match {OBJECT_ACCESS_DENIED}" return False return True -def can_delete_object(wallet: str, cid: str, oid: str, bearer: Optional[str] = None, - wallet_config: Optional[str] = None, xhdr: Optional[dict] = None - ) -> bool: - with allure.step('Try delete object from container'): +def can_delete_object( + wallet: str, + cid: str, + oid: str, + bearer: Optional[str] = None, + wallet_config: Optional[str] = None, + xhdr: Optional[dict] = None, +) -> bool: + with allure.step("Try delete object from container"): try: delete_object(wallet, cid, oid, bearer=bearer, wallet_config=wallet_config, xhdr=xhdr) except OPERATION_ERROR_TYPE as err: - assert error_matches_status(err, OBJECT_ACCESS_DENIED), f'Expected {err} to match {OBJECT_ACCESS_DENIED}' + assert error_matches_status( + err, OBJECT_ACCESS_DENIED + ), f"Expected {err} to match {OBJECT_ACCESS_DENIED}" return False return True -def can_get_head_object(wallet: str, cid: str, oid: str, bearer: Optional[str] = None, - wallet_config: Optional[str] = None, xhdr: Optional[dict] = None - ) -> bool: - with allure.step('Try get head of object'): +def can_get_head_object( + wallet: str, + cid: str, + oid: str, + bearer: Optional[str] = None, + wallet_config: Optional[str] = None, + xhdr: Optional[dict] = None, +) -> bool: + with allure.step("Try get head of object"): try: - head_object(wallet, cid, oid, bearer_token=bearer, wallet_config=wallet_config, xhdr=xhdr) + head_object( + wallet, cid, oid, bearer_token=bearer, wallet_config=wallet_config, xhdr=xhdr + ) except OPERATION_ERROR_TYPE as err: - assert error_matches_status(err, OBJECT_ACCESS_DENIED), f'Expected {err} to match {OBJECT_ACCESS_DENIED}' + assert error_matches_status( + err, OBJECT_ACCESS_DENIED + ), f"Expected {err} to match {OBJECT_ACCESS_DENIED}" return False return True -def can_get_range_of_object(wallet: str, cid: str, oid: str, bearer: Optional[str] = None, - wallet_config: Optional[str] = None, xhdr: Optional[dict] = None - ) -> bool: - with allure.step('Try get range of object'): +def can_get_range_of_object( + wallet: str, + cid: str, + oid: str, + bearer: Optional[str] = None, + wallet_config: Optional[str] = None, + xhdr: Optional[dict] = None, +) -> bool: + with allure.step("Try get range of object"): try: - get_range(wallet, cid, oid, bearer=bearer, range_cut='0:10', wallet_config=wallet_config, - xhdr=xhdr) + get_range( + wallet, + cid, + oid, + bearer=bearer, + range_cut="0:10", + wallet_config=wallet_config, + xhdr=xhdr, + ) except OPERATION_ERROR_TYPE as err: - assert error_matches_status(err, OBJECT_ACCESS_DENIED), f'Expected {err} to match {OBJECT_ACCESS_DENIED}' + assert error_matches_status( + err, OBJECT_ACCESS_DENIED + ), f"Expected {err} to match {OBJECT_ACCESS_DENIED}" return False return True -def can_get_range_hash_of_object(wallet: str, cid: str, oid: str, bearer: Optional[str] = None, - wallet_config: Optional[str] = None, xhdr: Optional[dict] = None - ) -> bool: - with allure.step('Try get range hash of object'): +def can_get_range_hash_of_object( + wallet: str, + cid: str, + oid: str, + bearer: Optional[str] = None, + wallet_config: Optional[str] = None, + xhdr: Optional[dict] = None, +) -> bool: + with 
allure.step("Try get range hash of object"): try: - get_range_hash(wallet, cid, oid, bearer_token=bearer, range_cut='0:10', wallet_config=wallet_config, - xhdr=xhdr) + get_range_hash( + wallet, + cid, + oid, + bearer_token=bearer, + range_cut="0:10", + wallet_config=wallet_config, + xhdr=xhdr, + ) except OPERATION_ERROR_TYPE as err: - assert error_matches_status(err, OBJECT_ACCESS_DENIED), f'Expected {err} to match {OBJECT_ACCESS_DENIED}' + assert error_matches_status( + err, OBJECT_ACCESS_DENIED + ), f"Expected {err} to match {OBJECT_ACCESS_DENIED}" return False return True -def can_search_object(wallet: str, cid: str, oid: Optional[str] = None, bearer: Optional[str] = None, - wallet_config: Optional[str] = None, xhdr: Optional[dict] = None - ) -> bool: - with allure.step('Try search object in container'): +def can_search_object( + wallet: str, + cid: str, + oid: Optional[str] = None, + bearer: Optional[str] = None, + wallet_config: Optional[str] = None, + xhdr: Optional[dict] = None, +) -> bool: + with allure.step("Try search object in container"): try: oids = search_object(wallet, cid, bearer=bearer, wallet_config=wallet_config, xhdr=xhdr) except OPERATION_ERROR_TYPE as err: - assert error_matches_status(err, OBJECT_ACCESS_DENIED), f'Expected {err} to match {OBJECT_ACCESS_DENIED}' + assert error_matches_status( + err, OBJECT_ACCESS_DENIED + ), f"Expected {err} to match {OBJECT_ACCESS_DENIED}" return False if oid: return oid in oids diff --git a/robot/resources/lib/python_keywords/storage_group.py b/robot/resources/lib/python_keywords/storage_group.py index 45470cf3..7844db29 100644 --- a/robot/resources/lib/python_keywords/storage_group.py +++ b/robot/resources/lib/python_keywords/storage_group.py @@ -6,22 +6,16 @@ """ import logging +import allure from cli_helpers import _cmd_run -from common import ( - COMPLEX_OBJ_SIZE, - NEOFS_CLI_EXEC, - NEOFS_ENDPOINT, - SIMPLE_OBJ_SIZE, - WALLET_CONFIG, -) +from common import COMPLEX_OBJ_SIZE, NEOFS_CLI_EXEC, NEOFS_ENDPOINT, SIMPLE_OBJ_SIZE, WALLET_CONFIG from complex_object_actions import get_link_object from neofs_verbs import head_object -import allure - logger = logging.getLogger("NeoLogger") -@allure.step('Put Storagegroup') + +@allure.step("Put Storagegroup") def put_storagegroup( wallet: str, cid: str, @@ -55,7 +49,7 @@ def put_storagegroup( return oid -@allure.step('List Storagegroup') +@allure.step("List Storagegroup") def list_storagegroup( wallet: str, cid: str, bearer_token: str = "", wallet_config: str = WALLET_CONFIG ): @@ -82,7 +76,7 @@ def list_storagegroup( return found_objects -@allure.step('Get Storagegroup') +@allure.step("Get Storagegroup") def get_storagegroup( wallet: str, cid: str, @@ -128,7 +122,7 @@ def get_storagegroup( return sg_dict -@allure.step('Delete Storagegroup') +@allure.step("Delete Storagegroup") def delete_storagegroup( wallet: str, cid: str, @@ -159,7 +153,7 @@ def delete_storagegroup( return tombstone_id -@allure.step('Verify list operation over Storagegroup') +@allure.step("Verify list operation over Storagegroup") def verify_list_storage_group( wallet: str, cid: str, @@ -173,7 +167,7 @@ def verify_list_storage_group( assert storagegroup in storage_groups -@allure.step('Verify get operation over Storagegroup') +@allure.step("Verify get operation over Storagegroup") def verify_get_storage_group( wallet: str, cid: str, diff --git a/robot/variables/common.py b/robot/variables/common.py index 120bdb80..c32a7ac9 100644 --- a/robot/variables/common.py +++ b/robot/variables/common.py @@ -9,9 +9,9 @@ 
CONTAINER_WAIT_INTERVAL = "1m" SIMPLE_OBJ_SIZE = int(os.getenv("SIMPLE_OBJ_SIZE", "1000")) COMPLEX_OBJ_SIZE = int(os.getenv("COMPLEX_OBJ_SIZE", "2000")) -MAINNET_BLOCK_TIME = os.getenv('MAINNET_BLOCK_TIME', "1s") -MAINNET_TIMEOUT = os.getenv('MAINNET_TIMEOUT', "1min") -MORPH_BLOCK_TIME = os.getenv("MORPH_BLOCK_TIME", '1s') +MAINNET_BLOCK_TIME = os.getenv("MAINNET_BLOCK_TIME", "1s") +MAINNET_TIMEOUT = os.getenv("MAINNET_TIMEOUT", "1min") +MORPH_BLOCK_TIME = os.getenv("MORPH_BLOCK_TIME", "1s") NEOFS_CONTRACT_CACHE_TIMEOUT = os.getenv("NEOFS_CONTRACT_CACHE_TIMEOUT", "30s") # Time interval that allows a GC pass on storage node (this includes GC sleep interval @@ -20,11 +20,11 @@ STORAGE_GC_TIME = os.getenv("STORAGE_GC_TIME", "75s") NEOFS_ENDPOINT = os.getenv("NEOFS_ENDPOINT", "s01.neofs.devenv:8080") -NEO_MAINNET_ENDPOINT = os.getenv("NEO_MAINNET_ENDPOINT", 'http://main-chain.neofs.devenv:30333') -MORPH_ENDPOINT = os.getenv("MORPH_ENDPOINT", 'http://morph-chain.neofs.devenv:30333') -HTTP_GATE = os.getenv("HTTP_GATE", 'http://http.neofs.devenv') -S3_GATE = os.getenv("S3_GATE", 'https://s3.neofs.devenv:8080') -GAS_HASH = '0xd2a4cff31913016155e38e474a2c06d08be276cf' +NEO_MAINNET_ENDPOINT = os.getenv("NEO_MAINNET_ENDPOINT", "http://main-chain.neofs.devenv:30333") +MORPH_ENDPOINT = os.getenv("MORPH_ENDPOINT", "http://morph-chain.neofs.devenv:30333") +HTTP_GATE = os.getenv("HTTP_GATE", "http://http.neofs.devenv") +S3_GATE = os.getenv("S3_GATE", "https://s3.neofs.devenv:8080") +GAS_HASH = "0xd2a4cff31913016155e38e474a2c06d08be276cf" NEOFS_CONTRACT = os.getenv("NEOFS_IR_CONTRACTS_NEOFS") @@ -43,53 +43,67 @@ STORAGE_CONTROL_ENDPOINT_1 = os.getenv("STORAGE_CONTROL_ENDPOINT_1", "s01.neofs. STORAGE_CONTROL_ENDPOINT_2 = os.getenv("STORAGE_CONTROL_ENDPOINT_2", "s02.neofs.devenv:8081") STORAGE_CONTROL_ENDPOINT_3 = os.getenv("STORAGE_CONTROL_ENDPOINT_3", "s03.neofs.devenv:8081") STORAGE_CONTROL_ENDPOINT_4 = os.getenv("STORAGE_CONTROL_ENDPOINT_4", "s04.neofs.devenv:8081") -STORAGE_CONTROL_ENDPOINT_PRIVATE = os.getenv("STORAGE_CONTROL_ENDPOINT_PRIVATE", "false").lower() == "true" +STORAGE_CONTROL_ENDPOINT_PRIVATE = ( + os.getenv("STORAGE_CONTROL_ENDPOINT_PRIVATE", "false").lower() == "true" +) -STORAGE_WALLET_PATH_1 = os.getenv("STORAGE_WALLET_PATH_1", f"{DEVENV_PATH}/services/storage/wallet01.json") -STORAGE_WALLET_PATH_2 = os.getenv("STORAGE_WALLET_PATH_2", f"{DEVENV_PATH}/services/storage/wallet02.json") -STORAGE_WALLET_PATH_3 = os.getenv("STORAGE_WALLET_PATH_3", f"{DEVENV_PATH}/services/storage/wallet03.json") -STORAGE_WALLET_PATH_4 = os.getenv("STORAGE_WALLET_PATH_4", f"{DEVENV_PATH}/services/storage/wallet04.json") +STORAGE_WALLET_PATH_1 = os.getenv( + "STORAGE_WALLET_PATH_1", f"{DEVENV_PATH}/services/storage/wallet01.json" +) +STORAGE_WALLET_PATH_2 = os.getenv( + "STORAGE_WALLET_PATH_2", f"{DEVENV_PATH}/services/storage/wallet02.json" +) +STORAGE_WALLET_PATH_3 = os.getenv( + "STORAGE_WALLET_PATH_3", f"{DEVENV_PATH}/services/storage/wallet03.json" +) +STORAGE_WALLET_PATH_4 = os.getenv( + "STORAGE_WALLET_PATH_4", f"{DEVENV_PATH}/services/storage/wallet04.json" +) STORAGE_WALLET_PATH = STORAGE_WALLET_PATH_1 STORAGE_WALLET_PASS = os.getenv("STORAGE_WALLET_PASS", "") STORAGE_WALLET_CONFIG = f"{CLI_CONFIGS_PATH}/empty_passwd.yml" NEOFS_NETMAP_DICT = { - 's01': { - 'rpc': STORAGE_RPC_ENDPOINT_1, - 'control': STORAGE_CONTROL_ENDPOINT_1, - 'wallet_path': STORAGE_WALLET_PATH_1, - 'UN-LOCODE': 'RU MOW' + "s01": { + "rpc": STORAGE_RPC_ENDPOINT_1, + "control": STORAGE_CONTROL_ENDPOINT_1, + "wallet_path": 
STORAGE_WALLET_PATH_1, + "UN-LOCODE": "RU MOW", }, - 's02': { - 'rpc': STORAGE_RPC_ENDPOINT_2, - 'control': STORAGE_CONTROL_ENDPOINT_2, - 'wallet_path': STORAGE_WALLET_PATH_2, - 'UN-LOCODE': 'RU LED' + "s02": { + "rpc": STORAGE_RPC_ENDPOINT_2, + "control": STORAGE_CONTROL_ENDPOINT_2, + "wallet_path": STORAGE_WALLET_PATH_2, + "UN-LOCODE": "RU LED", }, - 's03': { - 'rpc': STORAGE_RPC_ENDPOINT_3, - 'control': STORAGE_CONTROL_ENDPOINT_3, - 'wallet_path': STORAGE_WALLET_PATH_3, - 'UN-LOCODE': 'SE STO' + "s03": { + "rpc": STORAGE_RPC_ENDPOINT_3, + "control": STORAGE_CONTROL_ENDPOINT_3, + "wallet_path": STORAGE_WALLET_PATH_3, + "UN-LOCODE": "SE STO", }, - 's04': { - 'rpc': STORAGE_RPC_ENDPOINT_4, - 'control': STORAGE_CONTROL_ENDPOINT_4, - 'wallet_path': STORAGE_WALLET_PATH_4, - 'UN-LOCODE': 'FI HEL' + "s04": { + "rpc": STORAGE_RPC_ENDPOINT_4, + "control": STORAGE_CONTROL_ENDPOINT_4, + "wallet_path": STORAGE_WALLET_PATH_4, + "UN-LOCODE": "FI HEL", }, } -NEOFS_NETMAP = [i['rpc'] for i in NEOFS_NETMAP_DICT.values()] -NEOGO_EXECUTABLE = os.getenv('NEOGO_EXECUTABLE', 'neo-go') -NEOFS_CLI_EXEC = os.getenv('NEOFS_CLI_EXEC', 'neofs-cli') +NEOFS_NETMAP = [i["rpc"] for i in NEOFS_NETMAP_DICT.values()] +NEOGO_EXECUTABLE = os.getenv("NEOGO_EXECUTABLE", "neo-go") +NEOFS_CLI_EXEC = os.getenv("NEOFS_CLI_EXEC", "neofs-cli") # Config of wallet with empty password. We use it for: # - for wallet of user on behalf of whom we are running tests # - for wallets of storage nodes WALLET_CONFIG = f"{CLI_CONFIGS_PATH}/empty_passwd.yml" -MAINNET_WALLET_PATH = os.getenv("MAINNET_WALLET_PATH", f"{DEVENV_PATH}/services/chain/node-wallet.json") -MAINNET_WALLET_CONFIG = os.getenv("MAINNET_WALLET_CONFIG", f"{CLI_CONFIGS_PATH}/one_wallet_password.yml") +MAINNET_WALLET_PATH = os.getenv( + "MAINNET_WALLET_PATH", f"{DEVENV_PATH}/services/chain/node-wallet.json" +) +MAINNET_WALLET_CONFIG = os.getenv( + "MAINNET_WALLET_CONFIG", f"{CLI_CONFIGS_PATH}/one_wallet_password.yml" +) MAINNET_SINGLE_ADDR = os.getenv("MAINNET_SINGLE_ADDR", "NfgHwwTi3wHAS8aFAN243C5vGbkYDpqLHP") MAINNET_WALLET_PASS = os.getenv("MAINNET_WALLET_PASS", "one") @@ -97,7 +111,9 @@ IR_WALLET_PATH = os.getenv("IR_WALLET_PATH", f"{DEVENV_PATH}/services/ir/az.json IR_WALLET_CONFIG = os.getenv("IR_WALLET_CONFIG", f"{CLI_CONFIGS_PATH}/one_wallet_password.yml") IR_WALLET_PASS = os.getenv("IR_WALLET_PASS", "one") -S3_GATE_WALLET_PATH = os.getenv("S3_GATE_WALLET_PATH", f"{DEVENV_PATH}/services/s3_gate/wallet.json") +S3_GATE_WALLET_PATH = os.getenv( + "S3_GATE_WALLET_PATH", f"{DEVENV_PATH}/services/s3_gate/wallet.json" +) S3_GATE_WALLET_PASS = os.getenv("S3_GATE_WALLET_PASS", "s3") # Parameters that control SSH connection to storage node @@ -109,7 +125,7 @@ STORAGE_NODE_SSH_PRIVATE_KEY_PATH = os.getenv("STORAGE_NODE_SSH_PRIVATE_KEY_PATH STORAGE_NODE_BIN_PATH = os.getenv("STORAGE_NODE_BIN_PATH", f"{DEVENV_PATH}/vendor") # Path and config for neofs-adm utility. 
Optional if tests are running against devenv -NEOFS_ADM_EXEC = os.getenv('NEOFS_ADM_EXEC', 'neofs-adm') +NEOFS_ADM_EXEC = os.getenv("NEOFS_ADM_EXEC", "neofs-adm") NEOFS_ADM_CONFIG_PATH = os.getenv("NEOFS_ADM_CONFIG_PATH") diff --git a/robot/variables/eacl_object_filters.py b/robot/variables/eacl_object_filters.py index 1fc96141..d953a1e3 100644 --- a/robot/variables/eacl_object_filters.py +++ b/robot/variables/eacl_object_filters.py @@ -1,21 +1,23 @@ -EACL_OBJ_FILTERS = {'$Object:objectID': 'objectID', - '$Object:containerID': 'containerID', - '$Object:ownerID': 'ownerID', - '$Object:creationEpoch': 'creationEpoch', - '$Object:payloadLength': 'payloadLength', - '$Object:payloadHash': 'payloadHash', - '$Object:objectType': 'objectType', - '$Object:homomorphicHash': 'homomorphicHash', - '$Object:version': 'version'} +EACL_OBJ_FILTERS = { + "$Object:objectID": "objectID", + "$Object:containerID": "containerID", + "$Object:ownerID": "ownerID", + "$Object:creationEpoch": "creationEpoch", + "$Object:payloadLength": "payloadLength", + "$Object:payloadHash": "payloadHash", + "$Object:objectType": "objectType", + "$Object:homomorphicHash": "homomorphicHash", + "$Object:version": "version", +} VERB_FILTER_DEP = { - '$Object:objectID': ['GET', 'HEAD', 'DELETE', 'RANGE', 'RANGEHASH'], - '$Object:containerID': ['GET', 'PUT', 'HEAD', 'DELETE', 'SEARCH', 'RANGE', 'RANGEHASH'], - '$Object:ownerID': ['GET', 'HEAD'], - '$Object:creationEpoch': ['GET', 'PUT', 'HEAD'], - '$Object:payloadLength': ['GET', 'PUT', 'HEAD'], - '$Object:payloadHash': ['GET', 'PUT', 'HEAD'], - '$Object:objectType': ['GET', 'PUT', 'HEAD'], - '$Object:homomorphicHash': ['GET', 'PUT', 'HEAD'], - '$Object:version': ['GET', 'PUT', 'HEAD'] - } + "$Object:objectID": ["GET", "HEAD", "DELETE", "RANGE", "RANGEHASH"], + "$Object:containerID": ["GET", "PUT", "HEAD", "DELETE", "SEARCH", "RANGE", "RANGEHASH"], + "$Object:ownerID": ["GET", "HEAD"], + "$Object:creationEpoch": ["GET", "PUT", "HEAD"], + "$Object:payloadLength": ["GET", "PUT", "HEAD"], + "$Object:payloadHash": ["GET", "PUT", "HEAD"], + "$Object:objectType": ["GET", "PUT", "HEAD"], + "$Object:homomorphicHash": ["GET", "PUT", "HEAD"], + "$Object:version": ["GET", "PUT", "HEAD"], +}
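
For reference, the pattern repeated throughout the reformatted wrappers above — collecting a method's keyword arguments with locals(), dropping "self", and letting the command builder render the rest as CLI flags — can be reduced to a small standalone sketch. The names below (format_command, demo_delete) are illustrative stand-ins only, not the project's API, and the flag rendering is a simplified approximation of the real _format_command:

    # Minimal sketch of the parameter-forwarding idiom, under the assumptions above.
    from typing import Optional


    def format_command(command: str, **params) -> str:
        # Map pythonic parameter names to their CLI spellings, then render flags.
        name_map = {"json_mode": "json", "await_mode": "await"}
        parts = []
        for param, value in params.items():
            param = name_map.get(param, param).replace("_", "-")
            if not value:
                continue  # skip None / False / empty values entirely
            if isinstance(value, bool):
                parts.append(f"--{param}")  # bare boolean flag
            elif isinstance(value, dict):
                rendered = ",".join(f"{key}={val}" for key, val in value.items())
                parts.append(f"--{param} '" + rendered + "'")
            else:
                parts.append(f"--{param} '{value}'")
        return f"neofs-cli {command} " + " ".join(parts)


    def demo_delete(rpc_endpoint: str, cid: str, oid: str, ttl: Optional[int] = None) -> str:
        # The same forwarding expression used by every wrapper method in the patch.
        return format_command(
            "object delete",
            **{param: value for param, value in locals().items() if param not in ["self"]},
        )


    print(demo_delete("s01.neofs.devenv:8080", "CID", "OID", ttl=2))
    # -> neofs-cli object delete --rpc-endpoint 's01.neofs.devenv:8080' --cid 'CID' --oid 'OID' --ttl '2'

Forwarding locals() this way keeps each wrapper a thin, declarative mirror of the CLI's own flags, which is why black's re-wrapping of the long signatures and dict comprehensions in this commit changes layout only, not behavior.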