From a8a00c1c5326a5c53860708fe736a215d5dbe3a3 Mon Sep 17 00:00:00 2001 From: Yulia Kovshova Date: Tue, 20 Sep 2022 18:03:52 +0300 Subject: [PATCH] [#297] remove robot.logger Signed-off-by: Yulia Kovshova --- pytest_tests/testsuites/conftest.py | 16 +- .../lib/python_keywords/cli_helpers.py | 66 ++-- .../python_keywords/complex_object_actions.py | 95 ++--- .../lib/python_keywords/container.py | 135 ++++--- robot/resources/lib/python_keywords/epoch.py | 45 ++- .../lib/python_keywords/http_gate.py | 115 +++--- .../lib/python_keywords/neofs_verbs.py | 11 +- .../lib/python_keywords/node_management.py | 201 ++++++----- .../lib/python_keywords/payment_neogo.py | 102 +++--- .../lib/python_keywords/s3_gate_bucket.py | 153 ++++---- .../lib/python_keywords/s3_gate_object.py | 328 +++++++++++------- .../lib/python_keywords/session_token.py | 10 +- .../lib/python_keywords/storage_policy.py | 149 ++++---- .../lib/python_keywords/tombstone.py | 47 +-- .../lib/python_keywords/utility_keywords.py | 51 ++- 15 files changed, 811 insertions(+), 713 deletions(-) diff --git a/pytest_tests/testsuites/conftest.py b/pytest_tests/testsuites/conftest.py index 2c0d7f3a..e88d2a99 100644 --- a/pytest_tests/testsuites/conftest.py +++ b/pytest_tests/testsuites/conftest.py @@ -4,6 +4,7 @@ import re import shutil from datetime import datetime +import allure import pytest import wallet from cli_helpers import _cmd_run @@ -13,18 +14,14 @@ from common import ( FREE_STORAGE, INFRASTRUCTURE_TYPE, MAINNET_WALLET_PATH, - NEOFS_NETMAP_DICT + NEOFS_NETMAP_DICT, ) from env_properties import save_env_properties from payment_neogo import neofs_deposit, transfer_mainnet_gas from python_keywords.node_management import node_healthcheck -from robot.api import deco from service_helper import get_storage_service_helper from wallet import init_wallet -import allure - - logger = logging.getLogger("NeoLogger") @@ -106,10 +103,7 @@ def run_health_check(collect_logs): failed_nodes = [] for node_name in NEOFS_NETMAP_DICT.keys(): health_check = node_healthcheck(node_name) - if ( - health_check.health_status != "READY" - or health_check.network_status != "ONLINE" - ): + if health_check.health_status != "READY" or health_check.network_status != "ONLINE": failed_nodes.append(node_name) if failed_nodes: @@ -121,9 +115,7 @@ def run_health_check(collect_logs): def prepare_wallet_and_deposit(prepare_tmp_dir): wallet_path, addr, _ = wallet.init_wallet(ASSETS_DIR) logger.info(f"Init wallet: {wallet_path},\naddr: {addr}") - allure.attach.file( - wallet_path, os.path.basename(wallet_path), allure.attachment_type.JSON - ) + allure.attach.file(wallet_path, os.path.basename(wallet_path), allure.attachment_type.JSON) if not FREE_STORAGE: deposit = 30 diff --git a/robot/resources/lib/python_keywords/cli_helpers.py b/robot/resources/lib/python_keywords/cli_helpers.py index ea1ed2e4..a1b81f2b 100644 --- a/robot/resources/lib/python_keywords/cli_helpers.py +++ b/robot/resources/lib/python_keywords/cli_helpers.py @@ -4,6 +4,7 @@ Helper functions to use with `neofs-cli`, `neo-go` and other CLIs. 
""" import json +import logging import subprocess import sys from contextlib import suppress @@ -13,7 +14,8 @@ from typing import Union import allure import pexpect -from robot.api import logger + +logger = logging.getLogger("NeoLogger") ROBOT_AUTO_KEYWORDS = False COLOR_GREEN = "\033[92m" @@ -30,10 +32,15 @@ def _cmd_run(cmd: str, timeout: int = 30) -> str: try: logger.info(f"{COLOR_GREEN}Executing command: {cmd}{COLOR_OFF}") start_time = datetime.utcnow() - compl_proc = subprocess.run(cmd, check=True, universal_newlines=True, - stdout=subprocess.PIPE, stderr=subprocess.STDOUT, - timeout=timeout, - shell=True) + compl_proc = subprocess.run( + cmd, + check=True, + universal_newlines=True, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + timeout=timeout, + shell=True, + ) output = compl_proc.stdout return_code = compl_proc.returncode end_time = datetime.utcnow() @@ -42,26 +49,27 @@ def _cmd_run(cmd: str, timeout: int = 30) -> str: return output except subprocess.CalledProcessError as exc: - logger.info(f"Command: {cmd}\n" - f"Error:\nreturn code: {exc.returncode} " - f"\nOutput: {exc.output}") + logger.info( + f"Command: {cmd}\n" f"Error:\nreturn code: {exc.returncode} " f"\nOutput: {exc.output}" + ) end_time = datetime.now() return_code, cmd_output = subprocess.getstatusoutput(cmd) _attach_allure_log(cmd, cmd_output, return_code, start_time, end_time) - raise RuntimeError(f"Command: {cmd}\n" - f"Error:\nreturn code: {exc.returncode}\n" - f"Output: {exc.output}") from exc + raise RuntimeError( + f"Command: {cmd}\n" f"Error:\nreturn code: {exc.returncode}\n" f"Output: {exc.output}" + ) from exc except OSError as exc: - raise RuntimeError(f"Command: {cmd}\n" - f"Output: {exc.strerror}") from exc + raise RuntimeError(f"Command: {cmd}\n" f"Output: {exc.strerror}") from exc except Exception as exc: return_code, cmd_output = subprocess.getstatusoutput(cmd) end_time = datetime.now() _attach_allure_log(cmd, cmd_output, return_code, start_time, end_time) - logger.info(f"Command: {cmd}\n" - f"Error:\nreturn code: {return_code}\n" - f"Output: {exc.output.decode('utf-8') if type(exc.output) is bytes else exc.output}") + logger.info( + f"Command: {cmd}\n" + f"Error:\nreturn code: {return_code}\n" + f"Output: {exc.output.decode('utf-8') if type(exc.output) is bytes else exc.output}" + ) raise @@ -69,7 +77,7 @@ def _run_with_passwd(cmd: str) -> str: child = pexpect.spawn(cmd) child.delaybeforesend = 1 child.expect(".*") - child.sendline('\r') + child.sendline("\r") if sys.platform == "darwin": child.expect(pexpect.EOF) cmd = child.before @@ -90,7 +98,7 @@ def _configure_aws_cli(cmd: str, key_id: str, access_key: str, out_format: str = child.sendline(access_key) child.expect("Default region name.*") - child.sendline('') + child.sendline("") child.expect("Default output format.*") child.sendline(out_format) @@ -102,26 +110,24 @@ def _configure_aws_cli(cmd: str, key_id: str, access_key: str, out_format: str = return cmd.decode() -def _attach_allure_log(cmd: str, output: str, return_code: int, start_time: datetime, - end_time: datetime) -> None: +def _attach_allure_log( + cmd: str, output: str, return_code: int, start_time: datetime, end_time: datetime +) -> None: command_attachment = ( f"COMMAND: '{cmd}'\n" - f'OUTPUT:\n {output}\n' - f'RC: {return_code}\n' - f'Start / End / Elapsed\t {start_time.time()} / {end_time.time()} / {end_time - start_time}' + f"OUTPUT:\n {output}\n" + f"RC: {return_code}\n" + f"Start / End / Elapsed\t {start_time.time()} / {end_time.time()} / {end_time - start_time}" ) with 
allure.step(f'COMMAND: {shorten(cmd, width=60, placeholder="...")}'): - allure.attach(command_attachment, 'Command execution', allure.attachment_type.TEXT) + allure.attach(command_attachment, "Command execution", allure.attachment_type.TEXT) def log_command_execution(cmd: str, output: Union[str, dict]) -> None: - logger.info(f'{cmd}: {output}') + logger.info(f"{cmd}: {output}") with suppress(Exception): json_output = json.dumps(output, indent=4, sort_keys=True) output = json_output - command_attachment = ( - f"COMMAND: '{cmd}'\n" - f'OUTPUT:\n {output}\n' - ) + command_attachment = f"COMMAND: '{cmd}'\n" f"OUTPUT:\n {output}\n" with allure.step(f'COMMAND: {shorten(cmd, width=60, placeholder="...")}'): - allure.attach(command_attachment, 'Command execution', allure.attachment_type.TEXT) + allure.attach(command_attachment, "Command execution", allure.attachment_type.TEXT) diff --git a/robot/resources/lib/python_keywords/complex_object_actions.py b/robot/resources/lib/python_keywords/complex_object_actions.py index 945183ec..acd8a6fc 100644 --- a/robot/resources/lib/python_keywords/complex_object_actions.py +++ b/robot/resources/lib/python_keywords/complex_object_actions.py @@ -10,73 +10,74 @@ first non-null response. """ +import logging + import allure +import neofs_verbs from common import NEOFS_NETMAP, WALLET_CONFIG -import neofs_verbs - -from robot.api import logger -from robot.libraries.BuiltIn import BuiltIn - -import neofs_verbs -from common import NEOFS_NETMAP +logger = logging.getLogger("NeoLogger") ROBOT_AUTO_KEYWORDS = False -@allure.step('Get Link Object') -def get_link_object(wallet: str, cid: str, oid: str, bearer_token: str = "", - wallet_config: str = WALLET_CONFIG): +@allure.step("Get Link Object") +def get_link_object( + wallet: str, cid: str, oid: str, bearer_token: str = "", wallet_config: str = WALLET_CONFIG +): """ - Args: - wallet (str): path to the wallet on whose behalf the Storage Nodes - are requested - cid (str): Container ID which stores the Large Object - oid (str): Large Object ID - bearer_token (optional, str): path to Bearer token file - wallet_config (optional, str): path to the neofs-cli config file - Returns: - (str): Link Object ID - When no Link Object ID is found after all Storage Nodes polling, - the function throws a native robot error. + Args: + wallet (str): path to the wallet on whose behalf the Storage Nodes + are requested + cid (str): Container ID which stores the Large Object + oid (str): Large Object ID + bearer_token (optional, str): path to Bearer token file + wallet_config (optional, str): path to the neofs-cli config file + Returns: + (str): Link Object ID + When no Link Object ID is found after all Storage Nodes polling, + the function throws a native robot error. 
""" for node in NEOFS_NETMAP: try: - resp = neofs_verbs.head_object(wallet, cid, oid, - endpoint=node, - is_raw=True, - is_direct=True, - bearer_token=bearer_token, - wallet_config=wallet_config) - if resp['link']: - return resp['link'] + resp = neofs_verbs.head_object( + wallet, + cid, + oid, + endpoint=node, + is_raw=True, + is_direct=True, + bearer_token=bearer_token, + wallet_config=wallet_config, + ) + if resp["link"]: + return resp["link"] except Exception: logger.info(f"No Link Object found on {node}; continue") - BuiltIn().fail(f"No Link Object for {cid}/{oid} found among all Storage Nodes") + logger.error(f"No Link Object for {cid}/{oid} found among all Storage Nodes") return None -@allure.step('Get Last Object') +@allure.step("Get Last Object") def get_last_object(wallet: str, cid: str, oid: str): """ - Args: - wallet (str): path to the wallet on whose behalf the Storage Nodes - are requested - cid (str): Container ID which stores the Large Object - oid (str): Large Object ID - Returns: - (str): Last Object ID - When no Last Object ID is found after all Storage Nodes polling, - the function throws a native robot error. + Args: + wallet (str): path to the wallet on whose behalf the Storage Nodes + are requested + cid (str): Container ID which stores the Large Object + oid (str): Large Object ID + Returns: + (str): Last Object ID + When no Last Object ID is found after all Storage Nodes polling, + the function throws a native robot error. """ for node in NEOFS_NETMAP: try: - resp = neofs_verbs.head_object(wallet, cid, oid, - endpoint=node, - is_raw=True, - is_direct=True) - if resp['lastPart']: - return resp['lastPart'] + resp = neofs_verbs.head_object( + wallet, cid, oid, endpoint=node, is_raw=True, is_direct=True + ) + if resp["lastPart"]: + return resp["lastPart"] except Exception: logger.info(f"No Last Object found on {node}; continue") - BuiltIn().fail(f"No Last Object for {cid}/{oid} found among all Storage Nodes") + logger.error(f"No Last Object for {cid}/{oid} found among all Storage Nodes") return None diff --git a/robot/resources/lib/python_keywords/container.py b/robot/resources/lib/python_keywords/container.py index ec14169e..27bea6fc 100644 --- a/robot/resources/lib/python_keywords/container.py +++ b/robot/resources/lib/python_keywords/container.py @@ -6,50 +6,69 @@ import allure import json +import logging from time import sleep from typing import Optional, Union +import allure import json_transformers from cli_utils import NeofsCli from common import NEOFS_ENDPOINT, WALLET_CONFIG -from robot.api import logger + +logger = logging.getLogger("NeoLogger") ROBOT_AUTO_KEYWORDS = False DEFAULT_PLACEMENT_RULE = "REP 2 IN X CBF 1 SELECT 4 FROM * AS X" -@allure.step('Create Container') -def create_container(wallet: str, rule: str = DEFAULT_PLACEMENT_RULE, basic_acl: str = '', - attributes: Optional[dict] = None, session_token: str = '', - session_wallet: str = '', name: str = None, options: dict = None, - await_mode: bool = True, wait_for_creation: bool = True) -> str: +@allure.step("Create Container") +def create_container( + wallet: str, + rule: str = DEFAULT_PLACEMENT_RULE, + basic_acl: str = "", + attributes: Optional[dict] = None, + session_token: str = "", + session_wallet: str = "", + name: str = None, + options: dict = None, + await_mode: bool = True, + wait_for_creation: bool = True, +) -> str: """ - A wrapper for `neofs-cli container create` call. + A wrapper for `neofs-cli container create` call. 
- Args: - wallet (str): a wallet on whose behalf a container is created - rule (optional, str): placement rule for container - basic_acl (optional, str): an ACL for container, will be - appended to `--basic-acl` key - attributes (optional, dict): container attributes , will be - appended to `--attributes` key - session_token (optional, str): a path to session token file - session_wallet(optional, str): a path to the wallet which signed - the session token; this parameter makes sense - when paired with `session_token` - options (optional, dict): any other options to pass to the call - name (optional, str): container name attribute - await_mode (bool): block execution until container is persisted - wait_for_creation (): Wait for container shows in container list + Args: + wallet (str): a wallet on whose behalf a container is created + rule (optional, str): placement rule for container + basic_acl (optional, str): an ACL for container, will be + appended to `--basic-acl` key + attributes (optional, dict): container attributes , will be + appended to `--attributes` key + session_token (optional, str): a path to session token file + session_wallet(optional, str): a path to the wallet which signed + the session token; this parameter makes sense + when paired with `session_token` + options (optional, dict): any other options to pass to the call + name (optional, str): container name attribute + await_mode (bool): block execution until container is persisted + wait_for_creation (): Wait for container shows in container list - Returns: - (str): CID of the created container + Returns: + (str): CID of the created container """ cli = NeofsCli(config=WALLET_CONFIG, timeout=60) - output = cli.container.create(rpc_endpoint=NEOFS_ENDPOINT, wallet=session_wallet if session_wallet else wallet, - policy=rule, basic_acl=basic_acl, attributes=attributes, name=name, - session=session_token, await_mode=await_mode, **options or {}) + output = cli.container.create( + rpc_endpoint=NEOFS_ENDPOINT, + wallet=session_wallet if session_wallet else wallet, + policy=rule, + basic_acl=basic_acl, + attributes=attributes, + name=name, + session=session_token, + await_mode=await_mode, + **options or {}, + ) cid = _parse_cid(output) @@ -68,7 +87,9 @@ def wait_for_container_creation(wallet: str, cid: str, attempts: int = 15, sleep return logger.info(f"There is no {cid} in {containers} yet; sleep {sleep_interval} and continue") sleep(sleep_interval) - raise RuntimeError(f"After {attempts * sleep_interval} seconds container {cid} hasn't been persisted; exiting") + raise RuntimeError( + f"After {attempts * sleep_interval} seconds container {cid} hasn't been persisted; exiting" + ) def wait_for_container_deletion(wallet: str, cid: str, attempts: int = 30, sleep_interval: int = 1): @@ -78,21 +99,21 @@ def wait_for_container_deletion(wallet: str, cid: str, attempts: int = 30, sleep sleep(sleep_interval) continue except Exception as err: - if 'container not found' not in str(err): + if "container not found" not in str(err): raise AssertionError(f'Expected "container not found" in error, got\n{err}') return - raise AssertionError(f'Expected container deleted during {attempts * sleep_interval} sec.') + raise AssertionError(f"Expected container deleted during {attempts * sleep_interval} sec.") @allure.step('List Containers') def list_containers(wallet: str) -> list[str]: """ - A wrapper for `neofs-cli container list` call. It returns all the - available containers for the given wallet. 
- Args: - wallet (str): a wallet on whose behalf we list the containers - Returns: - (list): list of containers + A wrapper for `neofs-cli container list` call. It returns all the + available containers for the given wallet. + Args: + wallet (str): a wallet on whose behalf we list the containers + Returns: + (list): list of containers """ cli = NeofsCli(config=WALLET_CONFIG) output = cli.container.list(rpc_endpoint=NEOFS_ENDPOINT, wallet=wallet) @@ -103,27 +124,29 @@ def list_containers(wallet: str) -> list[str]: @allure.step('Get Container') def get_container(wallet: str, cid: str, json_mode: bool = True) -> Union[dict, str]: """ - A wrapper for `neofs-cli container get` call. It extracts container's - attributes and rearranges them into a more compact view. - Args: - wallet (str): path to a wallet on whose behalf we get the container - cid (str): ID of the container to get - json_mode (bool): return container in JSON format - Returns: - (dict, str): dict of container attributes + A wrapper for `neofs-cli container get` call. It extracts container's + attributes and rearranges them into a more compact view. + Args: + wallet (str): path to a wallet on whose behalf we get the container + cid (str): ID of the container to get + json_mode (bool): return container in JSON format + Returns: + (dict, str): dict of container attributes """ cli = NeofsCli(config=WALLET_CONFIG) - output = cli.container.get(rpc_endpoint=NEOFS_ENDPOINT, wallet=wallet, cid=cid, json_mode=json_mode) + output = cli.container.get( + rpc_endpoint=NEOFS_ENDPOINT, wallet=wallet, cid=cid, json_mode=json_mode + ) if not json_mode: return output container_info = json.loads(output) attributes = dict() - for attr in container_info['attributes']: - attributes[attr['key']] = attr['value'] - container_info['attributes'] = attributes - container_info['ownerID'] = json_transformers.json_reencode(container_info['ownerID']['value']) + for attr in container_info["attributes"]: + attributes[attr["key"]] = attr["value"] + container_info["attributes"] = attributes + container_info["ownerID"] = json_transformers.json_reencode(container_info["ownerID"]["value"]) return container_info @@ -132,12 +155,12 @@ def get_container(wallet: str, cid: str, json_mode: bool = True) -> Union[dict, # https://github.com/nspcc-dev/neofs-contract/issues/121 def delete_container(wallet: str, cid: str, force: bool = False) -> None: """ - A wrapper for `neofs-cli container delete` call. - Args: - wallet (str): path to a wallet on whose behalf we delete the container - cid (str): ID of the container to delete - force (bool): do not check whether container contains locks and remove immediately - This function doesn't return anything. + A wrapper for `neofs-cli container delete` call. + Args: + wallet (str): path to a wallet on whose behalf we delete the container + cid (str): ID of the container to delete + force (bool): do not check whether container contains locks and remove immediately + This function doesn't return anything. 
""" cli = NeofsCli(config=WALLET_CONFIG) @@ -160,7 +183,7 @@ def _parse_cid(output: str) -> str: """ try: # taking first line from command's output - first_line = output.split('\n')[0] + first_line = output.split("\n")[0] except Exception: logger.error(f"Got empty output: {output}") splitted = first_line.split(": ") diff --git a/robot/resources/lib/python_keywords/epoch.py b/robot/resources/lib/python_keywords/epoch.py index 0a6c51f6..c1bab1c9 100644 --- a/robot/resources/lib/python_keywords/epoch.py +++ b/robot/resources/lib/python_keywords/epoch.py @@ -1,34 +1,40 @@ #!/usr/bin/python3.9 -import allure -from robot.api import logger +import logging +import allure import contract import wrappers -from common import (IR_WALLET_PATH, IR_WALLET_PASS, MORPH_ENDPOINT, NEOFS_ADM_EXEC, - NEOFS_ADM_CONFIG_PATH) +from common import ( + IR_WALLET_PASS, + IR_WALLET_PATH, + MORPH_ENDPOINT, + NEOFS_ADM_CONFIG_PATH, + NEOFS_ADM_EXEC, +) +logger = logging.getLogger("NeoLogger") ROBOT_AUTO_KEYWORDS = False -@allure.step('Get Epoch') +@allure.step("Get Epoch") def get_epoch(): - epoch = int(contract.testinvoke_contract( - contract.get_netmap_contract_hash(MORPH_ENDPOINT), - "epoch", - MORPH_ENDPOINT) + epoch = int( + contract.testinvoke_contract( + contract.get_netmap_contract_hash(MORPH_ENDPOINT), "epoch", MORPH_ENDPOINT + ) ) logger.info(f"Got epoch {epoch}") return epoch -@allure.step('Tick Epoch') +@allure.step("Tick Epoch") def tick_epoch(): if NEOFS_ADM_EXEC and NEOFS_ADM_CONFIG_PATH: # If neofs-adm is available, then we tick epoch with it (to be consistent with UAT tests) cmd = f"{NEOFS_ADM_EXEC} morph force-new-epoch -c {NEOFS_ADM_CONFIG_PATH}" logger.info(f"Executing shell command: {cmd}") - out = '' - err = '' + out = "" + err = "" try: out = wrappers.run_sh(cmd) logger.info(f"Command completed with output: {out}") @@ -37,11 +43,11 @@ def tick_epoch(): err = str(exc) raise RuntimeError("Failed to tick epoch") from exc finally: - allure.attach(( - f'COMMAND: {cmd}\n' - f'OUTPUT:\n {out}\n' - f'ERROR: {err}\n' - ), 'Tick Epoch', allure.attachment_type.TEXT) + allure.attach( + (f"COMMAND: {cmd}\n" f"OUTPUT:\n {out}\n" f"ERROR: {err}\n"), + "Tick Epoch", + allure.attachment_type.TEXT, + ) return # Otherwise we tick epoch using transaction @@ -49,4 +55,7 @@ def tick_epoch(): return contract.invoke_contract_multisig( contract.get_netmap_contract_hash(MORPH_ENDPOINT), f"newEpoch int:{cur_epoch + 1}", - IR_WALLET_PATH, IR_WALLET_PASS, MORPH_ENDPOINT) + IR_WALLET_PATH, + IR_WALLET_PASS, + MORPH_ENDPOINT, + ) diff --git a/robot/resources/lib/python_keywords/http_gate.py b/robot/resources/lib/python_keywords/http_gate.py index 6588aac2..cb4633e4 100644 --- a/robot/resources/lib/python_keywords/http_gate.py +++ b/robot/resources/lib/python_keywords/http_gate.py @@ -1,9 +1,9 @@ #!/usr/bin/python3 -import allure +import logging +import os import re import shutil -import sys import uuid import zipfile from urllib.parse import quote_plus @@ -12,36 +12,32 @@ import allure import requests from cli_helpers import _cmd_run from common import HTTP_GATE -from robot.api import logger -from robot.libraries.BuiltIn import BuiltIn +logger = logging.getLogger("NeoLogger") ROBOT_AUTO_KEYWORDS = False -if "pytest" in sys.modules: - import os - - ASSETS_DIR = os.getenv("ASSETS_DIR", "TemporaryDir/") -else: - ASSETS_DIR = BuiltIn().get_variable_value("${ASSETS_DIR}") +ASSETS_DIR = os.getenv("ASSETS_DIR", "TemporaryDir/") -@allure.step('Get via HTTP Gate') +@allure.step("Get via HTTP Gate") def get_via_http_gate(cid: str, oid: 
str): """ This function gets given object from HTTP gate :param cid: CID to get object from :param oid: object OID """ - request = f'{HTTP_GATE}/get/{cid}/{oid}' + request = f"{HTTP_GATE}/get/{cid}/{oid}" resp = requests.get(request, stream=True) if not resp.ok: - raise Exception(f"""Failed to get object via HTTP gate: + raise Exception( + f"""Failed to get object via HTTP gate: request: {resp.request.path_url}, response: {resp.text}, - status code: {resp.status_code} {resp.reason}""") + status code: {resp.status_code} {resp.reason}""" + ) - logger.info(f'Request: {request}') + logger.info(f"Request: {request}") _attach_allure_step(request, resp.status_code) filename = f"{ASSETS_DIR}/{cid}_{oid}" @@ -50,36 +46,38 @@ def get_via_http_gate(cid: str, oid: str): return filename -@allure.step('Get via Zip HTTP Gate') +@allure.step("Get via Zip HTTP Gate") def get_via_zip_http_gate(cid: str, prefix: str): """ This function gets given object from HTTP gate :param cid: CID to get object from :param prefix: common prefix """ - request = f'{HTTP_GATE}/zip/{cid}/{prefix}' + request = f"{HTTP_GATE}/zip/{cid}/{prefix}" resp = requests.get(request, stream=True) if not resp.ok: - raise Exception(f"""Failed to get object via HTTP gate: + raise Exception( + f"""Failed to get object via HTTP gate: request: {resp.request.path_url}, response: {resp.text}, - status code: {resp.status_code} {resp.reason}""") + status code: {resp.status_code} {resp.reason}""" + ) - logger.info(f'Request: {request}') + logger.info(f"Request: {request}") _attach_allure_step(request, resp.status_code) - filename = f'{ASSETS_DIR}/{cid}_archive.zip' - with open(filename, 'wb') as get_file: + filename = f"{ASSETS_DIR}/{cid}_archive.zip" + with open(filename, "wb") as get_file: shutil.copyfileobj(resp.raw, get_file) - with zipfile.ZipFile(filename, 'r') as zip_ref: + with zipfile.ZipFile(filename, "r") as zip_ref: zip_ref.extractall(ASSETS_DIR) - return f'{ASSETS_DIR}/{prefix}' + return f"{ASSETS_DIR}/{prefix}" -@allure.step('Get via HTTP Gate by attribute') +@allure.step("Get via HTTP Gate by attribute") def get_via_http_gate_by_attribute(cid: str, attribute: dict): """ This function gets given object from HTTP gate @@ -88,16 +86,18 @@ def get_via_http_gate_by_attribute(cid: str, attribute: dict): """ attr_name = list(attribute.keys())[0] attr_value = quote_plus(str(attribute.get(attr_name))) - request = f'{HTTP_GATE}/get_by_attribute/{cid}/{quote_plus(str(attr_name))}/{attr_value}' + request = f"{HTTP_GATE}/get_by_attribute/{cid}/{quote_plus(str(attr_name))}/{attr_value}" resp = requests.get(request, stream=True) if not resp.ok: - raise Exception(f"""Failed to get object via HTTP gate: + raise Exception( + f"""Failed to get object via HTTP gate: request: {resp.request.path_url}, response: {resp.text}, - status code: {resp.status_code} {resp.reason}""") + status code: {resp.status_code} {resp.reason}""" + ) - logger.info(f'Request: {request}') + logger.info(f"Request: {request}") _attach_allure_step(request, resp.status_code) filename = f"{ASSETS_DIR}/{cid}_{str(uuid.uuid4())}" @@ -106,7 +106,7 @@ def get_via_http_gate_by_attribute(cid: str, attribute: dict): return filename -@allure.step('Upload via HTTP Gate') +@allure.step("Upload via HTTP Gate") def upload_via_http_gate(cid: str, path: str, headers: dict = None) -> str: """ This function upload given object through HTTP gate @@ -114,41 +114,43 @@ def upload_via_http_gate(cid: str, path: str, headers: dict = None) -> str: :param path: File path to upload :param headers: Object 
header """ - request = f'{HTTP_GATE}/upload/{cid}' - files = {'upload_file': open(path, 'rb')} - body = { - 'filename': path - } + request = f"{HTTP_GATE}/upload/{cid}" + files = {"upload_file": open(path, "rb")} + body = {"filename": path} resp = requests.post(request, files=files, data=body, headers=headers) if not resp.ok: - raise Exception(f"""Failed to get object via HTTP gate: + raise Exception( + f"""Failed to get object via HTTP gate: request: {resp.request.path_url}, response: {resp.text}, - status code: {resp.status_code} {resp.reason}""") + status code: {resp.status_code} {resp.reason}""" + ) - logger.info(f'Request: {request}') - _attach_allure_step(request, resp.json(), req_type='POST') + logger.info(f"Request: {request}") + _attach_allure_step(request, resp.json(), req_type="POST") - assert resp.json().get('object_id'), f'OID found in response {resp}' + assert resp.json().get("object_id"), f"OID found in response {resp}" - return resp.json().get('object_id') + return resp.json().get("object_id") -@allure.step('Upload via HTTP Gate using Curl') -def upload_via_http_gate_curl(cid: str, filepath: str, large_object=False, headers: dict = None) -> str: +@allure.step("Upload via HTTP Gate using Curl") +def upload_via_http_gate_curl( + cid: str, filepath: str, large_object=False, headers: dict = None +) -> str: """ This function upload given object through HTTP gate using curl utility. :param cid: CID to get object from :param filepath: File path to upload :param headers: Object header """ - request = f'{HTTP_GATE}/upload/{cid}' - files = f'file=@{filepath};filename={os.path.basename(filepath)}' - cmd = f'curl -F \'{files}\' {request}' + request = f"{HTTP_GATE}/upload/{cid}" + files = f"file=@{filepath};filename={os.path.basename(filepath)}" + cmd = f"curl -F '{files}' {request}" if large_object: - files = f'file=@pipe;filename={os.path.basename(filepath)}' - cmd = f'mkfifo pipe;cat {filepath} > pipe & curl --no-buffer -F \'{files}\' {request}' + files = f"file=@pipe;filename={os.path.basename(filepath)}" + cmd = f"mkfifo pipe;cat {filepath} > pipe & curl --no-buffer -F '{files}' {request}" output = _cmd_run(cmd) oid_re = re.search(r'"object_id": "(.*)"', output) if not oid_re: @@ -156,26 +158,23 @@ def upload_via_http_gate_curl(cid: str, filepath: str, large_object=False, heade return oid_re.group(1) -@allure.step('Get via HTTP Gate using Curl') +@allure.step("Get via HTTP Gate using Curl") def get_via_http_curl(cid: str, oid: str) -> str: """ This function gets given object from HTTP gate using curl utility. 
:param cid: CID to get object from :param oid: object OID """ - request = f'{HTTP_GATE}/get/{cid}/{oid}' + request = f"{HTTP_GATE}/get/{cid}/{oid}" filename = f"{ASSETS_DIR}/{cid}_{oid}_{str(uuid.uuid4())}" - cmd = f'curl {request} > {filename}' + cmd = f"curl {request} > {filename}" _cmd_run(cmd) return filename -def _attach_allure_step(request: str, status_code: int, req_type='GET'): - command_attachment = ( - f"REQUEST: '{request}'\n" - f'RESPONSE:\n {status_code}\n' - ) - with allure.step(f'{req_type} Request'): - allure.attach(command_attachment, f'{req_type} Request', allure.attachment_type.TEXT) +def _attach_allure_step(request: str, status_code: int, req_type="GET"): + command_attachment = f"REQUEST: '{request}'\n" f"RESPONSE:\n {status_code}\n" + with allure.step(f"{req_type} Request"): + allure.attach(command_attachment, f"{req_type} Request", allure.attachment_type.TEXT) diff --git a/robot/resources/lib/python_keywords/neofs_verbs.py b/robot/resources/lib/python_keywords/neofs_verbs.py index 103a5504..b0c1c122 100644 --- a/robot/resources/lib/python_keywords/neofs_verbs.py +++ b/robot/resources/lib/python_keywords/neofs_verbs.py @@ -6,16 +6,18 @@ import allure import json +import logging import random import re import uuid from typing import Optional +import allure import json_transformers from cli_utils import NeofsCli from common import ASSETS_DIR, NEOFS_ENDPOINT, NEOFS_NETMAP, WALLET_CONFIG -from robot.api import logger +logger = logging.getLogger("NeoLogger") ROBOT_AUTO_KEYWORDS = False @@ -307,10 +309,7 @@ def search_object( cid=cid, bearer=bearer, xhdr=xhdr, - filters=[ - f"{filter_key} EQ {filter_val}" - for filter_key, filter_val in filters.items() - ] + filters=[f"{filter_key} EQ {filter_val}" for filter_key, filter_val in filters.items()] if filters else None, session=session, @@ -325,7 +324,7 @@ def search_object( f"is equal for expected list '{expected_objects_list}'", ) else: - logger.warn( + logger.warning( f"Found object list {found_objects} ", f"is not equal to expected list '{expected_objects_list}'", ) diff --git a/robot/resources/lib/python_keywords/node_management.py b/robot/resources/lib/python_keywords/node_management.py index e5aca957..eca4fbf7 100644 --- a/robot/resources/lib/python_keywords/node_management.py +++ b/robot/resources/lib/python_keywords/node_management.py @@ -4,20 +4,21 @@ This module contains keywords for tests that check management of storage nodes. 
""" -import allure +import logging import random import re import time from dataclasses import dataclass from typing import Optional +import allure from common import MAINNET_BLOCK_TIME, MORPH_BLOCK_TIME, NEOFS_NETMAP_DICT, STORAGE_WALLET_PASS from data_formatters import get_wallet_public_key from epoch import tick_epoch -from robot.api import logger from service_helper import get_storage_service_helper from utility import robot_time_to_int +logger = logging.getLogger("NeoLogger") ROBOT_AUTO_KEYWORDS = False @@ -27,26 +28,26 @@ class HealthStatus: health_status: str = None @staticmethod - def from_stdout(output: str) -> 'HealthStatus': + def from_stdout(output: str) -> "HealthStatus": network, health = None, None - for line in output.split('\n'): - if 'Network status' in line: - network = line.split(':')[-1].strip() - if 'Health status' in line: - health = line.split(':')[-1].strip() + for line in output.split("\n"): + if "Network status" in line: + network = line.split(":")[-1].strip() + if "Health status" in line: + health = line.split(":")[-1].strip() return HealthStatus(network, health) -@allure.step('Stop Nodes') +@allure.step("Stop Nodes") def stop_nodes(number: int, nodes: list) -> list: """ - The function shuts down the given number of randomly - selected nodes in docker. - Args: - number (int): the number of nodes to shut down - nodes (list): the list of nodes for possible shut down - Returns: - (list): the list of nodes which have been shut down + The function shuts down the given number of randomly + selected nodes in docker. + Args: + number (int): the number of nodes to shut down + nodes (list): the list of nodes for possible shut down + Returns: + (list): the list of nodes which have been shut down """ helper = get_storage_service_helper() nodes_to_stop = random.sample(nodes, number) @@ -55,140 +56,140 @@ def stop_nodes(number: int, nodes: list) -> list: return nodes_to_stop -@allure.step('Start Nodes') +@allure.step("Start Nodes") def start_nodes(nodes: list) -> None: """ - The function raises the given nodes. - Args: - nodes (list): the list of nodes to raise - Returns: - (void) + The function raises the given nodes. + Args: + nodes (list): the list of nodes to raise + Returns: + (void) """ helper = get_storage_service_helper() for node in nodes: helper.start(node) -@allure.step('Get control endpoint and wallet') -def get_control_endpoint_and_wallet(endpoint_number: str = ''): +@allure.step("Get control endpoint and wallet") +def get_control_endpoint_and_wallet(endpoint_number: str = ""): """ - Gets control endpoint for a random or given node + Gets control endpoint for a random or given node - Args: - endpoint_number (optional, str): the number of the node - in the form of 's01', 's02', etc. - given in NEOFS_NETMAP_DICT as keys - Returns: - (str): the number of the node - (str): endpoint control for the node - (str): the wallet of the respective node + Args: + endpoint_number (optional, str): the number of the node + in the form of 's01', 's02', etc. 
+ given in NEOFS_NETMAP_DICT as keys + Returns: + (str): the number of the node + (str): endpoint control for the node + (str): the wallet of the respective node """ - if endpoint_number == '': + if endpoint_number == "": endpoint_num = random.choice(list(NEOFS_NETMAP_DICT.keys())) - logger.info(f'Random node chosen: {endpoint_num}') + logger.info(f"Random node chosen: {endpoint_num}") else: endpoint_num = endpoint_number - endpoint_values = NEOFS_NETMAP_DICT[f'{endpoint_num}'] - endpoint_control = endpoint_values['control'] - wallet = endpoint_values['wallet_path'] + endpoint_values = NEOFS_NETMAP_DICT[f"{endpoint_num}"] + endpoint_control = endpoint_values["control"] + wallet = endpoint_values["wallet_path"] return endpoint_num, endpoint_control, wallet -@allure.step('Get Locode') +@allure.step("Get Locode") def get_locode(): endpoint_values = random.choice(list(NEOFS_NETMAP_DICT.values())) - locode = endpoint_values['UN-LOCODE'] - logger.info(f'Random locode chosen: {locode}') + locode = endpoint_values["UN-LOCODE"] + logger.info(f"Random locode chosen: {locode}") return locode -@allure.step('Healthcheck for node') +@allure.step("Healthcheck for node") def node_healthcheck(node_name: str) -> HealthStatus: """ - The function returns node's health status. - Args: - node_name str: node name to use for netmap snapshot operation - Returns: - health status as HealthStatus object. + The function returns node's health status. + Args: + node_name str: node name to use for netmap snapshot operation + Returns: + health status as HealthStatus object. """ command = "control healthcheck" output = _run_control_command(node_name, command) return HealthStatus.from_stdout(output) -@allure.step('Set status for node') +@allure.step("Set status for node") def node_set_status(node_name: str, status: str, retries: int = 0) -> None: """ - The function sets particular status for given node. - Args: - node_name str: node name to use for netmap snapshot operation - status str: online or offline. - retries (optional, int): number of retry attempts if it didn't work from the first time - Returns: - (void) + The function sets particular status for given node. + Args: + node_name str: node name to use for netmap snapshot operation + status str: online or offline. + retries (optional, int): number of retry attempts if it didn't work from the first time + Returns: + (void) """ command = f"control set-status --status {status}" _run_control_command(node_name, command, retries) -@allure.step('Get netmap snapshot') +@allure.step("Get netmap snapshot") def get_netmap_snapshot(node_name: Optional[str] = None) -> str: """ - The function returns string representation of netmap-snapshot. - Args: - node_name str: node name to use for netmap snapshot operation - Returns: - string representation of netmap-snapshot + The function returns string representation of netmap-snapshot. + Args: + node_name str: node name to use for netmap snapshot operation + Returns: + string representation of netmap-snapshot """ node_name = node_name or list(NEOFS_NETMAP_DICT)[0] command = "control netmap-snapshot" return _run_control_command(node_name, command) -@allure.step('Shard list for node') +@allure.step("Shard list for node") def node_shard_list(node_name: str) -> list[str]: """ - The function returns list of shards for particular node. - Args: - node_name str: node name to use for netmap snapshot operation - Returns: - list of shards. + The function returns list of shards for particular node. 
+ Args: + node_name str: node name to use for netmap snapshot operation + Returns: + list of shards. """ command = "control shards list" output = _run_control_command(node_name, command) - return re.findall(r'Shard (.*):', output) + return re.findall(r"Shard (.*):", output) -@allure.step('Shard list for node') +@allure.step("Shard list for node") def node_shard_set_mode(node_name: str, shard: str, mode: str) -> str: """ - The function sets mode for node's particular shard. - Args: - node_name str: node name to use for netmap snapshot operation - Returns: - health status as HealthStatus object. + The function sets mode for node's particular shard. + Args: + node_name str: node name to use for netmap snapshot operation + Returns: + health status as HealthStatus object. """ command = f"control shards set-mode --id {shard} --mode {mode}" return _run_control_command(node_name, command) -@allure.step('Drop object from node {node_name}') +@allure.step("Drop object from node {node_name}") def drop_object(node_name: str, cid: str, oid: str) -> str: """ - The function drops object from particular node. - Args: - node_name str: node name to use for netmap snapshot operation - Returns: - health status as HealthStatus object. + The function drops object from particular node. + Args: + node_name str: node name to use for netmap snapshot operation + Returns: + health status as HealthStatus object. """ command = f"control drop-objects -o {cid}/{oid}" return _run_control_command(node_name, command) -@allure.step('Delete data of node {node_name}') +@allure.step("Delete data of node {node_name}") def delete_node_data(node_name: str) -> None: helper = get_storage_service_helper() helper.stop_node(node_name) @@ -196,27 +197,25 @@ def delete_node_data(node_name: str) -> None: time.sleep(robot_time_to_int(MORPH_BLOCK_TIME)) -@allure.step('Exclude node {node_to_include} from network map') +@allure.step("Exclude node {node_to_include} from network map") def exclude_node_from_network_map(node_to_exclude, alive_node): - node_wallet_path = NEOFS_NETMAP_DICT[node_to_exclude]['wallet_path'] - node_netmap_key = get_wallet_public_key( - node_wallet_path, - STORAGE_WALLET_PASS, - format="base58" - ) + node_wallet_path = NEOFS_NETMAP_DICT[node_to_exclude]["wallet_path"] + node_netmap_key = get_wallet_public_key(node_wallet_path, STORAGE_WALLET_PASS, format="base58") - node_set_status(node_to_exclude, status='offline') + node_set_status(node_to_exclude, status="offline") time.sleep(robot_time_to_int(MORPH_BLOCK_TIME)) tick_epoch() snapshot = get_netmap_snapshot(node_name=alive_node) - assert node_netmap_key not in snapshot, f'Expected node with key {node_netmap_key} not in network map' + assert ( + node_netmap_key not in snapshot + ), f"Expected node with key {node_netmap_key} not in network map" -@allure.step('Include node {node_to_include} into network map') +@allure.step("Include node {node_to_include} into network map") def include_node_to_network_map(node_to_include: str, alive_node: str) -> None: - node_set_status(node_to_include, status='online') + node_set_status(node_to_include, status="online") time.sleep(robot_time_to_int(MORPH_BLOCK_TIME)) tick_epoch() @@ -224,20 +223,16 @@ def include_node_to_network_map(node_to_include: str, alive_node: str) -> None: check_node_in_map(node_to_include, alive_node) -@allure.step('Check node {node_name} in network map') +@allure.step("Check node {node_name} in network map") def check_node_in_map(node_name: str, alive_node: str = None): alive_node = alive_node or node_name - 
node_wallet_path = NEOFS_NETMAP_DICT[node_name]['wallet_path'] - node_netmap_key = get_wallet_public_key( - node_wallet_path, - STORAGE_WALLET_PASS, - format="base58" - ) + node_wallet_path = NEOFS_NETMAP_DICT[node_name]["wallet_path"] + node_netmap_key = get_wallet_public_key(node_wallet_path, STORAGE_WALLET_PASS, format="base58") - logger.info(f'Node {node_name} netmap key: {node_netmap_key}') + logger.info(f"Node {node_name} netmap key: {node_netmap_key}") snapshot = get_netmap_snapshot(node_name=alive_node) - assert node_netmap_key in snapshot, f'Expected node with key {node_netmap_key} in network map' + assert node_netmap_key in snapshot, f"Expected node with key {node_netmap_key} in network map" def _run_control_command(node_name: str, command: str, retries: int = 0) -> str: @@ -247,6 +242,6 @@ def _run_control_command(node_name: str, command: str, retries: int = 0) -> str: return helper.run_control_command(node_name, command) except AssertionError as err: if attempt < retries: - logger.warn(f'Command {command} failed with error {err} and will be retried') + logger.warning(f"Command {command} failed with error {err} and will be retried") continue - raise AssertionError(f'Command {command} failed with error {err}') from err + raise AssertionError(f"Command {command} failed with error {err}") from err diff --git a/robot/resources/lib/python_keywords/payment_neogo.py b/robot/resources/lib/python_keywords/payment_neogo.py index 170f8aaa..9e25a6f8 100644 --- a/robot/resources/lib/python_keywords/payment_neogo.py +++ b/robot/resources/lib/python_keywords/payment_neogo.py @@ -1,33 +1,41 @@ #!/usr/bin/python3 -import allure +import logging import re import time -from neo3 import wallet -from robot.api import logger - +import allure import contract import converters import rpc_client -from common import (GAS_HASH, MAINNET_SINGLE_ADDR, MAINNET_WALLET_PATH, MAINNET_WALLET_PASS, - MORPH_ENDPOINT, NEO_MAINNET_ENDPOINT, NEOFS_CONTRACT, NEOGO_EXECUTABLE) +from common import ( + GAS_HASH, + MAINNET_SINGLE_ADDR, + MAINNET_WALLET_PASS, + MAINNET_WALLET_PATH, + MORPH_ENDPOINT, + NEO_MAINNET_ENDPOINT, + NEOFS_CONTRACT, + NEOGO_EXECUTABLE, +) from converters import load_wallet +from neo3 import wallet from wallet import nep17_transfer from wrappers import run_sh_with_passwd_contract +logger = logging.getLogger("NeoLogger") ROBOT_AUTO_KEYWORDS = False -EMPTY_PASSWORD = '' -TX_PERSIST_TIMEOUT = 15 # seconds -ASSET_POWER_MAINCHAIN = 10 ** 8 -ASSET_POWER_SIDECHAIN = 10 ** 12 +EMPTY_PASSWORD = "" +TX_PERSIST_TIMEOUT = 15 # seconds +ASSET_POWER_MAINCHAIN = 10**8 +ASSET_POWER_SIDECHAIN = 10**12 morph_rpc_cli = rpc_client.RPCClient(MORPH_ENDPOINT) mainnet_rpc_cli = rpc_client.RPCClient(NEO_MAINNET_ENDPOINT) -@allure.step('Withdraw Mainnet Gas') +@allure.step("Withdraw Mainnet Gas") def withdraw_mainnet_gas(wlt: str, amount: int): address = _address_from_wallet(wlt, EMPTY_PASSWORD) scripthash = wallet.Account.address_to_script_hash(address) @@ -39,10 +47,10 @@ def withdraw_mainnet_gas(wlt: str, amount: int): ) logger.info(f"Executing command: {cmd}") - raw_out = run_sh_with_passwd_contract('', cmd, expect_confirmation=True) - out = raw_out.decode('utf-8') + raw_out = run_sh_with_passwd_contract("", cmd, expect_confirmation=True) + out = raw_out.decode("utf-8") logger.info(f"Command completed with output: {out}") - m = re.match(r'^Sent invocation transaction (\w{64})$', out) + m = re.match(r"^Sent invocation transaction (\w{64})$", out) if m is None: raise Exception("Can not get Tx.") tx = m.group(1) @@ -72,36 
+80,33 @@ def transaction_accepted(tx_id: str): return False -@allure.step('Get NeoFS Balance') +@allure.step("Get NeoFS Balance") def get_balance(wallet_path: str): """ This function returns NeoFS balance for given wallet. """ wlt = load_wallet(wallet_path) acc = wlt.accounts[-1] - payload = [ - { - 'type': 'Hash160', - 'value': str(acc.script_hash) - } - ] + payload = [{"type": "Hash160", "value": str(acc.script_hash)}] try: resp = morph_rpc_cli.invoke_function( - contract.get_balance_contract_hash(MORPH_ENDPOINT), - 'balanceOf', - payload + contract.get_balance_contract_hash(MORPH_ENDPOINT), "balanceOf", payload ) logger.info(f"Got response \n{resp}") - value = int(resp['stack'][0]['value']) + value = int(resp["stack"][0]["value"]) return value / ASSET_POWER_SIDECHAIN except Exception as out: logger.error(f"failed to get wallet balance: {out}") raise out -@allure.step('Transfer Mainnet Gas') -def transfer_mainnet_gas(wallet_to: str, amount: int, wallet_password: str = EMPTY_PASSWORD, - wallet_path: str = MAINNET_WALLET_PATH): +@allure.step("Transfer Mainnet Gas") +def transfer_mainnet_gas( + wallet_to: str, + amount: int, + wallet_password: str = EMPTY_PASSWORD, + wallet_path: str = MAINNET_WALLET_PATH, +): """ This function transfer GAS in main chain from mainnet wallet to the provided wallet. If the wallet contains more than one address, @@ -117,15 +122,20 @@ def transfer_mainnet_gas(wallet_to: str, amount: int, wallet_password: str = EMP """ address_to = _address_from_wallet(wallet_to, wallet_password) - txid = nep17_transfer(wallet_path, address_to, amount, NEO_MAINNET_ENDPOINT, - wallet_pass=MAINNET_WALLET_PASS, addr_from=MAINNET_SINGLE_ADDR) + txid = nep17_transfer( + wallet_path, + address_to, + amount, + NEO_MAINNET_ENDPOINT, + wallet_pass=MAINNET_WALLET_PASS, + addr_from=MAINNET_SINGLE_ADDR, + ) if not transaction_accepted(txid): raise AssertionError(f"TX {txid} hasn't been processed") -@allure.step('NeoFS Deposit') -def neofs_deposit(wallet_to: str, amount: int, - wallet_password: str = EMPTY_PASSWORD): +@allure.step("NeoFS Deposit") +def neofs_deposit(wallet_to: str, amount: int, wallet_password: str = EMPTY_PASSWORD): """ Transferring GAS from given wallet to NeoFS contract address. 
""" @@ -135,8 +145,14 @@ def neofs_deposit(wallet_to: str, amount: int, address_to = _address_from_wallet(wallet_to, wallet_password) - txid = nep17_transfer(wallet_to, deposit_addr, amount, NEO_MAINNET_ENDPOINT, - wallet_pass=wallet_password, addr_from=address_to) + txid = nep17_transfer( + wallet_to, + deposit_addr, + amount, + NEO_MAINNET_ENDPOINT, + wallet_pass=wallet_password, + addr_from=address_to, + ) if not transaction_accepted(txid): raise AssertionError(f"TX {txid} hasn't been processed") @@ -156,21 +172,21 @@ def _address_from_wallet(wlt: str, wallet_password: str): return address -@allure.step('Get Mainnet Balance') +@allure.step("Get Mainnet Balance") def get_mainnet_balance(address: str): resp = mainnet_rpc_cli.get_nep17_balances(address=address) logger.info(f"Got getnep17balances response: {resp}") - for balance in resp['balance']: - if balance['assethash'] == GAS_HASH: - return float(balance['amount'])/ASSET_POWER_MAINCHAIN + for balance in resp["balance"]: + if balance["assethash"] == GAS_HASH: + return float(balance["amount"]) / ASSET_POWER_MAINCHAIN return float(0) -@allure.step('Get Sidechain Balance') +@allure.step("Get Sidechain Balance") def get_sidechain_balance(address: str): resp = morph_rpc_cli.get_nep17_balances(address=address) logger.info(f"Got getnep17balances response: {resp}") - for balance in resp['balance']: - if balance['assethash'] == GAS_HASH: - return float(balance['amount'])/ASSET_POWER_SIDECHAIN + for balance in resp["balance"]: + if balance["assethash"] == GAS_HASH: + return float(balance["amount"]) / ASSET_POWER_SIDECHAIN return float(0) diff --git a/robot/resources/lib/python_keywords/s3_gate_bucket.py b/robot/resources/lib/python_keywords/s3_gate_bucket.py index 09239368..98174baf 100644 --- a/robot/resources/lib/python_keywords/s3_gate_bucket.py +++ b/robot/resources/lib/python_keywords/s3_gate_bucket.py @@ -2,6 +2,7 @@ import allure import json +import logging import os import re import uuid @@ -9,12 +10,12 @@ from enum import Enum from time import sleep from typing import Optional +import allure import boto3 import urllib3 from botocore.exceptions import ClientError -from robot.api import logger from cli_helpers import _run_with_passwd, log_command_execution -from common import NEOFS_ENDPOINT, S3_GATE, S3_GATE_WALLET_PATH, S3_GATE_WALLET_PASS +from common import NEOFS_ENDPOINT, S3_GATE, S3_GATE_WALLET_PASS, S3_GATE_WALLET_PATH from data_formatters import get_wallet_public_key ########################################################## @@ -22,10 +23,10 @@ from data_formatters import get_wallet_public_key # boto library produces on requests to S3-gate in dev-env. 
urllib3.disable_warnings() ########################################################## - +logger = logging.getLogger("NeoLogger") ROBOT_AUTO_KEYWORDS = False -CREDENTIALS_CREATE_TIMEOUT = '30s' -NEOFS_EXEC = os.getenv('NEOFS_EXEC', 'neofs-authmate') +CREDENTIALS_CREATE_TIMEOUT = "30s" +NEOFS_EXEC = os.getenv("NEOFS_EXEC", "neofs-authmate") # Artificial delay that we add after object deletion and container creation # Delay is added because sometimes immediately after deletion object still appears @@ -33,179 +34,193 @@ NEOFS_EXEC = os.getenv('NEOFS_EXEC', 'neofs-authmate') # TODO: remove after https://github.com/nspcc-dev/neofs-s3-gw/issues/610 is fixed S3_SYNC_WAIT_TIME = 5 + class VersioningStatus(Enum): - ENABLED = 'Enabled' - SUSPENDED = 'Suspended' + ENABLED = "Enabled" + SUSPENDED = "Suspended" -@allure.step('Init S3 Credentials') +@allure.step("Init S3 Credentials") def init_s3_credentials(wallet_path, s3_bearer_rules_file: Optional[str] = None): bucket = str(uuid.uuid4()) - s3_bearer_rules = s3_bearer_rules_file or 'robot/resources/files/s3_bearer_rules.json' + s3_bearer_rules = s3_bearer_rules_file or "robot/resources/files/s3_bearer_rules.json" gate_public_key = get_wallet_public_key(S3_GATE_WALLET_PATH, S3_GATE_WALLET_PASS) cmd = ( - f'{NEOFS_EXEC} --debug --with-log --timeout {CREDENTIALS_CREATE_TIMEOUT} ' - f'issue-secret --wallet {wallet_path} --gate-public-key={gate_public_key} ' - f'--peer {NEOFS_ENDPOINT} --container-friendly-name {bucket} ' - f'--bearer-rules {s3_bearer_rules}' + f"{NEOFS_EXEC} --debug --with-log --timeout {CREDENTIALS_CREATE_TIMEOUT} " + f"issue-secret --wallet {wallet_path} --gate-public-key={gate_public_key} " + f"--peer {NEOFS_ENDPOINT} --container-friendly-name {bucket} " + f"--bearer-rules {s3_bearer_rules}" ) - logger.info(f'Executing command: {cmd}') + logger.info(f"Executing command: {cmd}") try: output = _run_with_passwd(cmd) - logger.info(f'Command completed with output: {output}') + logger.info(f"Command completed with output: {output}") # output contains some debug info and then several JSON structures, so we find each # JSON structure by curly brackets (naive approach, but works while JSON is not nested) # and then we take JSON containing secret_access_key - json_blocks = re.findall(r'\{.*?\}', output, re.DOTALL) + json_blocks = re.findall(r"\{.*?\}", output, re.DOTALL) for json_block in json_blocks: try: parsed_json_block = json.loads(json_block) - if 'secret_access_key' in parsed_json_block: + if "secret_access_key" in parsed_json_block: return ( - parsed_json_block['container_id'], + parsed_json_block["container_id"], bucket, - parsed_json_block['access_key_id'], - parsed_json_block['secret_access_key'], - parsed_json_block['owner_private_key'] + parsed_json_block["access_key_id"], + parsed_json_block["secret_access_key"], + parsed_json_block["owner_private_key"], ) except json.JSONDecodeError: - raise AssertionError(f'Could not parse info from output\n{output}') - raise AssertionError(f'Could not find AWS credentials in output:\n{output}') + raise AssertionError(f"Could not parse info from output\n{output}") + raise AssertionError(f"Could not find AWS credentials in output:\n{output}") except Exception as exc: - raise RuntimeError(f'Failed to init s3 credentials because of error\n{exc}') from exc + raise RuntimeError(f"Failed to init s3 credentials because of error\n{exc}") from exc -@allure.step('Config S3 client') +@allure.step("Config S3 client") def config_s3_client(access_key_id: str, secret_access_key: str): try: session = 
boto3.session.Session() s3_client = session.client( - service_name='s3', + service_name="s3", aws_access_key_id=access_key_id, aws_secret_access_key=secret_access_key, - endpoint_url=S3_GATE, verify=False + endpoint_url=S3_GATE, + verify=False, ) return s3_client except ClientError as err: - raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n' - f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err + raise Exception( + f'Error Message: {err.response["Error"]["Message"]}\n' + f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}' + ) from err -@allure.step('Create bucket S3') +@allure.step("Create bucket S3") def create_bucket_s3(s3_client): bucket_name = str(uuid.uuid4()) try: s3_bucket = s3_client.create_bucket(Bucket=bucket_name) - log_command_execution(f'Created S3 bucket {bucket_name}', s3_bucket) + log_command_execution(f"Created S3 bucket {bucket_name}", s3_bucket) sleep(S3_SYNC_WAIT_TIME) return bucket_name except ClientError as err: - raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n' - f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err + raise Exception( + f'Error Message: {err.response["Error"]["Message"]}\n' + f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}' + ) from err -@allure.step('List buckets S3') +@allure.step("List buckets S3") def list_buckets_s3(s3_client): found_buckets = [] try: response = s3_client.list_buckets() - log_command_execution('S3 List buckets result', response) + log_command_execution("S3 List buckets result", response) - for bucket in response['Buckets']: - found_buckets.append(bucket['Name']) + for bucket in response["Buckets"]: + found_buckets.append(bucket["Name"]) return found_buckets except ClientError as err: - raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n' - f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err + raise Exception( + f'Error Message: {err.response["Error"]["Message"]}\n' + f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}' + ) from err -@allure.step('Delete bucket S3') +@allure.step("Delete bucket S3") def delete_bucket_s3(s3_client, bucket: str): try: response = s3_client.delete_bucket(Bucket=bucket) - log_command_execution('S3 Delete bucket result', response) + log_command_execution("S3 Delete bucket result", response) sleep(S3_SYNC_WAIT_TIME) return response except ClientError as err: - log_command_execution('S3 Delete bucket error', str(err)) - raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n' - f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err + log_command_execution("S3 Delete bucket error", str(err)) + raise Exception( + f'Error Message: {err.response["Error"]["Message"]}\n' + f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}' + ) from err -@allure.step('Head bucket S3') +@allure.step("Head bucket S3") def head_bucket(s3_client, bucket: str): try: response = s3_client.head_bucket(Bucket=bucket) - log_command_execution('S3 Head bucket result', response) + log_command_execution("S3 Head bucket result", response) return response except ClientError as err: - log_command_execution('S3 Head bucket error', str(err)) - raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n' - f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err + log_command_execution("S3 Head bucket error", str(err)) + raise 
Exception( + f'Error Message: {err.response["Error"]["Message"]}\n' + f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}' + ) from err -@allure.step('Set bucket versioning status') +@allure.step("Set bucket versioning status") def set_bucket_versioning(s3_client, bucket_name: str, status: VersioningStatus) -> None: try: - response = s3_client.put_bucket_versioning(Bucket=bucket_name, VersioningConfiguration={'Status': status.value}) - log_command_execution('S3 Set bucket versioning to', response) + response = s3_client.put_bucket_versioning( + Bucket=bucket_name, VersioningConfiguration={"Status": status.value} + ) + log_command_execution("S3 Set bucket versioning to", response) except ClientError as err: - raise Exception(f'Got error during set bucket versioning: {err}') from err + raise Exception(f"Got error during set bucket versioning: {err}") from err -@allure.step('Get bucket versioning status') +@allure.step("Get bucket versioning status") def get_bucket_versioning_status(s3_client, bucket_name: str) -> str: try: response = s3_client.get_bucket_versioning(Bucket=bucket_name) - status = response.get('Status') - log_command_execution('S3 Got bucket versioning status', response) + status = response.get("Status") + log_command_execution("S3 Got bucket versioning status", response) return status except ClientError as err: - raise Exception(f'Got error during get bucket versioning status: {err}') from err + raise Exception(f"Got error during get bucket versioning status: {err}") from err -@allure.step('Put bucket tagging') +@allure.step("Put bucket tagging") def put_bucket_tagging(s3_client, bucket_name: str, tags: list): try: - tags = [{'Key': tag_key, 'Value': tag_value} for tag_key, tag_value in tags] - tagging = {'TagSet': tags} + tags = [{"Key": tag_key, "Value": tag_value} for tag_key, tag_value in tags] + tagging = {"TagSet": tags} response = s3_client.put_bucket_tagging(Bucket=bucket_name, Tagging=tagging) - log_command_execution('S3 Put bucket tagging', response) + log_command_execution("S3 Put bucket tagging", response) except ClientError as err: - raise Exception(f'Got error during put bucket tagging: {err}') from err + raise Exception(f"Got error during put bucket tagging: {err}") from err -@allure.step('Get bucket tagging') +@allure.step("Get bucket tagging") def get_bucket_tagging(s3_client, bucket_name: str) -> list: try: response = s3_client.get_bucket_tagging(Bucket=bucket_name) - log_command_execution('S3 Get bucket tagging', response) - return response.get('TagSet') + log_command_execution("S3 Get bucket tagging", response) + return response.get("TagSet") except ClientError as err: - raise Exception(f'Got error during get bucket tagging: {err}') from err + raise Exception(f"Got error during get bucket tagging: {err}") from err -@allure.step('Delete bucket tagging') +@allure.step("Delete bucket tagging") def delete_bucket_tagging(s3_client, bucket_name: str) -> None: try: response = s3_client.delete_bucket_tagging(Bucket=bucket_name) - log_command_execution('S3 Delete bucket tagging', response) + log_command_execution("S3 Delete bucket tagging", response) except ClientError as err: - raise Exception(f'Got error during delete bucket tagging: {err}') from err + raise Exception(f"Got error during delete bucket tagging: {err}") from err diff --git a/robot/resources/lib/python_keywords/s3_gate_object.py b/robot/resources/lib/python_keywords/s3_gate_object.py index 32f78781..0ab544b2 100644 --- a/robot/resources/lib/python_keywords/s3_gate_object.py +++ 
b/robot/resources/lib/python_keywords/s3_gate_object.py @@ -1,15 +1,15 @@ #!/usr/bin/python3.9 -import allure +import logging import os import uuid from enum import Enum from time import sleep from typing import Optional +import allure import urllib3 from botocore.exceptions import ClientError -from robot.api import logger from cli_helpers import log_command_execution from python_keywords.aws_cli_client import AwsCliClient from python_keywords.s3_gate_bucket import S3_SYNC_WAIT_TIME @@ -19,128 +19,143 @@ from python_keywords.s3_gate_bucket import S3_SYNC_WAIT_TIME # boto library produces on requests to S3-gate in dev-env. urllib3.disable_warnings() ########################################################## - +logger = logging.getLogger("NeoLogger") ROBOT_AUTO_KEYWORDS = False -CREDENTIALS_CREATE_TIMEOUT = '30s' +CREDENTIALS_CREATE_TIMEOUT = "30s" -ASSETS_DIR = os.getenv('ASSETS_DIR', 'TemporaryDir/') +ASSETS_DIR = os.getenv("ASSETS_DIR", "TemporaryDir/") class VersioningStatus(Enum): - ENABLED = 'Enabled' - SUSPENDED = 'Suspended' + ENABLED = "Enabled" + SUSPENDED = "Suspended" -@allure.step('List objects S3 v2') +@allure.step("List objects S3 v2") def list_objects_s3_v2(s3_client, bucket: str) -> list: try: response = s3_client.list_objects_v2(Bucket=bucket) - content = response.get('Contents', []) - log_command_execution('S3 v2 List objects result', response) + content = response.get("Contents", []) + log_command_execution("S3 v2 List objects result", response) obj_list = [] for obj in content: - obj_list.append(obj['Key']) - logger.info(f'Found s3 objects: {obj_list}') + obj_list.append(obj["Key"]) + logger.info(f"Found s3 objects: {obj_list}") return obj_list except ClientError as err: - raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n' - f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err + raise Exception( + f'Error Message: {err.response["Error"]["Message"]}\n' + f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}' + ) from err -@allure.step('List objects S3') +@allure.step("List objects S3") def list_objects_s3(s3_client, bucket: str) -> list: try: response = s3_client.list_objects(Bucket=bucket) - content = response.get('Contents', []) - log_command_execution('S3 List objects result', response) + content = response.get("Contents", []) + log_command_execution("S3 List objects result", response) obj_list = [] for obj in content: - obj_list.append(obj['Key']) - logger.info(f'Found s3 objects: {obj_list}') + obj_list.append(obj["Key"]) + logger.info(f"Found s3 objects: {obj_list}") return obj_list except ClientError as err: - raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n' - f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err + raise Exception( + f'Error Message: {err.response["Error"]["Message"]}\n' + f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}' + ) from err -@allure.step('List objects versions S3') +@allure.step("List objects versions S3") def list_objects_versions_s3(s3_client, bucket: str) -> list: try: response = s3_client.list_object_versions(Bucket=bucket) - versions = response.get('Versions', []) - log_command_execution('S3 List objects versions result', response) + versions = response.get("Versions", []) + log_command_execution("S3 List objects versions result", response) return versions except ClientError as err: - raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n' - f'Http status code: 
{err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err + raise Exception( + f'Error Message: {err.response["Error"]["Message"]}\n' + f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}' + ) from err -@allure.step('Put object S3') +@allure.step("Put object S3") def put_object_s3(s3_client, bucket: str, filepath: str): filename = os.path.basename(filepath) if isinstance(s3_client, AwsCliClient): file_content = filepath else: - with open(filepath, 'rb') as put_file: + with open(filepath, "rb") as put_file: file_content = put_file.read() try: response = s3_client.put_object(Body=file_content, Bucket=bucket, Key=filename) - log_command_execution('S3 Put object result', response) - return response.get('VersionId') + log_command_execution("S3 Put object result", response) + return response.get("VersionId") except ClientError as err: - raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n' - f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err + raise Exception( + f'Error Message: {err.response["Error"]["Message"]}\n' + f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}' + ) from err -@allure.step('Head object S3') +@allure.step("Head object S3") def head_object_s3(s3_client, bucket: str, object_key: str, version_id: str = None): try: - params = {'Bucket': bucket, 'Key': object_key} + params = {"Bucket": bucket, "Key": object_key} if version_id: - params['VersionId'] = version_id + params["VersionId"] = version_id response = s3_client.head_object(**params) - log_command_execution('S3 Head object result', response) + log_command_execution("S3 Head object result", response) return response except ClientError as err: - raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n' - f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err + raise Exception( + f'Error Message: {err.response["Error"]["Message"]}\n' + f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}' + ) from err -@allure.step('Delete object S3') +@allure.step("Delete object S3") def delete_object_s3(s3_client, bucket, object_key, version_id: str = None): try: - params = {'Bucket': bucket, 'Key': object_key} + params = {"Bucket": bucket, "Key": object_key} if version_id: - params['VersionId'] = version_id + params["VersionId"] = version_id response = s3_client.delete_object(**params) - log_command_execution('S3 Delete object result', response) + log_command_execution("S3 Delete object result", response) sleep(S3_SYNC_WAIT_TIME) return response except ClientError as err: - raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n' - f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err + raise Exception( + f'Error Message: {err.response["Error"]["Message"]}\n' + f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}' + ) from err -@allure.step('Delete objects S3') +@allure.step("Delete objects S3") def delete_objects_s3(s3_client, bucket: str, object_keys: list): try: response = s3_client.delete_objects(Bucket=bucket, Delete=_make_objs_dict(object_keys)) - log_command_execution('S3 Delete objects result', response) + log_command_execution("S3 Delete objects result", response) sleep(S3_SYNC_WAIT_TIME) return response except ClientError as err: - raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n' - f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err + raise Exception( + 
f'Error Message: {err.response["Error"]["Message"]}\n' + f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}' + ) from err -@allure.step('Delete object versions S3') + +@allure.step("Delete object versions S3") def delete_object_versions_s3(s3_client, bucket: str, object_versions: list): try: # Build deletion list in S3 format @@ -154,187 +169,230 @@ def delete_object_versions_s3(s3_client, bucket: str, object_versions: list): ] } response = s3_client.delete_objects(Bucket=bucket, Delete=delete_list) - log_command_execution('S3 Delete objects result', response) + log_command_execution("S3 Delete objects result", response) return response except ClientError as err: - raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n' - f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err + raise Exception( + f'Error Message: {err.response["Error"]["Message"]}\n' + f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}' + ) from err -@allure.step('Copy object S3') +@allure.step("Copy object S3") def copy_object_s3(s3_client, bucket, object_key, bucket_dst=None): - filename = f'{os.getcwd()}/{uuid.uuid4()}' + filename = f"{os.getcwd()}/{uuid.uuid4()}" try: - response = s3_client.copy_object(Bucket=bucket_dst or bucket, - CopySource=f'{bucket}/{object_key}', - Key=filename) - log_command_execution('S3 Copy objects result', response) + response = s3_client.copy_object( + Bucket=bucket_dst or bucket, CopySource=f"{bucket}/{object_key}", Key=filename + ) + log_command_execution("S3 Copy objects result", response) return filename except ClientError as err: - raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n' - f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err + raise Exception( + f'Error Message: {err.response["Error"]["Message"]}\n' + f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}' + ) from err -@allure.step('Get object S3') +@allure.step("Get object S3") def get_object_s3(s3_client, bucket: str, object_key: str, version_id: str = None): - filename = f'{ASSETS_DIR}/{uuid.uuid4()}' + filename = f"{ASSETS_DIR}/{uuid.uuid4()}" try: - params = {'Bucket': bucket, 'Key': object_key} + params = {"Bucket": bucket, "Key": object_key} if version_id: - params['VersionId'] = version_id + params["VersionId"] = version_id if isinstance(s3_client, AwsCliClient): - params['file_path'] = filename + params["file_path"] = filename response = s3_client.get_object(**params) - log_command_execution('S3 Get objects result', response) + log_command_execution("S3 Get objects result", response) if not isinstance(s3_client, AwsCliClient): - with open(f'{filename}', 'wb') as get_file: - chunk = response['Body'].read(1024) + with open(f"{filename}", "wb") as get_file: + chunk = response["Body"].read(1024) while chunk: get_file.write(chunk) - chunk = response['Body'].read(1024) + chunk = response["Body"].read(1024) return filename except ClientError as err: - raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n' - f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err + raise Exception( + f'Error Message: {err.response["Error"]["Message"]}\n' + f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}' + ) from err -@allure.step('Create multipart upload S3') +@allure.step("Create multipart upload S3") def create_multipart_upload_s3(s3_client, bucket_name: str, object_key: str) -> str: try: response = 
s3_client.create_multipart_upload(Bucket=bucket_name, Key=object_key) - log_command_execution('S3 Created multipart upload', response) - assert response.get('UploadId'), f'Expected UploadId in response:\n{response}' + log_command_execution("S3 Created multipart upload", response) + assert response.get("UploadId"), f"Expected UploadId in response:\n{response}" - return response.get('UploadId') + return response.get("UploadId") except ClientError as err: - raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n' - f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err + raise Exception( + f'Error Message: {err.response["Error"]["Message"]}\n' + f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}' + ) from err -@allure.step('List multipart uploads S3') +@allure.step("List multipart uploads S3") def list_multipart_uploads_s3(s3_client, bucket_name: str) -> Optional[list[dict]]: try: response = s3_client.list_multipart_uploads(Bucket=bucket_name) - log_command_execution('S3 List multipart upload', response) + log_command_execution("S3 List multipart upload", response) - return response.get('Uploads') + return response.get("Uploads") except ClientError as err: - raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n' - f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err + raise Exception( + f'Error Message: {err.response["Error"]["Message"]}\n' + f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}' + ) from err -@allure.step('Abort multipart upload S3') +@allure.step("Abort multipart upload S3") def abort_multipart_uploads_s3(s3_client, bucket_name: str, object_key: str, upload_id: str): try: - response = s3_client.abort_multipart_upload(Bucket=bucket_name, Key=object_key, UploadId=upload_id) - log_command_execution('S3 Abort multipart upload', response) + response = s3_client.abort_multipart_upload( + Bucket=bucket_name, Key=object_key, UploadId=upload_id + ) + log_command_execution("S3 Abort multipart upload", response) except ClientError as err: - raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n' - f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err + raise Exception( + f'Error Message: {err.response["Error"]["Message"]}\n' + f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}' + ) from err -@allure.step('Upload part S3') -def upload_part_s3(s3_client, bucket_name: str, object_key: str, upload_id: str, part_num: int, filepath: str) -> str: +@allure.step("Upload part S3") +def upload_part_s3( + s3_client, bucket_name: str, object_key: str, upload_id: str, part_num: int, filepath: str +) -> str: if isinstance(s3_client, AwsCliClient): file_content = filepath else: - with open(filepath, 'rb') as put_file: + with open(filepath, "rb") as put_file: file_content = put_file.read() try: - response = s3_client.upload_part(UploadId=upload_id, Bucket=bucket_name, Key=object_key, PartNumber=part_num, - Body=file_content) - log_command_execution('S3 Upload part', response) - assert response.get('ETag'), f'Expected ETag in response:\n{response}' + response = s3_client.upload_part( + UploadId=upload_id, + Bucket=bucket_name, + Key=object_key, + PartNumber=part_num, + Body=file_content, + ) + log_command_execution("S3 Upload part", response) + assert response.get("ETag"), f"Expected ETag in response:\n{response}" - return response.get('ETag') + return response.get("ETag") except ClientError as err: - raise 
Exception(f'Error Message: {err.response["Error"]["Message"]}\n' - f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err + raise Exception( + f'Error Message: {err.response["Error"]["Message"]}\n' + f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}' + ) from err -@allure.step('List parts S3') +@allure.step("List parts S3") def list_parts_s3(s3_client, bucket_name: str, object_key: str, upload_id: str) -> list[dict]: try: response = s3_client.list_parts(UploadId=upload_id, Bucket=bucket_name, Key=object_key) - log_command_execution('S3 List part', response) - assert response.get('Parts'), f'Expected Parts in response:\n{response}' + log_command_execution("S3 List part", response) + assert response.get("Parts"), f"Expected Parts in response:\n{response}" - return response.get('Parts') + return response.get("Parts") except ClientError as err: - raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n' - f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err + raise Exception( + f'Error Message: {err.response["Error"]["Message"]}\n' + f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}' + ) from err -@allure.step('Complete multipart upload S3') -def complete_multipart_upload_s3(s3_client, bucket_name: str, object_key: str, upload_id: str, - parts: list): +@allure.step("Complete multipart upload S3") +def complete_multipart_upload_s3( + s3_client, bucket_name: str, object_key: str, upload_id: str, parts: list +): try: - parts = [{'ETag': etag, 'PartNumber': part_num} for part_num, etag in parts] - response = s3_client.complete_multipart_upload(Bucket=bucket_name, Key=object_key, UploadId=upload_id, - MultipartUpload={'Parts': parts}) - log_command_execution('S3 Complete multipart upload', response) + parts = [{"ETag": etag, "PartNumber": part_num} for part_num, etag in parts] + response = s3_client.complete_multipart_upload( + Bucket=bucket_name, Key=object_key, UploadId=upload_id, MultipartUpload={"Parts": parts} + ) + log_command_execution("S3 Complete multipart upload", response) except ClientError as err: - raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n' - f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err + raise Exception( + f'Error Message: {err.response["Error"]["Message"]}\n' + f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}' + ) from err -@allure.step('Put object tagging') +@allure.step("Put object tagging") def put_object_tagging(s3_client, bucket_name: str, object_key: str, tags: list): try: - tags = [{'Key': tag_key, 'Value': tag_value} for tag_key, tag_value in tags] - tagging = {'TagSet': tags} + tags = [{"Key": tag_key, "Value": tag_value} for tag_key, tag_value in tags] + tagging = {"TagSet": tags} s3_client.put_object_tagging(Bucket=bucket_name, Key=object_key, Tagging=tagging) - log_command_execution('S3 Put object tagging', str(tags)) + log_command_execution("S3 Put object tagging", str(tags)) except ClientError as err: - raise Exception(f'Got error during put object tagging: {err}') from err + raise Exception(f"Got error during put object tagging: {err}") from err -@allure.step('Get object tagging') +@allure.step("Get object tagging") def get_object_tagging(s3_client, bucket_name: str, object_key: str) -> list: try: response = s3_client.get_object_tagging(Bucket=bucket_name, Key=object_key) - log_command_execution('S3 Get object tagging', response) - return response.get('TagSet') 
+ log_command_execution("S3 Get object tagging", response) + return response.get("TagSet") except ClientError as err: - raise Exception(f'Got error during get object tagging: {err}') from err + raise Exception(f"Got error during get object tagging: {err}") from err -@allure.step('Delete object tagging') +@allure.step("Delete object tagging") def delete_object_tagging(s3_client, bucket_name: str, object_key: str): try: response = s3_client.delete_object_tagging(Bucket=bucket_name, Key=object_key) - log_command_execution('S3 Delete object tagging', response) + log_command_execution("S3 Delete object tagging", response) except ClientError as err: - raise Exception(f'Got error during delete object tagging: {err}') from err + raise Exception(f"Got error during delete object tagging: {err}") from err -@allure.step('Get object tagging') -def get_object_attributes(s3_client, bucket_name: str, object_key: str, *attributes: str, version_id: str = None, - max_parts: int = None, part_number: int = None, get_full_resp=True) -> dict: +@allure.step("Get object tagging") +def get_object_attributes( + s3_client, + bucket_name: str, + object_key: str, + *attributes: str, + version_id: str = None, + max_parts: int = None, + part_number: int = None, + get_full_resp=True, +) -> dict: try: if not isinstance(s3_client, AwsCliClient): - logger.warn('Method get_object_attributes is not supported by boto3 client') + logger.warning("Method get_object_attributes is not supported by boto3 client") return {} - response = s3_client.get_object_attributes(bucket_name, object_key, *attributes, version_id=version_id, - max_parts=max_parts, part_number=part_number) - log_command_execution('S3 Get object attributes', response) + response = s3_client.get_object_attributes( + bucket_name, + object_key, + *attributes, + version_id=version_id, + max_parts=max_parts, + part_number=part_number, + ) + log_command_execution("S3 Get object attributes", response) for attr in attributes: - assert attr in response, f'Expected attribute {attr} in {response}' + assert attr in response, f"Expected attribute {attr} in {response}" if get_full_resp: return response @@ -342,13 +400,13 @@ def get_object_attributes(s3_client, bucket_name: str, object_key: str, *attribu return response.get(attributes[0]) except ClientError as err: - raise Exception(f'Got error during get object attributes: {err}') from err + raise Exception(f"Got error during get object attributes: {err}") from err def _make_objs_dict(key_names): objs_list = [] for key in key_names: - obj_dict = {'Key': key} + obj_dict = {"Key": key} objs_list.append(obj_dict) - objs_dict = {'Objects': objs_list} + objs_dict = {"Objects": objs_list} return objs_dict diff --git a/robot/resources/lib/python_keywords/session_token.py b/robot/resources/lib/python_keywords/session_token.py index c4e3b53c..d8b413fe 100644 --- a/robot/resources/lib/python_keywords/session_token.py +++ b/robot/resources/lib/python_keywords/session_token.py @@ -7,15 +7,17 @@ import allure import base64 import json +import logging import os import uuid +import allure import json_transformers from cli_helpers import _cmd_run, _run_with_passwd from common import ASSETS_DIR, NEOFS_ENDPOINT, WALLET_CONFIG from neo3 import wallet -from robot.api import logger +logger = logging.getLogger("NeoLogger") ROBOT_AUTO_KEYWORDS = False # path to neofs-cli executable @@ -45,9 +47,9 @@ def generate_session_token(owner: str, session_wallet: str, cid: str = "") -> st with open(session_wallet) as fout: session_wlt_content = json.load(fout) 
session_wlt = wallet.Wallet.from_json(session_wlt_content, password="") - pub_key_64 = base64.b64encode( - bytes.fromhex(str(session_wlt.accounts[0].public_key)) - ).decode("utf-8") + pub_key_64 = base64.b64encode(bytes.fromhex(str(session_wlt.accounts[0].public_key))).decode( + "utf-8" + ) session_token = { "body": { diff --git a/robot/resources/lib/python_keywords/storage_policy.py b/robot/resources/lib/python_keywords/storage_policy.py index 37026ddb..f906ff4c 100644 --- a/robot/resources/lib/python_keywords/storage_policy.py +++ b/robot/resources/lib/python_keywords/storage_policy.py @@ -5,61 +5,62 @@ that storage policies are respected. """ -import allure +import logging from typing import Optional -from robot.api import logger - +import allure import complex_object_actions import neofs_verbs from common import NEOFS_NETMAP from grpc_responses import OBJECT_NOT_FOUND, error_matches_status +logger = logging.getLogger("NeoLogger") ROBOT_AUTO_KEYWORDS = False -@allure.step('Get Object Copies') +@allure.step("Get Object Copies") def get_object_copies(complexity: str, wallet: str, cid: str, oid: str): """ - The function performs requests to all nodes of the container and - finds out if they store a copy of the object. The procedure is - different for simple and complex object, so the function requires - a sign of object complexity. - Args: - complexity (str): the tag of object size and complexity, - [Simple|Complex] - wallet (str): the path to the wallet on whose behalf the - copies are got - cid (str): ID of the container - oid (str): ID of the Object - Returns: - (int): the number of object copies in the container + The function performs requests to all nodes of the container and + finds out if they store a copy of the object. The procedure is + different for simple and complex object, so the function requires + a sign of object complexity. + Args: + complexity (str): the tag of object size and complexity, + [Simple|Complex] + wallet (str): the path to the wallet on whose behalf the + copies are got + cid (str): ID of the container + oid (str): ID of the Object + Returns: + (int): the number of object copies in the container """ - return (get_simple_object_copies(wallet, cid, oid) if complexity == "Simple" - else get_complex_object_copies(wallet, cid, oid)) + return ( + get_simple_object_copies(wallet, cid, oid) + if complexity == "Simple" + else get_complex_object_copies(wallet, cid, oid) + ) -@allure.step('Get Simple Object Copies') +@allure.step("Get Simple Object Copies") def get_simple_object_copies(wallet: str, cid: str, oid: str): """ - To figure out the number of a simple object copies, only direct - HEAD requests should be made to the every node of the container. - We consider non-empty HEAD response as a stored object copy. - Args: - wallet (str): the path to the wallet on whose behalf the - copies are got - cid (str): ID of the container - oid (str): ID of the Object - Returns: - (int): the number of object copies in the container + To figure out the number of a simple object copies, only direct + HEAD requests should be made to the every node of the container. + We consider non-empty HEAD response as a stored object copy. 
+ Args: + wallet (str): the path to the wallet on whose behalf the + copies are got + cid (str): ID of the container + oid (str): ID of the Object + Returns: + (int): the number of object copies in the container """ copies = 0 for node in NEOFS_NETMAP: try: - response = neofs_verbs.head_object(wallet, cid, oid, - endpoint=node, - is_direct=True) + response = neofs_verbs.head_object(wallet, cid, oid, endpoint=node, is_direct=True) if response: logger.info(f"Found object {oid} on node {node}") copies += 1 @@ -69,39 +70,41 @@ def get_simple_object_copies(wallet: str, cid: str, oid: str): return copies -@allure.step('Get Complex Object Copies') +@allure.step("Get Complex Object Copies") def get_complex_object_copies(wallet: str, cid: str, oid: str): """ - To figure out the number of a complex object copies, we firstly - need to retrieve its Last object. We consider that the number of - complex object copies is equal to the number of its last object - copies. When we have the Last object ID, the task is reduced - to getting simple object copies. - Args: - wallet (str): the path to the wallet on whose behalf the - copies are got - cid (str): ID of the container - oid (str): ID of the Object - Returns: - (int): the number of object copies in the container + To figure out the number of a complex object copies, we firstly + need to retrieve its Last object. We consider that the number of + complex object copies is equal to the number of its last object + copies. When we have the Last object ID, the task is reduced + to getting simple object copies. + Args: + wallet (str): the path to the wallet on whose behalf the + copies are got + cid (str): ID of the container + oid (str): ID of the Object + Returns: + (int): the number of object copies in the container """ last_oid = complex_object_actions.get_last_object(wallet, cid, oid) return get_simple_object_copies(wallet, cid, last_oid) -@allure.step('Get Nodes With Object') -def get_nodes_with_object(wallet: str, cid: str, oid: str, skip_nodes: Optional[list[str]] = None) -> list[str]: +@allure.step("Get Nodes With Object") +def get_nodes_with_object( + wallet: str, cid: str, oid: str, skip_nodes: Optional[list[str]] = None +) -> list[str]: """ - The function returns list of nodes which store - the given object. - Args: - wallet (str): the path to the wallet on whose behalf - we request the nodes - cid (str): ID of the container which store the object - oid (str): object ID - skip_nodes (list): list of nodes that should be excluded from check - Returns: - (list): nodes which store the object + The function returns list of nodes which store + the given object. 
+ Args: + wallet (str): the path to the wallet on whose behalf + we request the nodes + cid (str): ID of the container which store the object + oid (str): object ID + skip_nodes (list): list of nodes that should be excluded from check + Returns: + (list): nodes which store the object """ nodes_to_search = NEOFS_NETMAP if skip_nodes: @@ -110,9 +113,7 @@ def get_nodes_with_object(wallet: str, cid: str, oid: str, skip_nodes: Optional[ nodes_list = [] for node in nodes_to_search: try: - res = neofs_verbs.head_object(wallet, cid, oid, - endpoint=node, - is_direct=True) + res = neofs_verbs.head_object(wallet, cid, oid, endpoint=node, is_direct=True) if res is not None: logger.info(f"Found object {oid} on node {node}") nodes_list.append(node) @@ -122,30 +123,28 @@ def get_nodes_with_object(wallet: str, cid: str, oid: str, skip_nodes: Optional[ return nodes_list -@allure.step('Get Nodes Without Object') +@allure.step("Get Nodes Without Object") def get_nodes_without_object(wallet: str, cid: str, oid: str): """ - The function returns list of nodes which do not store - the given object. - Args: - wallet (str): the path to the wallet on whose behalf - we request the nodes - cid (str): ID of the container which store the object - oid (str): object ID - Returns: - (list): nodes which do not store the object + The function returns list of nodes which do not store + the given object. + Args: + wallet (str): the path to the wallet on whose behalf + we request the nodes + cid (str): ID of the container which store the object + oid (str): object ID + Returns: + (list): nodes which do not store the object """ nodes_list = [] for node in NEOFS_NETMAP: try: - res = neofs_verbs.head_object(wallet, cid, oid, - endpoint=node, - is_direct=True) + res = neofs_verbs.head_object(wallet, cid, oid, endpoint=node, is_direct=True) if res is None: nodes_list.append(node) except Exception as err: if error_matches_status(err, OBJECT_NOT_FOUND): nodes_list.append(node) else: - raise Exception(f'Got error {err} on head object command') from err + raise Exception(f"Got error {err} on head object command") from err return nodes_list diff --git a/robot/resources/lib/python_keywords/tombstone.py b/robot/resources/lib/python_keywords/tombstone.py index 35645a99..1c518577 100644 --- a/robot/resources/lib/python_keywords/tombstone.py +++ b/robot/resources/lib/python_keywords/tombstone.py @@ -3,46 +3,33 @@ import allure import json +import allure import neofs_verbs from neo3 import wallet -from robot.libraries.BuiltIn import BuiltIn ROBOT_AUTO_KEYWORDS = False -@allure.step('Verify Head Tombstone') +@allure.step("Verify Head Tombstone") def verify_head_tombstone(wallet_path: str, cid: str, oid_ts: str, oid: str): header = neofs_verbs.head_object(wallet_path, cid, oid_ts) - header = header['header'] - - BuiltIn().should_be_equal(header["containerID"], cid, - msg="Tombstone Header CID is wrong") + header = header["header"] + assert header["containerID"] == cid, "Tombstone Header CID is wrong" wlt_data = dict() - with open(wallet_path, 'r') as fout: + with open(wallet_path, "r") as fout: wlt_data = json.loads(fout.read()) - wlt = wallet.Wallet.from_json(wlt_data, password='') + wlt = wallet.Wallet.from_json(wlt_data, password="") addr = wlt.accounts[0].address - BuiltIn().should_be_equal(header["ownerID"], addr, - msg="Tombstone Owner ID is wrong") - - BuiltIn().should_be_equal(header["objectType"], 'TOMBSTONE', - msg="Header Type isn't Tombstone") - - BuiltIn().should_be_equal( - header["sessionToken"]["body"]["object"]["verb"], 
'DELETE', - msg="Header Session Type isn't DELETE" - ) - - BuiltIn().should_be_equal( - header["sessionToken"]["body"]["object"]["address"]["containerID"], - cid, - msg="Header Session ID is wrong" - ) - - BuiltIn().should_be_equal( - header["sessionToken"]["body"]["object"]["address"]["objectID"], - oid, - msg="Header Session OID is wrong" - ) + assert header["ownerID"] == addr, "Tombstone Owner ID is wrong" + assert header["objectType"] == "TOMBSTONE", "Header Type isn't Tombstone" + assert ( + header["sessionToken"]["body"]["object"]["verb"] == "DELETE" + ), "Header Session Type isn't DELETE" + assert ( + header["sessionToken"]["body"]["object"]["address"]["containerID"] == cid + ), "Header Session ID is wrong" + assert ( + header["sessionToken"]["body"]["object"]["address"]["objectID"] == oid + ), "Header Session OID is wrong" diff --git a/robot/resources/lib/python_keywords/utility_keywords.py b/robot/resources/lib/python_keywords/utility_keywords.py index 57b04b66..1f8d3b05 100644 --- a/robot/resources/lib/python_keywords/utility_keywords.py +++ b/robot/resources/lib/python_keywords/utility_keywords.py @@ -2,18 +2,19 @@ import allure import hashlib +import logging import os import tarfile -from typing import Tuple import uuid +from typing import Tuple +import allure import docker import wallet -from common import ASSETS_DIR, SIMPLE_OBJ_SIZE from cli_helpers import _cmd_run -from robot.api import logger -from robot.libraries.BuiltIn import BuiltIn +from common import ASSETS_DIR, SIMPLE_OBJ_SIZE +logger = logging.getLogger("NeoLogger") ROBOT_AUTO_KEYWORDS = False @@ -26,14 +27,14 @@ def generate_file(size: int = SIMPLE_OBJ_SIZE) -> str: (str): the path to the generated file """ file_path = f"{os.getcwd()}/{ASSETS_DIR}/{str(uuid.uuid4())}" - with open(file_path, 'wb') as fout: + with open(file_path, "wb") as fout: fout.write(os.urandom(size)) logger.info(f"file with size {size} bytes has been generated: {file_path}") return file_path -@allure.step('Generate file') +@allure.step("Generate file") def generate_file_and_file_hash(size: int) -> Tuple[str, str]: """ Function generates a binary file with the specified size in bytes @@ -50,7 +51,7 @@ def generate_file_and_file_hash(size: int) -> Tuple[str, str]: return file_path, file_hash -@allure.step('Get File Hash') +@allure.step("Get File Hash") def get_file_hash(filename: str, len: int = None): """ This function generates hash for the specified file. 
@@ -69,23 +70,22 @@ def get_file_hash(filename: str, len: int = None):
     return file_hash.hexdigest()
 
 
-@allure.step('Generate Wallet')
+@allure.step("Generate Wallet")
 def generate_wallet():
     return wallet.init_wallet(ASSETS_DIR)
 
 
-@allure.step('Get Docker Logs')
+@allure.step("Get Docker Logs")
 def get_container_logs(testcase_name: str) -> None:
-    client = docker.APIClient(base_url='unix://var/run/docker.sock')
-    logs_dir = BuiltIn().get_variable_value("${OUTPUT_DIR}")
+    client = docker.APIClient(base_url="unix://var/run/docker.sock")
+    logs_dir = os.getenv("OUTPUT_DIR")
     tar_name = f"{logs_dir}/dockerlogs({testcase_name}).tar.gz"
     tar = tarfile.open(tar_name, "w:gz")
     for container in client.containers():
-        container_name = container['Names'][0][1:]
-        if (client.inspect_container(container_name)['Config']['Domainname']
-            == "neofs.devenv"):
+        container_name = container["Names"][0][1:]
+        if client.inspect_container(container_name)["Config"]["Domainname"] == "neofs.devenv":
             file_name = f"{logs_dir}/docker_log_{container_name}"
-            with open(file_name, 'wb') as out:
+            with open(file_name, "wb") as out:
                 out.write(client.logs(container_name))
             logger.info(f"Collected logs from container {container_name}")
             tar.add(file_name)
@@ -93,10 +93,10 @@ def get_container_logs(testcase_name: str) -> None:
     tar.close()
 
 
-@allure.step('Make Up')
+@allure.step("Make Up")
 def make_up(services: list = [], config_dict: dict = {}):
     test_path = os.getcwd()
-    dev_path = os.getenv('DEVENV_PATH', '../neofs-dev-env')
+    dev_path = os.getenv("DEVENV_PATH", "../neofs-dev-env")
     os.chdir(dev_path)
 
     if len(services) > 0:
@@ -104,33 +104,30 @@ def make_up(services: list = [], config_dict: dict = {}):
         if config_dict != {}:
             with open(f"{dev_path}/.int_test.env", "a") as out:
                 for key, value in config_dict.items():
-                    out.write(f'{key}={value}')
-            cmd = f'make up/{service}'
+                    out.write(f"{key}={value}")
+            cmd = f"make up/{service}"
             _cmd_run(cmd)
     else:
-        cmd = (
-            f'make up/basic;'
-            f'make update.max_object_size val={SIMPLE_OBJ_SIZE}'
-        )
+        cmd = f"make up/basic;" f"make update.max_object_size val={SIMPLE_OBJ_SIZE}"
         _cmd_run(cmd, timeout=120)
 
     os.chdir(test_path)
 
 
-@allure.step('Make Down')
+@allure.step("Make Down")
 def make_down(services: list = []):
     test_path = os.getcwd()
-    dev_path = os.getenv('DEVENV_PATH', '../neofs-dev-env')
+    dev_path = os.getenv("DEVENV_PATH", "../neofs-dev-env")
     os.chdir(dev_path)
 
     if len(services) > 0:
         for service in services:
-            cmd = f'make down/{service}'
+            cmd = f"make down/{service}"
            _cmd_run(cmd)
         with open(f"{dev_path}/.int_test.env", "w"):
             pass
     else:
-        cmd = 'make down; make clean'
+        cmd = "make down; make clean"
         _cmd_run(cmd, timeout=60)
 
     os.chdir(test_path)
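
Reviewer note, outside the diff itself: with robot.api.logger removed, every keyword module above logs through the standard logging module under the "NeoLogger" name, so its records only appear if the test runner attaches a handler to that logger. The snippet below is a minimal sketch of such wiring, assuming a plain stdout handler; the handler and format choices are illustrative assumptions and are not configured by this patch.

import logging

# Hypothetical example: attach a stdout handler so "NeoLogger" records emitted by
# the refactored keywords (s3_gate_*, storage_policy, utility_keywords, ...) are
# visible in test output.
logger = logging.getLogger("NeoLogger")
logger.setLevel(logging.INFO)

handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("%(asctime)s [%(levelname)s] %(name)s: %(message)s"))
logger.addHandler(handler)

logger.info("NeoLogger output is now handled by the stdlib logging module")

In practice pytest's own log_cli / log_level settings can serve the same purpose; the point is only that nothing from robot.api is required any more.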