[#297] remove robot.logger

Signed-off-by: Yulia Kovshova <y.kovshova@yadro.com>
Юлия Ковшова 2022-09-20 18:03:52 +03:00 committed by Julia Kovshova
parent 035175894d
commit a8a00c1c53
15 changed files with 811 additions and 713 deletions
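Every file in this change follows the same pattern: the Robot Framework logger (and, where present, BuiltIn) imports are dropped, and the modules log through the standard library under the existing "NeoLogger" name. A minimal before/after sketch of that pattern; the messages are illustrative and not taken from any one file:

# Before: logging went through Robot Framework's API.
# from robot.api import logger
# logger.info("some message")
# logger.warn("some warning")

# After: the standard library logger is used under the same "NeoLogger" name.
import logging

logger = logging.getLogger("NeoLogger")
logger.info("some message")
logger.warning("some warning")  # logger.warn() calls become logger.warning()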

View file

@@ -4,6 +4,7 @@ import re
import shutil
from datetime import datetime

+import allure
import pytest
import wallet
from cli_helpers import _cmd_run
@@ -13,18 +14,14 @@ from common import (
    FREE_STORAGE,
    INFRASTRUCTURE_TYPE,
    MAINNET_WALLET_PATH,
-    NEOFS_NETMAP_DICT
+    NEOFS_NETMAP_DICT,
)
from env_properties import save_env_properties
from payment_neogo import neofs_deposit, transfer_mainnet_gas
from python_keywords.node_management import node_healthcheck
-from robot.api import deco
from service_helper import get_storage_service_helper
from wallet import init_wallet
-import allure

logger = logging.getLogger("NeoLogger")
@@ -106,10 +103,7 @@ def run_health_check(collect_logs):
    failed_nodes = []
    for node_name in NEOFS_NETMAP_DICT.keys():
        health_check = node_healthcheck(node_name)
-        if (
-            health_check.health_status != "READY"
-            or health_check.network_status != "ONLINE"
-        ):
+        if health_check.health_status != "READY" or health_check.network_status != "ONLINE":
            failed_nodes.append(node_name)
    if failed_nodes:
@@ -121,9 +115,7 @@ def run_health_check(collect_logs):
def prepare_wallet_and_deposit(prepare_tmp_dir):
    wallet_path, addr, _ = wallet.init_wallet(ASSETS_DIR)
    logger.info(f"Init wallet: {wallet_path},\naddr: {addr}")
-    allure.attach.file(
-        wallet_path, os.path.basename(wallet_path), allure.attachment_type.JSON
-    )
+    allure.attach.file(wallet_path, os.path.basename(wallet_path), allure.attachment_type.JSON)
    if not FREE_STORAGE:
        deposit = 30

View file

@@ -4,6 +4,7 @@
    Helper functions to use with `neofs-cli`, `neo-go` and other CLIs.
"""
import json
+import logging
import subprocess
import sys
from contextlib import suppress
@@ -13,7 +14,8 @@ from typing import Union
import allure
import pexpect
-from robot.api import logger

+logger = logging.getLogger("NeoLogger")
ROBOT_AUTO_KEYWORDS = False
COLOR_GREEN = "\033[92m"
@@ -30,10 +32,15 @@ def _cmd_run(cmd: str, timeout: int = 30) -> str:
    try:
        logger.info(f"{COLOR_GREEN}Executing command: {cmd}{COLOR_OFF}")
        start_time = datetime.utcnow()
-        compl_proc = subprocess.run(cmd, check=True, universal_newlines=True,
-                                    stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
-                                    timeout=timeout,
-                                    shell=True)
+        compl_proc = subprocess.run(
+            cmd,
+            check=True,
+            universal_newlines=True,
+            stdout=subprocess.PIPE,
+            stderr=subprocess.STDOUT,
+            timeout=timeout,
+            shell=True,
+        )
        output = compl_proc.stdout
        return_code = compl_proc.returncode
        end_time = datetime.utcnow()
@@ -42,26 +49,27 @@ def _cmd_run(cmd: str, timeout: int = 30) -> str:
        return output
    except subprocess.CalledProcessError as exc:
-        logger.info(f"Command: {cmd}\n"
-                    f"Error:\nreturn code: {exc.returncode} "
-                    f"\nOutput: {exc.output}")
+        logger.info(
+            f"Command: {cmd}\n" f"Error:\nreturn code: {exc.returncode} " f"\nOutput: {exc.output}"
+        )
        end_time = datetime.now()
        return_code, cmd_output = subprocess.getstatusoutput(cmd)
        _attach_allure_log(cmd, cmd_output, return_code, start_time, end_time)
-        raise RuntimeError(f"Command: {cmd}\n"
-                           f"Error:\nreturn code: {exc.returncode}\n"
-                           f"Output: {exc.output}") from exc
+        raise RuntimeError(
+            f"Command: {cmd}\n" f"Error:\nreturn code: {exc.returncode}\n" f"Output: {exc.output}"
+        ) from exc
    except OSError as exc:
-        raise RuntimeError(f"Command: {cmd}\n"
-                           f"Output: {exc.strerror}") from exc
+        raise RuntimeError(f"Command: {cmd}\n" f"Output: {exc.strerror}") from exc
    except Exception as exc:
        return_code, cmd_output = subprocess.getstatusoutput(cmd)
        end_time = datetime.now()
        _attach_allure_log(cmd, cmd_output, return_code, start_time, end_time)
-        logger.info(f"Command: {cmd}\n"
-                    f"Error:\nreturn code: {return_code}\n"
-                    f"Output: {exc.output.decode('utf-8') if type(exc.output) is bytes else exc.output}")
+        logger.info(
+            f"Command: {cmd}\n"
+            f"Error:\nreturn code: {return_code}\n"
+            f"Output: {exc.output.decode('utf-8') if type(exc.output) is bytes else exc.output}"
+        )
        raise
@@ -69,7 +77,7 @@ def _run_with_passwd(cmd: str) -> str:
    child = pexpect.spawn(cmd)
    child.delaybeforesend = 1
    child.expect(".*")
-    child.sendline('\r')
+    child.sendline("\r")
    if sys.platform == "darwin":
        child.expect(pexpect.EOF)
        cmd = child.before
@@ -90,7 +98,7 @@ def _configure_aws_cli(cmd: str, key_id: str, access_key: str, out_format: str =
    child.sendline(access_key)
    child.expect("Default region name.*")
-    child.sendline('')
+    child.sendline("")
    child.expect("Default output format.*")
    child.sendline(out_format)
@@ -102,26 +110,24 @@ def _configure_aws_cli(cmd: str, key_id: str, access_key: str, out_format: str =
    return cmd.decode()


-def _attach_allure_log(cmd: str, output: str, return_code: int, start_time: datetime,
-                       end_time: datetime) -> None:
+def _attach_allure_log(
+    cmd: str, output: str, return_code: int, start_time: datetime, end_time: datetime
+) -> None:
    command_attachment = (
        f"COMMAND: '{cmd}'\n"
-        f'OUTPUT:\n {output}\n'
-        f'RC: {return_code}\n'
-        f'Start / End / Elapsed\t {start_time.time()} / {end_time.time()} / {end_time - start_time}'
+        f"OUTPUT:\n {output}\n"
+        f"RC: {return_code}\n"
+        f"Start / End / Elapsed\t {start_time.time()} / {end_time.time()} / {end_time - start_time}"
    )
    with allure.step(f'COMMAND: {shorten(cmd, width=60, placeholder="...")}'):
-        allure.attach(command_attachment, 'Command execution', allure.attachment_type.TEXT)
+        allure.attach(command_attachment, "Command execution", allure.attachment_type.TEXT)


def log_command_execution(cmd: str, output: Union[str, dict]) -> None:
-    logger.info(f'{cmd}: {output}')
+    logger.info(f"{cmd}: {output}")
    with suppress(Exception):
        json_output = json.dumps(output, indent=4, sort_keys=True)
        output = json_output
-    command_attachment = (
-        f"COMMAND: '{cmd}'\n"
-        f'OUTPUT:\n {output}\n'
-    )
+    command_attachment = f"COMMAND: '{cmd}'\n" f"OUTPUT:\n {output}\n"
    with allure.step(f'COMMAND: {shorten(cmd, width=60, placeholder="...")}'):
-        allure.attach(command_attachment, 'Command execution', allure.attachment_type.TEXT)
+        allure.attach(command_attachment, "Command execution", allure.attachment_type.TEXT)
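The _cmd_run helper keeps its signature after the reformatting: it takes a shell command string and an optional timeout in seconds, returns the captured stdout, and attaches the command, output, return code and timing to the Allure report. A hedged usage sketch; the command here is only an illustration:

from cli_helpers import _cmd_run

# Raises RuntimeError if the command exits with a non-zero code or times out.
output = _cmd_run("echo hello", timeout=30)
print(output)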

View file

@@ -10,73 +10,74 @@
    first non-null response.
"""
+import logging
import allure
import neofs_verbs
from common import NEOFS_NETMAP, WALLET_CONFIG
-from robot.api import logger
-from robot.libraries.BuiltIn import BuiltIn
-import neofs_verbs
-from common import NEOFS_NETMAP

+logger = logging.getLogger("NeoLogger")
ROBOT_AUTO_KEYWORDS = False


-@allure.step('Get Link Object')
-def get_link_object(wallet: str, cid: str, oid: str, bearer_token: str = "",
-                    wallet_config: str = WALLET_CONFIG):
+@allure.step("Get Link Object")
+def get_link_object(
+    wallet: str, cid: str, oid: str, bearer_token: str = "", wallet_config: str = WALLET_CONFIG
+):
    """
    Args:
        wallet (str): path to the wallet on whose behalf the Storage Nodes
                      are requested
        cid (str): Container ID which stores the Large Object
        oid (str): Large Object ID
        bearer_token (optional, str): path to Bearer token file
        wallet_config (optional, str): path to the neofs-cli config file
    Returns:
        (str): Link Object ID
        When no Link Object ID is found after all Storage Nodes polling,
        the function throws a native robot error.
    """
    for node in NEOFS_NETMAP:
        try:
-            resp = neofs_verbs.head_object(wallet, cid, oid,
-                                           endpoint=node,
-                                           is_raw=True,
-                                           is_direct=True,
-                                           bearer_token=bearer_token,
-                                           wallet_config=wallet_config)
-            if resp['link']:
-                return resp['link']
+            resp = neofs_verbs.head_object(
+                wallet,
+                cid,
+                oid,
+                endpoint=node,
+                is_raw=True,
+                is_direct=True,
+                bearer_token=bearer_token,
+                wallet_config=wallet_config,
+            )
+            if resp["link"]:
+                return resp["link"]
        except Exception:
            logger.info(f"No Link Object found on {node}; continue")
-    BuiltIn().fail(f"No Link Object for {cid}/{oid} found among all Storage Nodes")
+    logger.error(f"No Link Object for {cid}/{oid} found among all Storage Nodes")
    return None


-@allure.step('Get Last Object')
+@allure.step("Get Last Object")
def get_last_object(wallet: str, cid: str, oid: str):
    """
    Args:
        wallet (str): path to the wallet on whose behalf the Storage Nodes
                      are requested
        cid (str): Container ID which stores the Large Object
        oid (str): Large Object ID
    Returns:
        (str): Last Object ID
        When no Last Object ID is found after all Storage Nodes polling,
        the function throws a native robot error.
    """
    for node in NEOFS_NETMAP:
        try:
-            resp = neofs_verbs.head_object(wallet, cid, oid,
-                                           endpoint=node,
-                                           is_raw=True,
-                                           is_direct=True)
-            if resp['lastPart']:
-                return resp['lastPart']
+            resp = neofs_verbs.head_object(
+                wallet, cid, oid, endpoint=node, is_raw=True, is_direct=True
+            )
+            if resp["lastPart"]:
+                return resp["lastPart"]
        except Exception:
            logger.info(f"No Last Object found on {node}; continue")
-    BuiltIn().fail(f"No Last Object for {cid}/{oid} found among all Storage Nodes")
+    logger.error(f"No Last Object for {cid}/{oid} found among all Storage Nodes")
    return None
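Because BuiltIn().fail(...) is replaced with logger.error(...), get_link_object and get_last_object no longer abort the test run themselves when nothing is found; they log the error and return None. A sketch of how a caller might now assert on the result (the assertion messages are illustrative):

link_oid = get_link_object(wallet, cid, oid)
assert link_oid is not None, f"no Link Object found for {cid}/{oid}"

last_oid = get_last_object(wallet, cid, oid)
assert last_oid is not None, f"no Last Object found for {cid}/{oid}"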

View file

@@ -6,50 +6,69 @@
import allure
import json
+import logging
from time import sleep
from typing import Optional, Union

import json_transformers
from cli_utils import NeofsCli
from common import NEOFS_ENDPOINT, WALLET_CONFIG
-from robot.api import logger

+logger = logging.getLogger("NeoLogger")
ROBOT_AUTO_KEYWORDS = False
DEFAULT_PLACEMENT_RULE = "REP 2 IN X CBF 1 SELECT 4 FROM * AS X"


-@allure.step('Create Container')
-def create_container(wallet: str, rule: str = DEFAULT_PLACEMENT_RULE, basic_acl: str = '',
-                     attributes: Optional[dict] = None, session_token: str = '',
-                     session_wallet: str = '', name: str = None, options: dict = None,
-                     await_mode: bool = True, wait_for_creation: bool = True) -> str:
+@allure.step("Create Container")
+def create_container(
+    wallet: str,
+    rule: str = DEFAULT_PLACEMENT_RULE,
+    basic_acl: str = "",
+    attributes: Optional[dict] = None,
+    session_token: str = "",
+    session_wallet: str = "",
+    name: str = None,
+    options: dict = None,
+    await_mode: bool = True,
+    wait_for_creation: bool = True,
+) -> str:
    """
    A wrapper for `neofs-cli container create` call.
    Args:
        wallet (str): a wallet on whose behalf a container is created
        rule (optional, str): placement rule for container
        basic_acl (optional, str): an ACL for container, will be
                                   appended to `--basic-acl` key
        attributes (optional, dict): container attributes , will be
                                     appended to `--attributes` key
        session_token (optional, str): a path to session token file
        session_wallet(optional, str): a path to the wallet which signed
                                       the session token; this parameter makes sense
                                       when paired with `session_token`
        options (optional, dict): any other options to pass to the call
        name (optional, str): container name attribute
        await_mode (bool): block execution until container is persisted
        wait_for_creation (): Wait for container shows in container list
    Returns:
        (str): CID of the created container
    """
    cli = NeofsCli(config=WALLET_CONFIG, timeout=60)
-    output = cli.container.create(rpc_endpoint=NEOFS_ENDPOINT, wallet=session_wallet if session_wallet else wallet,
-                                  policy=rule, basic_acl=basic_acl, attributes=attributes, name=name,
-                                  session=session_token, await_mode=await_mode, **options or {})
+    output = cli.container.create(
+        rpc_endpoint=NEOFS_ENDPOINT,
+        wallet=session_wallet if session_wallet else wallet,
+        policy=rule,
+        basic_acl=basic_acl,
+        attributes=attributes,
+        name=name,
+        session=session_token,
+        await_mode=await_mode,
+        **options or {},
+    )

    cid = _parse_cid(output)
@@ -68,7 +87,9 @@ def wait_for_container_creation(wallet: str, cid: str, attempts: int = 15, sleep
            return
        logger.info(f"There is no {cid} in {containers} yet; sleep {sleep_interval} and continue")
        sleep(sleep_interval)
-    raise RuntimeError(f"After {attempts * sleep_interval} seconds container {cid} hasn't been persisted; exiting")
+    raise RuntimeError(
+        f"After {attempts * sleep_interval} seconds container {cid} hasn't been persisted; exiting"
+    )


def wait_for_container_deletion(wallet: str, cid: str, attempts: int = 30, sleep_interval: int = 1):
@@ -78,21 +99,21 @@ def wait_for_container_deletion(wallet: str, cid: str, attempts: int = 30, sleep
            sleep(sleep_interval)
            continue
        except Exception as err:
-            if 'container not found' not in str(err):
+            if "container not found" not in str(err):
                raise AssertionError(f'Expected "container not found" in error, got\n{err}')
            return
-    raise AssertionError(f'Expected container deleted during {attempts * sleep_interval} sec.')
+    raise AssertionError(f"Expected container deleted during {attempts * sleep_interval} sec.")


@allure.step('List Containers')
def list_containers(wallet: str) -> list[str]:
    """
    A wrapper for `neofs-cli container list` call. It returns all the
    available containers for the given wallet.
    Args:
        wallet (str): a wallet on whose behalf we list the containers
    Returns:
        (list): list of containers
    """
    cli = NeofsCli(config=WALLET_CONFIG)
    output = cli.container.list(rpc_endpoint=NEOFS_ENDPOINT, wallet=wallet)
@@ -103,27 +124,29 @@ def list_containers(wallet: str) -> list[str]:
@allure.step('Get Container')
def get_container(wallet: str, cid: str, json_mode: bool = True) -> Union[dict, str]:
    """
    A wrapper for `neofs-cli container get` call. It extracts container's
    attributes and rearranges them into a more compact view.
    Args:
        wallet (str): path to a wallet on whose behalf we get the container
        cid (str): ID of the container to get
        json_mode (bool): return container in JSON format
    Returns:
        (dict, str): dict of container attributes
    """
    cli = NeofsCli(config=WALLET_CONFIG)
-    output = cli.container.get(rpc_endpoint=NEOFS_ENDPOINT, wallet=wallet, cid=cid, json_mode=json_mode)
+    output = cli.container.get(
+        rpc_endpoint=NEOFS_ENDPOINT, wallet=wallet, cid=cid, json_mode=json_mode
+    )
    if not json_mode:
        return output

    container_info = json.loads(output)
    attributes = dict()
-    for attr in container_info['attributes']:
-        attributes[attr['key']] = attr['value']
-    container_info['attributes'] = attributes
-    container_info['ownerID'] = json_transformers.json_reencode(container_info['ownerID']['value'])
+    for attr in container_info["attributes"]:
+        attributes[attr["key"]] = attr["value"]
+    container_info["attributes"] = attributes
+    container_info["ownerID"] = json_transformers.json_reencode(container_info["ownerID"]["value"])
    return container_info
@@ -132,12 +155,12 @@ def get_container(wallet: str, cid: str, json_mode: bool = True) -> Union[dict,
# https://github.com/nspcc-dev/neofs-contract/issues/121
def delete_container(wallet: str, cid: str, force: bool = False) -> None:
    """
    A wrapper for `neofs-cli container delete` call.
    Args:
        wallet (str): path to a wallet on whose behalf we delete the container
        cid (str): ID of the container to delete
        force (bool): do not check whether container contains locks and remove immediately
    This function doesn't return anything.
    """
    cli = NeofsCli(config=WALLET_CONFIG)
@@ -160,7 +183,7 @@ def _parse_cid(output: str) -> str:
    """
    try:
        # taking first line from command's output
-        first_line = output.split('\n')[0]
+        first_line = output.split("\n")[0]
    except Exception:
        logger.error(f"Got empty output: {output}")
    splitted = first_line.split(": ")
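The reformatted create_container keeps the same keyword arguments as before. A hedged usage sketch of the wrapper together with list_containers; the wallet path and attribute values are placeholders, not taken from the repository:

wallet = "/path/to/wallet.json"  # placeholder path
cid = create_container(
    wallet,
    rule="REP 2 IN X CBF 1 SELECT 4 FROM * AS X",
    attributes={"project": "demo"},  # illustrative attributes
)
assert cid in list_containers(wallet)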

View file

@@ -1,34 +1,40 @@
#!/usr/bin/python3.9
-import allure
-from robot.api import logger
+import logging

+import allure
import contract
import wrappers
-from common import (IR_WALLET_PATH, IR_WALLET_PASS, MORPH_ENDPOINT, NEOFS_ADM_EXEC,
-                    NEOFS_ADM_CONFIG_PATH)
+from common import (
+    IR_WALLET_PASS,
+    IR_WALLET_PATH,
+    MORPH_ENDPOINT,
+    NEOFS_ADM_CONFIG_PATH,
+    NEOFS_ADM_EXEC,
+)

+logger = logging.getLogger("NeoLogger")
ROBOT_AUTO_KEYWORDS = False


-@allure.step('Get Epoch')
+@allure.step("Get Epoch")
def get_epoch():
-    epoch = int(contract.testinvoke_contract(
-        contract.get_netmap_contract_hash(MORPH_ENDPOINT),
-        "epoch",
-        MORPH_ENDPOINT)
-    )
+    epoch = int(
+        contract.testinvoke_contract(
+            contract.get_netmap_contract_hash(MORPH_ENDPOINT), "epoch", MORPH_ENDPOINT
+        )
+    )
    logger.info(f"Got epoch {epoch}")
    return epoch


-@allure.step('Tick Epoch')
+@allure.step("Tick Epoch")
def tick_epoch():
    if NEOFS_ADM_EXEC and NEOFS_ADM_CONFIG_PATH:
        # If neofs-adm is available, then we tick epoch with it (to be consistent with UAT tests)
        cmd = f"{NEOFS_ADM_EXEC} morph force-new-epoch -c {NEOFS_ADM_CONFIG_PATH}"
        logger.info(f"Executing shell command: {cmd}")
-        out = ''
-        err = ''
+        out = ""
+        err = ""
        try:
            out = wrappers.run_sh(cmd)
            logger.info(f"Command completed with output: {out}")
@@ -37,11 +43,11 @@ def tick_epoch():
            err = str(exc)
            raise RuntimeError("Failed to tick epoch") from exc
        finally:
-            allure.attach((
-                f'COMMAND: {cmd}\n'
-                f'OUTPUT:\n {out}\n'
-                f'ERROR: {err}\n'
-            ), 'Tick Epoch', allure.attachment_type.TEXT)
+            allure.attach(
+                (f"COMMAND: {cmd}\n" f"OUTPUT:\n {out}\n" f"ERROR: {err}\n"),
+                "Tick Epoch",
+                allure.attachment_type.TEXT,
+            )
        return

    # Otherwise we tick epoch using transaction
@@ -49,4 +55,7 @@ def tick_epoch():
    return contract.invoke_contract_multisig(
        contract.get_netmap_contract_hash(MORPH_ENDPOINT),
        f"newEpoch int:{cur_epoch + 1}",
-        IR_WALLET_PATH, IR_WALLET_PASS, MORPH_ENDPOINT)
+        IR_WALLET_PATH,
+        IR_WALLET_PASS,
+        MORPH_ENDPOINT,
+    )

View file

@@ -1,9 +1,9 @@
#!/usr/bin/python3
-import allure
+import logging
+import os
import re
import shutil
-import sys
import uuid
import zipfile
from urllib.parse import quote_plus
@@ -12,36 +12,32 @@ import allure
import requests
from cli_helpers import _cmd_run
from common import HTTP_GATE
-from robot.api import logger
-from robot.libraries.BuiltIn import BuiltIn

+logger = logging.getLogger("NeoLogger")
ROBOT_AUTO_KEYWORDS = False

-if "pytest" in sys.modules:
-    import os
-
-    ASSETS_DIR = os.getenv("ASSETS_DIR", "TemporaryDir/")
-else:
-    ASSETS_DIR = BuiltIn().get_variable_value("${ASSETS_DIR}")
+ASSETS_DIR = os.getenv("ASSETS_DIR", "TemporaryDir/")


-@allure.step('Get via HTTP Gate')
+@allure.step("Get via HTTP Gate")
def get_via_http_gate(cid: str, oid: str):
    """
    This function gets given object from HTTP gate
    :param cid: CID to get object from
    :param oid: object OID
    """
-    request = f'{HTTP_GATE}/get/{cid}/{oid}'
+    request = f"{HTTP_GATE}/get/{cid}/{oid}"
    resp = requests.get(request, stream=True)

    if not resp.ok:
-        raise Exception(f"""Failed to get object via HTTP gate:
+        raise Exception(
+            f"""Failed to get object via HTTP gate:
                request: {resp.request.path_url},
                response: {resp.text},
-                status code: {resp.status_code} {resp.reason}""")
+                status code: {resp.status_code} {resp.reason}"""
+        )

-    logger.info(f'Request: {request}')
+    logger.info(f"Request: {request}")
    _attach_allure_step(request, resp.status_code)

    filename = f"{ASSETS_DIR}/{cid}_{oid}"
@@ -50,36 +46,38 @@ def get_via_http_gate(cid: str, oid: str):
    return filename


-@allure.step('Get via Zip HTTP Gate')
+@allure.step("Get via Zip HTTP Gate")
def get_via_zip_http_gate(cid: str, prefix: str):
    """
    This function gets given object from HTTP gate
    :param cid: CID to get object from
    :param prefix: common prefix
    """
-    request = f'{HTTP_GATE}/zip/{cid}/{prefix}'
+    request = f"{HTTP_GATE}/zip/{cid}/{prefix}"
    resp = requests.get(request, stream=True)

    if not resp.ok:
-        raise Exception(f"""Failed to get object via HTTP gate:
+        raise Exception(
+            f"""Failed to get object via HTTP gate:
                request: {resp.request.path_url},
                response: {resp.text},
-                status code: {resp.status_code} {resp.reason}""")
+                status code: {resp.status_code} {resp.reason}"""
+        )

-    logger.info(f'Request: {request}')
+    logger.info(f"Request: {request}")
    _attach_allure_step(request, resp.status_code)

-    filename = f'{ASSETS_DIR}/{cid}_archive.zip'
-    with open(filename, 'wb') as get_file:
+    filename = f"{ASSETS_DIR}/{cid}_archive.zip"
+    with open(filename, "wb") as get_file:
        shutil.copyfileobj(resp.raw, get_file)

-    with zipfile.ZipFile(filename, 'r') as zip_ref:
+    with zipfile.ZipFile(filename, "r") as zip_ref:
        zip_ref.extractall(ASSETS_DIR)

-    return f'{ASSETS_DIR}/{prefix}'
+    return f"{ASSETS_DIR}/{prefix}"


-@allure.step('Get via HTTP Gate by attribute')
+@allure.step("Get via HTTP Gate by attribute")
def get_via_http_gate_by_attribute(cid: str, attribute: dict):
    """
    This function gets given object from HTTP gate
@@ -88,16 +86,18 @@ def get_via_http_gate_by_attribute(cid: str, attribute: dict):
    """
    attr_name = list(attribute.keys())[0]
    attr_value = quote_plus(str(attribute.get(attr_name)))
-    request = f'{HTTP_GATE}/get_by_attribute/{cid}/{quote_plus(str(attr_name))}/{attr_value}'
+    request = f"{HTTP_GATE}/get_by_attribute/{cid}/{quote_plus(str(attr_name))}/{attr_value}"
    resp = requests.get(request, stream=True)

    if not resp.ok:
-        raise Exception(f"""Failed to get object via HTTP gate:
+        raise Exception(
+            f"""Failed to get object via HTTP gate:
                request: {resp.request.path_url},
                response: {resp.text},
-                status code: {resp.status_code} {resp.reason}""")
+                status code: {resp.status_code} {resp.reason}"""
+        )

-    logger.info(f'Request: {request}')
+    logger.info(f"Request: {request}")
    _attach_allure_step(request, resp.status_code)

    filename = f"{ASSETS_DIR}/{cid}_{str(uuid.uuid4())}"
@@ -106,7 +106,7 @@ def get_via_http_gate_by_attribute(cid: str, attribute: dict):
    return filename


-@allure.step('Upload via HTTP Gate')
+@allure.step("Upload via HTTP Gate")
def upload_via_http_gate(cid: str, path: str, headers: dict = None) -> str:
    """
    This function upload given object through HTTP gate
@@ -114,41 +114,43 @@ def upload_via_http_gate(cid: str, path: str, headers: dict = None) -> str:
    :param path: File path to upload
    :param headers: Object header
    """
-    request = f'{HTTP_GATE}/upload/{cid}'
-    files = {'upload_file': open(path, 'rb')}
-    body = {
-        'filename': path
-    }
+    request = f"{HTTP_GATE}/upload/{cid}"
+    files = {"upload_file": open(path, "rb")}
+    body = {"filename": path}
    resp = requests.post(request, files=files, data=body, headers=headers)

    if not resp.ok:
-        raise Exception(f"""Failed to get object via HTTP gate:
+        raise Exception(
+            f"""Failed to get object via HTTP gate:
                request: {resp.request.path_url},
                response: {resp.text},
-                status code: {resp.status_code} {resp.reason}""")
+                status code: {resp.status_code} {resp.reason}"""
+        )

-    logger.info(f'Request: {request}')
-    _attach_allure_step(request, resp.json(), req_type='POST')
+    logger.info(f"Request: {request}")
+    _attach_allure_step(request, resp.json(), req_type="POST")

-    assert resp.json().get('object_id'), f'OID found in response {resp}'
+    assert resp.json().get("object_id"), f"OID found in response {resp}"

-    return resp.json().get('object_id')
+    return resp.json().get("object_id")


-@allure.step('Upload via HTTP Gate using Curl')
-def upload_via_http_gate_curl(cid: str, filepath: str, large_object=False, headers: dict = None) -> str:
+@allure.step("Upload via HTTP Gate using Curl")
+def upload_via_http_gate_curl(
+    cid: str, filepath: str, large_object=False, headers: dict = None
+) -> str:
    """
    This function upload given object through HTTP gate using curl utility.
    :param cid: CID to get object from
    :param filepath: File path to upload
    :param headers: Object header
    """
-    request = f'{HTTP_GATE}/upload/{cid}'
-    files = f'file=@{filepath};filename={os.path.basename(filepath)}'
-    cmd = f'curl -F \'{files}\' {request}'
+    request = f"{HTTP_GATE}/upload/{cid}"
+    files = f"file=@{filepath};filename={os.path.basename(filepath)}"
+    cmd = f"curl -F '{files}' {request}"
    if large_object:
-        files = f'file=@pipe;filename={os.path.basename(filepath)}'
-        cmd = f'mkfifo pipe;cat {filepath} > pipe & curl --no-buffer -F \'{files}\' {request}'
+        files = f"file=@pipe;filename={os.path.basename(filepath)}"
+        cmd = f"mkfifo pipe;cat {filepath} > pipe & curl --no-buffer -F '{files}' {request}"
    output = _cmd_run(cmd)
    oid_re = re.search(r'"object_id": "(.*)"', output)
    if not oid_re:
@@ -156,26 +158,23 @@ def upload_via_http_gate_curl(cid: str, filepath: str, large_object=False, heade
    return oid_re.group(1)


-@allure.step('Get via HTTP Gate using Curl')
+@allure.step("Get via HTTP Gate using Curl")
def get_via_http_curl(cid: str, oid: str) -> str:
    """
    This function gets given object from HTTP gate using curl utility.
    :param cid: CID to get object from
    :param oid: object OID
    """
-    request = f'{HTTP_GATE}/get/{cid}/{oid}'
+    request = f"{HTTP_GATE}/get/{cid}/{oid}"
    filename = f"{ASSETS_DIR}/{cid}_{oid}_{str(uuid.uuid4())}"
-    cmd = f'curl {request} > {filename}'
+    cmd = f"curl {request} > {filename}"
    _cmd_run(cmd)
    return filename


-def _attach_allure_step(request: str, status_code: int, req_type='GET'):
-    command_attachment = (
-        f"REQUEST: '{request}'\n"
-        f'RESPONSE:\n {status_code}\n'
-    )
-    with allure.step(f'{req_type} Request'):
-        allure.attach(command_attachment, f'{req_type} Request', allure.attachment_type.TEXT)
+def _attach_allure_step(request: str, status_code: int, req_type="GET"):
+    command_attachment = f"REQUEST: '{request}'\n" f"RESPONSE:\n {status_code}\n"
+    with allure.step(f"{req_type} Request"):
+        allure.attach(command_attachment, f"{req_type} Request", allure.attachment_type.TEXT)

View file

@@ -6,16 +6,18 @@
import allure
import json
+import logging
import random
import re
import uuid
from typing import Optional

import json_transformers
from cli_utils import NeofsCli
from common import ASSETS_DIR, NEOFS_ENDPOINT, NEOFS_NETMAP, WALLET_CONFIG
-from robot.api import logger

+logger = logging.getLogger("NeoLogger")
ROBOT_AUTO_KEYWORDS = False
@@ -307,10 +309,7 @@ def search_object(
        cid=cid,
        bearer=bearer,
        xhdr=xhdr,
-        filters=[
-            f"{filter_key} EQ {filter_val}"
-            for filter_key, filter_val in filters.items()
-        ]
+        filters=[f"{filter_key} EQ {filter_val}" for filter_key, filter_val in filters.items()]
        if filters
        else None,
        session=session,
@@ -325,7 +324,7 @@ def search_object(
            f"is equal for expected list '{expected_objects_list}'",
        )
    else:
-        logger.warn(
+        logger.warning(
            f"Found object list {found_objects} ",
            f"is not equal to expected list '{expected_objects_list}'",
        )
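The collapsed comprehension in search_object turns each key/value pair of the filters dict into a "<key> EQ <value>" string before it is passed to the CLI search call. For example (keys and values are made up):

filters = {"FileName": "cat.jpg", "type": "image"}
cli_filters = [f"{filter_key} EQ {filter_val}" for filter_key, filter_val in filters.items()]
# -> ["FileName EQ cat.jpg", "type EQ image"]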

View file

@@ -4,20 +4,21 @@
    This module contains keywords for tests that check management of storage nodes.
"""
-import allure
+import logging
import random
import re
import time
from dataclasses import dataclass
from typing import Optional

+import allure
from common import MAINNET_BLOCK_TIME, MORPH_BLOCK_TIME, NEOFS_NETMAP_DICT, STORAGE_WALLET_PASS
from data_formatters import get_wallet_public_key
from epoch import tick_epoch
-from robot.api import logger
from service_helper import get_storage_service_helper
from utility import robot_time_to_int

+logger = logging.getLogger("NeoLogger")
ROBOT_AUTO_KEYWORDS = False
@@ -27,26 +28,26 @@ class HealthStatus:
    health_status: str = None

    @staticmethod
-    def from_stdout(output: str) -> 'HealthStatus':
+    def from_stdout(output: str) -> "HealthStatus":
        network, health = None, None
-        for line in output.split('\n'):
-            if 'Network status' in line:
-                network = line.split(':')[-1].strip()
-            if 'Health status' in line:
-                health = line.split(':')[-1].strip()
+        for line in output.split("\n"):
+            if "Network status" in line:
+                network = line.split(":")[-1].strip()
+            if "Health status" in line:
+                health = line.split(":")[-1].strip()
        return HealthStatus(network, health)


-@allure.step('Stop Nodes')
+@allure.step("Stop Nodes")
def stop_nodes(number: int, nodes: list) -> list:
    """
    The function shuts down the given number of randomly
    selected nodes in docker.
    Args:
        number (int): the number of nodes to shut down
        nodes (list): the list of nodes for possible shut down
    Returns:
        (list): the list of nodes which have been shut down
    """
    helper = get_storage_service_helper()
    nodes_to_stop = random.sample(nodes, number)
@@ -55,140 +56,140 @@ def stop_nodes(number: int, nodes: list) -> list:
    return nodes_to_stop


-@allure.step('Start Nodes')
+@allure.step("Start Nodes")
def start_nodes(nodes: list) -> None:
    """
    The function raises the given nodes.
    Args:
        nodes (list): the list of nodes to raise
    Returns:
        (void)
    """
    helper = get_storage_service_helper()
    for node in nodes:
        helper.start(node)


-@allure.step('Get control endpoint and wallet')
-def get_control_endpoint_and_wallet(endpoint_number: str = ''):
+@allure.step("Get control endpoint and wallet")
+def get_control_endpoint_and_wallet(endpoint_number: str = ""):
    """
    Gets control endpoint for a random or given node
    Args:
        endpoint_number (optional, str): the number of the node
                                         in the form of 's01', 's02', etc.
                                         given in NEOFS_NETMAP_DICT as keys
    Returns:
        (str): the number of the node
        (str): endpoint control for the node
        (str): the wallet of the respective node
    """
-    if endpoint_number == '':
+    if endpoint_number == "":
        endpoint_num = random.choice(list(NEOFS_NETMAP_DICT.keys()))
-        logger.info(f'Random node chosen: {endpoint_num}')
+        logger.info(f"Random node chosen: {endpoint_num}")
    else:
        endpoint_num = endpoint_number

-    endpoint_values = NEOFS_NETMAP_DICT[f'{endpoint_num}']
-    endpoint_control = endpoint_values['control']
-    wallet = endpoint_values['wallet_path']
+    endpoint_values = NEOFS_NETMAP_DICT[f"{endpoint_num}"]
+    endpoint_control = endpoint_values["control"]
+    wallet = endpoint_values["wallet_path"]

    return endpoint_num, endpoint_control, wallet


-@allure.step('Get Locode')
+@allure.step("Get Locode")
def get_locode():
    endpoint_values = random.choice(list(NEOFS_NETMAP_DICT.values()))
-    locode = endpoint_values['UN-LOCODE']
-    logger.info(f'Random locode chosen: {locode}')
+    locode = endpoint_values["UN-LOCODE"]
+    logger.info(f"Random locode chosen: {locode}")

    return locode


-@allure.step('Healthcheck for node')
+@allure.step("Healthcheck for node")
def node_healthcheck(node_name: str) -> HealthStatus:
    """
    The function returns node's health status.
    Args:
        node_name str: node name to use for netmap snapshot operation
    Returns:
        health status as HealthStatus object.
    """
    command = "control healthcheck"
    output = _run_control_command(node_name, command)
    return HealthStatus.from_stdout(output)


-@allure.step('Set status for node')
+@allure.step("Set status for node")
def node_set_status(node_name: str, status: str, retries: int = 0) -> None:
    """
    The function sets particular status for given node.
    Args:
        node_name str: node name to use for netmap snapshot operation
        status str: online or offline.
        retries (optional, int): number of retry attempts if it didn't work from the first time
    Returns:
        (void)
    """
    command = f"control set-status --status {status}"
    _run_control_command(node_name, command, retries)


-@allure.step('Get netmap snapshot')
+@allure.step("Get netmap snapshot")
def get_netmap_snapshot(node_name: Optional[str] = None) -> str:
    """
    The function returns string representation of netmap-snapshot.
    Args:
        node_name str: node name to use for netmap snapshot operation
    Returns:
        string representation of netmap-snapshot
    """
    node_name = node_name or list(NEOFS_NETMAP_DICT)[0]
    command = "control netmap-snapshot"
    return _run_control_command(node_name, command)


-@allure.step('Shard list for node')
+@allure.step("Shard list for node")
def node_shard_list(node_name: str) -> list[str]:
    """
    The function returns list of shards for particular node.
    Args:
        node_name str: node name to use for netmap snapshot operation
    Returns:
        list of shards.
    """
    command = "control shards list"
    output = _run_control_command(node_name, command)
-    return re.findall(r'Shard (.*):', output)
+    return re.findall(r"Shard (.*):", output)


-@allure.step('Shard list for node')
+@allure.step("Shard list for node")
def node_shard_set_mode(node_name: str, shard: str, mode: str) -> str:
    """
    The function sets mode for node's particular shard.
    Args:
        node_name str: node name to use for netmap snapshot operation
    Returns:
        health status as HealthStatus object.
    """
    command = f"control shards set-mode --id {shard} --mode {mode}"
    return _run_control_command(node_name, command)


-@allure.step('Drop object from node {node_name}')
+@allure.step("Drop object from node {node_name}")
def drop_object(node_name: str, cid: str, oid: str) -> str:
    """
    The function drops object from particular node.
    Args:
        node_name str: node name to use for netmap snapshot operation
    Returns:
        health status as HealthStatus object.
    """
    command = f"control drop-objects -o {cid}/{oid}"
    return _run_control_command(node_name, command)


-@allure.step('Delete data of node {node_name}')
+@allure.step("Delete data of node {node_name}")
def delete_node_data(node_name: str) -> None:
    helper = get_storage_service_helper()
    helper.stop_node(node_name)
@@ -196,27 +197,25 @@ def delete_node_data(node_name: str) -> None:
    time.sleep(robot_time_to_int(MORPH_BLOCK_TIME))


-@allure.step('Exclude node {node_to_include} from network map')
+@allure.step("Exclude node {node_to_include} from network map")
def exclude_node_from_network_map(node_to_exclude, alive_node):
-    node_wallet_path = NEOFS_NETMAP_DICT[node_to_exclude]['wallet_path']
-    node_netmap_key = get_wallet_public_key(
-        node_wallet_path,
-        STORAGE_WALLET_PASS,
-        format="base58"
-    )
+    node_wallet_path = NEOFS_NETMAP_DICT[node_to_exclude]["wallet_path"]
+    node_netmap_key = get_wallet_public_key(node_wallet_path, STORAGE_WALLET_PASS, format="base58")

-    node_set_status(node_to_exclude, status='offline')
+    node_set_status(node_to_exclude, status="offline")

    time.sleep(robot_time_to_int(MORPH_BLOCK_TIME))
    tick_epoch()

    snapshot = get_netmap_snapshot(node_name=alive_node)
-    assert node_netmap_key not in snapshot, f'Expected node with key {node_netmap_key} not in network map'
+    assert (
+        node_netmap_key not in snapshot
+    ), f"Expected node with key {node_netmap_key} not in network map"


-@allure.step('Include node {node_to_include} into network map')
+@allure.step("Include node {node_to_include} into network map")
def include_node_to_network_map(node_to_include: str, alive_node: str) -> None:
-    node_set_status(node_to_include, status='online')
+    node_set_status(node_to_include, status="online")

    time.sleep(robot_time_to_int(MORPH_BLOCK_TIME))
    tick_epoch()
@@ -224,20 +223,16 @@ def include_node_to_network_map(node_to_include: str, alive_node: str) -> None:
    check_node_in_map(node_to_include, alive_node)


-@allure.step('Check node {node_name} in network map')
+@allure.step("Check node {node_name} in network map")
def check_node_in_map(node_name: str, alive_node: str = None):
    alive_node = alive_node or node_name
-    node_wallet_path = NEOFS_NETMAP_DICT[node_name]['wallet_path']
-    node_netmap_key = get_wallet_public_key(
-        node_wallet_path,
-        STORAGE_WALLET_PASS,
-        format="base58"
-    )
+    node_wallet_path = NEOFS_NETMAP_DICT[node_name]["wallet_path"]
+    node_netmap_key = get_wallet_public_key(node_wallet_path, STORAGE_WALLET_PASS, format="base58")

-    logger.info(f'Node {node_name} netmap key: {node_netmap_key}')
+    logger.info(f"Node {node_name} netmap key: {node_netmap_key}")

    snapshot = get_netmap_snapshot(node_name=alive_node)
-    assert node_netmap_key in snapshot, f'Expected node with key {node_netmap_key} in network map'
+    assert node_netmap_key in snapshot, f"Expected node with key {node_netmap_key} in network map"


def _run_control_command(node_name: str, command: str, retries: int = 0) -> str:
@@ -247,6 +242,6 @@ def _run_control_command(node_name: str, command: str, retries: int = 0) -> str:
            return helper.run_control_command(node_name, command)
        except AssertionError as err:
            if attempt < retries:
-                logger.warn(f'Command {command} failed with error {err} and will be retried')
+                logger.warning(f"Command {command} failed with error {err} and will be retried")
                continue
-            raise AssertionError(f'Command {command} failed with error {err}') from err
+            raise AssertionError(f"Command {command} failed with error {err}") from err
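HealthStatus.from_stdout simply scans the healthcheck output line by line and keeps the text after the last colon. A small hedged example with made-up CLI output:

sample = "Network status: ONLINE\nHealth status: READY"
status = HealthStatus.from_stdout(sample)
assert status.network_status == "ONLINE"
assert status.health_status == "READY"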

View file

@ -1,33 +1,41 @@
#!/usr/bin/python3 #!/usr/bin/python3
import allure import logging
import re import re
import time import time
from neo3 import wallet import allure
from robot.api import logger
import contract import contract
import converters import converters
import rpc_client import rpc_client
from common import (GAS_HASH, MAINNET_SINGLE_ADDR, MAINNET_WALLET_PATH, MAINNET_WALLET_PASS, from common import (
MORPH_ENDPOINT, NEO_MAINNET_ENDPOINT, NEOFS_CONTRACT, NEOGO_EXECUTABLE) GAS_HASH,
MAINNET_SINGLE_ADDR,
MAINNET_WALLET_PASS,
MAINNET_WALLET_PATH,
MORPH_ENDPOINT,
NEO_MAINNET_ENDPOINT,
NEOFS_CONTRACT,
NEOGO_EXECUTABLE,
)
from converters import load_wallet from converters import load_wallet
from neo3 import wallet
from wallet import nep17_transfer from wallet import nep17_transfer
from wrappers import run_sh_with_passwd_contract from wrappers import run_sh_with_passwd_contract
logger = logging.getLogger("NeoLogger")
ROBOT_AUTO_KEYWORDS = False ROBOT_AUTO_KEYWORDS = False
EMPTY_PASSWORD = '' EMPTY_PASSWORD = ""
TX_PERSIST_TIMEOUT = 15 # seconds TX_PERSIST_TIMEOUT = 15 # seconds
ASSET_POWER_MAINCHAIN = 10 ** 8 ASSET_POWER_MAINCHAIN = 10**8
ASSET_POWER_SIDECHAIN = 10 ** 12 ASSET_POWER_SIDECHAIN = 10**12
morph_rpc_cli = rpc_client.RPCClient(MORPH_ENDPOINT) morph_rpc_cli = rpc_client.RPCClient(MORPH_ENDPOINT)
mainnet_rpc_cli = rpc_client.RPCClient(NEO_MAINNET_ENDPOINT) mainnet_rpc_cli = rpc_client.RPCClient(NEO_MAINNET_ENDPOINT)
@allure.step('Withdraw Mainnet Gas') @allure.step("Withdraw Mainnet Gas")
def withdraw_mainnet_gas(wlt: str, amount: int): def withdraw_mainnet_gas(wlt: str, amount: int):
address = _address_from_wallet(wlt, EMPTY_PASSWORD) address = _address_from_wallet(wlt, EMPTY_PASSWORD)
scripthash = wallet.Account.address_to_script_hash(address) scripthash = wallet.Account.address_to_script_hash(address)
@ -39,10 +47,10 @@ def withdraw_mainnet_gas(wlt: str, amount: int):
) )
logger.info(f"Executing command: {cmd}") logger.info(f"Executing command: {cmd}")
raw_out = run_sh_with_passwd_contract('', cmd, expect_confirmation=True) raw_out = run_sh_with_passwd_contract("", cmd, expect_confirmation=True)
out = raw_out.decode('utf-8') out = raw_out.decode("utf-8")
logger.info(f"Command completed with output: {out}") logger.info(f"Command completed with output: {out}")
m = re.match(r'^Sent invocation transaction (\w{64})$', out) m = re.match(r"^Sent invocation transaction (\w{64})$", out)
if m is None: if m is None:
raise Exception("Can not get Tx.") raise Exception("Can not get Tx.")
tx = m.group(1) tx = m.group(1)
@ -72,36 +80,33 @@ def transaction_accepted(tx_id: str):
return False return False
@allure.step('Get NeoFS Balance') @allure.step("Get NeoFS Balance")
def get_balance(wallet_path: str): def get_balance(wallet_path: str):
""" """
This function returns NeoFS balance for given wallet. This function returns NeoFS balance for given wallet.
""" """
wlt = load_wallet(wallet_path) wlt = load_wallet(wallet_path)
acc = wlt.accounts[-1] acc = wlt.accounts[-1]
payload = [ payload = [{"type": "Hash160", "value": str(acc.script_hash)}]
{
'type': 'Hash160',
'value': str(acc.script_hash)
}
]
try: try:
resp = morph_rpc_cli.invoke_function( resp = morph_rpc_cli.invoke_function(
contract.get_balance_contract_hash(MORPH_ENDPOINT), contract.get_balance_contract_hash(MORPH_ENDPOINT), "balanceOf", payload
'balanceOf',
payload
) )
logger.info(f"Got response \n{resp}") logger.info(f"Got response \n{resp}")
value = int(resp['stack'][0]['value']) value = int(resp["stack"][0]["value"])
return value / ASSET_POWER_SIDECHAIN return value / ASSET_POWER_SIDECHAIN
except Exception as out: except Exception as out:
logger.error(f"failed to get wallet balance: {out}") logger.error(f"failed to get wallet balance: {out}")
raise out raise out
@allure.step('Transfer Mainnet Gas') @allure.step("Transfer Mainnet Gas")
def transfer_mainnet_gas(wallet_to: str, amount: int, wallet_password: str = EMPTY_PASSWORD, def transfer_mainnet_gas(
wallet_path: str = MAINNET_WALLET_PATH): wallet_to: str,
amount: int,
wallet_password: str = EMPTY_PASSWORD,
wallet_path: str = MAINNET_WALLET_PATH,
):
""" """
This function transfer GAS in main chain from mainnet wallet to This function transfer GAS in main chain from mainnet wallet to
the provided wallet. If the wallet contains more than one address, the provided wallet. If the wallet contains more than one address,
@ -117,15 +122,20 @@ def transfer_mainnet_gas(wallet_to: str, amount: int, wallet_password: str = EMP
""" """
address_to = _address_from_wallet(wallet_to, wallet_password) address_to = _address_from_wallet(wallet_to, wallet_password)
txid = nep17_transfer(wallet_path, address_to, amount, NEO_MAINNET_ENDPOINT, txid = nep17_transfer(
wallet_pass=MAINNET_WALLET_PASS, addr_from=MAINNET_SINGLE_ADDR) wallet_path,
address_to,
amount,
NEO_MAINNET_ENDPOINT,
wallet_pass=MAINNET_WALLET_PASS,
addr_from=MAINNET_SINGLE_ADDR,
)
if not transaction_accepted(txid): if not transaction_accepted(txid):
raise AssertionError(f"TX {txid} hasn't been processed") raise AssertionError(f"TX {txid} hasn't been processed")
@allure.step('NeoFS Deposit') @allure.step("NeoFS Deposit")
def neofs_deposit(wallet_to: str, amount: int, def neofs_deposit(wallet_to: str, amount: int, wallet_password: str = EMPTY_PASSWORD):
wallet_password: str = EMPTY_PASSWORD):
""" """
Transferring GAS from given wallet to NeoFS contract address. Transferring GAS from given wallet to NeoFS contract address.
""" """
@ -135,8 +145,14 @@ def neofs_deposit(wallet_to: str, amount: int,
address_to = _address_from_wallet(wallet_to, wallet_password) address_to = _address_from_wallet(wallet_to, wallet_password)
txid = nep17_transfer(wallet_to, deposit_addr, amount, NEO_MAINNET_ENDPOINT, txid = nep17_transfer(
wallet_pass=wallet_password, addr_from=address_to) wallet_to,
deposit_addr,
amount,
NEO_MAINNET_ENDPOINT,
wallet_pass=wallet_password,
addr_from=address_to,
)
if not transaction_accepted(txid): if not transaction_accepted(txid):
raise AssertionError(f"TX {txid} hasn't been processed") raise AssertionError(f"TX {txid} hasn't been processed")
@ -156,21 +172,21 @@ def _address_from_wallet(wlt: str, wallet_password: str):
return address return address
@allure.step('Get Mainnet Balance') @allure.step("Get Mainnet Balance")
def get_mainnet_balance(address: str): def get_mainnet_balance(address: str):
resp = mainnet_rpc_cli.get_nep17_balances(address=address) resp = mainnet_rpc_cli.get_nep17_balances(address=address)
logger.info(f"Got getnep17balances response: {resp}") logger.info(f"Got getnep17balances response: {resp}")
for balance in resp['balance']: for balance in resp["balance"]:
if balance['assethash'] == GAS_HASH: if balance["assethash"] == GAS_HASH:
return float(balance['amount'])/ASSET_POWER_MAINCHAIN return float(balance["amount"]) / ASSET_POWER_MAINCHAIN
return float(0) return float(0)
@allure.step('Get Sidechain Balance') @allure.step("Get Sidechain Balance")
def get_sidechain_balance(address: str): def get_sidechain_balance(address: str):
resp = morph_rpc_cli.get_nep17_balances(address=address) resp = morph_rpc_cli.get_nep17_balances(address=address)
logger.info(f"Got getnep17balances response: {resp}") logger.info(f"Got getnep17balances response: {resp}")
for balance in resp['balance']: for balance in resp["balance"]:
if balance['assethash'] == GAS_HASH: if balance["assethash"] == GAS_HASH:
return float(balance['amount'])/ASSET_POWER_SIDECHAIN return float(balance["amount"]) / ASSET_POWER_SIDECHAIN
return float(0) return float(0)
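A minimal sketch of how the balance helpers above might back a test assertion; this is an assumed usage, with payment_neogo as the import path and the address supplied by the caller.

from payment_neogo import get_sidechain_balance

def assert_sidechain_balance_changed(address: str, balance_before: float) -> None:
    # get_sidechain_balance already divides the raw amount by the asset precision.
    balance_after = get_sidechain_balance(address)
    assert balance_after != balance_before, (
        f"Expected sidechain balance of {address} to change, got {balance_after} again"
    )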
@ -2,6 +2,7 @@
import allure import allure
import json import json
import logging
import os import os
import re import re
import uuid import uuid
@ -9,12 +10,12 @@ from enum import Enum
from time import sleep from time import sleep
from typing import Optional from typing import Optional
import allure
import boto3 import boto3
import urllib3 import urllib3
from botocore.exceptions import ClientError from botocore.exceptions import ClientError
from robot.api import logger
from cli_helpers import _run_with_passwd, log_command_execution from cli_helpers import _run_with_passwd, log_command_execution
from common import NEOFS_ENDPOINT, S3_GATE, S3_GATE_WALLET_PATH, S3_GATE_WALLET_PASS from common import NEOFS_ENDPOINT, S3_GATE, S3_GATE_WALLET_PASS, S3_GATE_WALLET_PATH
from data_formatters import get_wallet_public_key from data_formatters import get_wallet_public_key
########################################################## ##########################################################
@ -22,10 +23,10 @@ from data_formatters import get_wallet_public_key
# boto library produces on requests to S3-gate in dev-env. # boto library produces on requests to S3-gate in dev-env.
urllib3.disable_warnings() urllib3.disable_warnings()
########################################################## ##########################################################
logger = logging.getLogger("NeoLogger")
ROBOT_AUTO_KEYWORDS = False ROBOT_AUTO_KEYWORDS = False
CREDENTIALS_CREATE_TIMEOUT = '30s' CREDENTIALS_CREATE_TIMEOUT = "30s"
NEOFS_EXEC = os.getenv('NEOFS_EXEC', 'neofs-authmate') NEOFS_EXEC = os.getenv("NEOFS_EXEC", "neofs-authmate")
# Artificial delay that we add after object deletion and container creation # Artificial delay that we add after object deletion and container creation
# Delay is added because sometimes immediately after deletion object still appears # Delay is added because sometimes immediately after deletion object still appears
@ -33,179 +34,193 @@ NEOFS_EXEC = os.getenv('NEOFS_EXEC', 'neofs-authmate')
# TODO: remove after https://github.com/nspcc-dev/neofs-s3-gw/issues/610 is fixed # TODO: remove after https://github.com/nspcc-dev/neofs-s3-gw/issues/610 is fixed
S3_SYNC_WAIT_TIME = 5 S3_SYNC_WAIT_TIME = 5
class VersioningStatus(Enum): class VersioningStatus(Enum):
ENABLED = 'Enabled' ENABLED = "Enabled"
SUSPENDED = 'Suspended' SUSPENDED = "Suspended"
@allure.step('Init S3 Credentials') @allure.step("Init S3 Credentials")
def init_s3_credentials(wallet_path, s3_bearer_rules_file: Optional[str] = None): def init_s3_credentials(wallet_path, s3_bearer_rules_file: Optional[str] = None):
bucket = str(uuid.uuid4()) bucket = str(uuid.uuid4())
s3_bearer_rules = s3_bearer_rules_file or 'robot/resources/files/s3_bearer_rules.json' s3_bearer_rules = s3_bearer_rules_file or "robot/resources/files/s3_bearer_rules.json"
gate_public_key = get_wallet_public_key(S3_GATE_WALLET_PATH, S3_GATE_WALLET_PASS) gate_public_key = get_wallet_public_key(S3_GATE_WALLET_PATH, S3_GATE_WALLET_PASS)
cmd = ( cmd = (
f'{NEOFS_EXEC} --debug --with-log --timeout {CREDENTIALS_CREATE_TIMEOUT} ' f"{NEOFS_EXEC} --debug --with-log --timeout {CREDENTIALS_CREATE_TIMEOUT} "
f'issue-secret --wallet {wallet_path} --gate-public-key={gate_public_key} ' f"issue-secret --wallet {wallet_path} --gate-public-key={gate_public_key} "
f'--peer {NEOFS_ENDPOINT} --container-friendly-name {bucket} ' f"--peer {NEOFS_ENDPOINT} --container-friendly-name {bucket} "
f'--bearer-rules {s3_bearer_rules}' f"--bearer-rules {s3_bearer_rules}"
) )
logger.info(f'Executing command: {cmd}') logger.info(f"Executing command: {cmd}")
try: try:
output = _run_with_passwd(cmd) output = _run_with_passwd(cmd)
logger.info(f'Command completed with output: {output}') logger.info(f"Command completed with output: {output}")
# output contains some debug info and then several JSON structures, so we find each # output contains some debug info and then several JSON structures, so we find each
# JSON structure by curly brackets (naive approach, but works while JSON is not nested) # JSON structure by curly brackets (naive approach, but works while JSON is not nested)
# and then we take JSON containing secret_access_key # and then we take JSON containing secret_access_key
json_blocks = re.findall(r'\{.*?\}', output, re.DOTALL) json_blocks = re.findall(r"\{.*?\}", output, re.DOTALL)
for json_block in json_blocks: for json_block in json_blocks:
try: try:
parsed_json_block = json.loads(json_block) parsed_json_block = json.loads(json_block)
if 'secret_access_key' in parsed_json_block: if "secret_access_key" in parsed_json_block:
return ( return (
parsed_json_block['container_id'], parsed_json_block["container_id"],
bucket, bucket,
parsed_json_block['access_key_id'], parsed_json_block["access_key_id"],
parsed_json_block['secret_access_key'], parsed_json_block["secret_access_key"],
parsed_json_block['owner_private_key'] parsed_json_block["owner_private_key"],
) )
except json.JSONDecodeError: except json.JSONDecodeError:
raise AssertionError(f'Could not parse info from output\n{output}') raise AssertionError(f"Could not parse info from output\n{output}")
raise AssertionError(f'Could not find AWS credentials in output:\n{output}') raise AssertionError(f"Could not find AWS credentials in output:\n{output}")
except Exception as exc: except Exception as exc:
raise RuntimeError(f'Failed to init s3 credentials because of error\n{exc}') from exc raise RuntimeError(f"Failed to init s3 credentials because of error\n{exc}") from exc
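The comment inside init_s3_credentials describes a naive brace-matching approach to pulling JSON blocks out of mixed CLI output. The standalone sketch below shows the same idea; unlike the function above, it skips blocks that fail to parse instead of raising immediately, which is a deliberately more forgiving variant.

import json
import re

def extract_secret_block(output: str) -> dict:
    # Find brace-delimited chunks (works only while the JSON is not nested)
    # and return the first one that carries the secret_access_key field.
    for block in re.findall(r"\{.*?\}", output, re.DOTALL):
        try:
            parsed = json.loads(block)
        except json.JSONDecodeError:
            continue
        if "secret_access_key" in parsed:
            return parsed
    raise AssertionError(f"Could not find AWS credentials in output:\n{output}")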
@allure.step('Config S3 client') @allure.step("Config S3 client")
def config_s3_client(access_key_id: str, secret_access_key: str): def config_s3_client(access_key_id: str, secret_access_key: str):
try: try:
session = boto3.session.Session() session = boto3.session.Session()
s3_client = session.client( s3_client = session.client(
service_name='s3', service_name="s3",
aws_access_key_id=access_key_id, aws_access_key_id=access_key_id,
aws_secret_access_key=secret_access_key, aws_secret_access_key=secret_access_key,
endpoint_url=S3_GATE, verify=False endpoint_url=S3_GATE,
verify=False,
) )
return s3_client return s3_client
except ClientError as err: except ClientError as err:
raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n' raise Exception(
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err f'Error Message: {err.response["Error"]["Message"]}\n'
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}'
) from err
@allure.step('Create bucket S3') @allure.step("Create bucket S3")
def create_bucket_s3(s3_client): def create_bucket_s3(s3_client):
bucket_name = str(uuid.uuid4()) bucket_name = str(uuid.uuid4())
try: try:
s3_bucket = s3_client.create_bucket(Bucket=bucket_name) s3_bucket = s3_client.create_bucket(Bucket=bucket_name)
log_command_execution(f'Created S3 bucket {bucket_name}', s3_bucket) log_command_execution(f"Created S3 bucket {bucket_name}", s3_bucket)
sleep(S3_SYNC_WAIT_TIME) sleep(S3_SYNC_WAIT_TIME)
return bucket_name return bucket_name
except ClientError as err: except ClientError as err:
raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n' raise Exception(
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err f'Error Message: {err.response["Error"]["Message"]}\n'
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}'
) from err
@allure.step('List buckets S3') @allure.step("List buckets S3")
def list_buckets_s3(s3_client): def list_buckets_s3(s3_client):
found_buckets = [] found_buckets = []
try: try:
response = s3_client.list_buckets() response = s3_client.list_buckets()
log_command_execution('S3 List buckets result', response) log_command_execution("S3 List buckets result", response)
for bucket in response['Buckets']: for bucket in response["Buckets"]:
found_buckets.append(bucket['Name']) found_buckets.append(bucket["Name"])
return found_buckets return found_buckets
except ClientError as err: except ClientError as err:
raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n' raise Exception(
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err f'Error Message: {err.response["Error"]["Message"]}\n'
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}'
) from err
@allure.step('Delete bucket S3') @allure.step("Delete bucket S3")
def delete_bucket_s3(s3_client, bucket: str): def delete_bucket_s3(s3_client, bucket: str):
try: try:
response = s3_client.delete_bucket(Bucket=bucket) response = s3_client.delete_bucket(Bucket=bucket)
log_command_execution('S3 Delete bucket result', response) log_command_execution("S3 Delete bucket result", response)
sleep(S3_SYNC_WAIT_TIME) sleep(S3_SYNC_WAIT_TIME)
return response return response
except ClientError as err: except ClientError as err:
log_command_execution('S3 Delete bucket error', str(err)) log_command_execution("S3 Delete bucket error", str(err))
raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n' raise Exception(
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err f'Error Message: {err.response["Error"]["Message"]}\n'
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}'
) from err
@allure.step('Head bucket S3') @allure.step("Head bucket S3")
def head_bucket(s3_client, bucket: str): def head_bucket(s3_client, bucket: str):
try: try:
response = s3_client.head_bucket(Bucket=bucket) response = s3_client.head_bucket(Bucket=bucket)
log_command_execution('S3 Head bucket result', response) log_command_execution("S3 Head bucket result", response)
return response return response
except ClientError as err: except ClientError as err:
log_command_execution('S3 Head bucket error', str(err)) log_command_execution("S3 Head bucket error", str(err))
raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n' raise Exception(
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err f'Error Message: {err.response["Error"]["Message"]}\n'
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}'
) from err
@allure.step('Set bucket versioning status') @allure.step("Set bucket versioning status")
def set_bucket_versioning(s3_client, bucket_name: str, status: VersioningStatus) -> None: def set_bucket_versioning(s3_client, bucket_name: str, status: VersioningStatus) -> None:
try: try:
response = s3_client.put_bucket_versioning(Bucket=bucket_name, VersioningConfiguration={'Status': status.value}) response = s3_client.put_bucket_versioning(
log_command_execution('S3 Set bucket versioning to', response) Bucket=bucket_name, VersioningConfiguration={"Status": status.value}
)
log_command_execution("S3 Set bucket versioning to", response)
except ClientError as err: except ClientError as err:
raise Exception(f'Got error during set bucket versioning: {err}') from err raise Exception(f"Got error during set bucket versioning: {err}") from err
@allure.step('Get bucket versioning status') @allure.step("Get bucket versioning status")
def get_bucket_versioning_status(s3_client, bucket_name: str) -> str: def get_bucket_versioning_status(s3_client, bucket_name: str) -> str:
try: try:
response = s3_client.get_bucket_versioning(Bucket=bucket_name) response = s3_client.get_bucket_versioning(Bucket=bucket_name)
status = response.get('Status') status = response.get("Status")
log_command_execution('S3 Got bucket versioning status', response) log_command_execution("S3 Got bucket versioning status", response)
return status return status
except ClientError as err: except ClientError as err:
raise Exception(f'Got error during get bucket versioning status: {err}') from err raise Exception(f"Got error during get bucket versioning status: {err}") from err
@allure.step('Put bucket tagging') @allure.step("Put bucket tagging")
def put_bucket_tagging(s3_client, bucket_name: str, tags: list): def put_bucket_tagging(s3_client, bucket_name: str, tags: list):
try: try:
tags = [{'Key': tag_key, 'Value': tag_value} for tag_key, tag_value in tags] tags = [{"Key": tag_key, "Value": tag_value} for tag_key, tag_value in tags]
tagging = {'TagSet': tags} tagging = {"TagSet": tags}
response = s3_client.put_bucket_tagging(Bucket=bucket_name, Tagging=tagging) response = s3_client.put_bucket_tagging(Bucket=bucket_name, Tagging=tagging)
log_command_execution('S3 Put bucket tagging', response) log_command_execution("S3 Put bucket tagging", response)
except ClientError as err: except ClientError as err:
raise Exception(f'Got error during put bucket tagging: {err}') from err raise Exception(f"Got error during put bucket tagging: {err}") from err
@allure.step('Get bucket tagging') @allure.step("Get bucket tagging")
def get_bucket_tagging(s3_client, bucket_name: str) -> list: def get_bucket_tagging(s3_client, bucket_name: str) -> list:
try: try:
response = s3_client.get_bucket_tagging(Bucket=bucket_name) response = s3_client.get_bucket_tagging(Bucket=bucket_name)
log_command_execution('S3 Get bucket tagging', response) log_command_execution("S3 Get bucket tagging", response)
return response.get('TagSet') return response.get("TagSet")
except ClientError as err: except ClientError as err:
raise Exception(f'Got error during get bucket tagging: {err}') from err raise Exception(f"Got error during get bucket tagging: {err}") from err
@allure.step('Delete bucket tagging') @allure.step("Delete bucket tagging")
def delete_bucket_tagging(s3_client, bucket_name: str) -> None: def delete_bucket_tagging(s3_client, bucket_name: str) -> None:
try: try:
response = s3_client.delete_bucket_tagging(Bucket=bucket_name) response = s3_client.delete_bucket_tagging(Bucket=bucket_name)
log_command_execution('S3 Delete bucket tagging', response) log_command_execution("S3 Delete bucket tagging", response)
except ClientError as err: except ClientError as err:
raise Exception(f'Got error during delete bucket tagging: {err}') from err raise Exception(f"Got error during delete bucket tagging: {err}") from err
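Illustrative happy-path lifecycle built from the bucket helpers in this module (not part of the commit); the import path follows the python_keywords package used elsewhere in this change.

from python_keywords.s3_gate_bucket import (
    VersioningStatus,
    config_s3_client,
    create_bucket_s3,
    delete_bucket_s3,
    list_buckets_s3,
    set_bucket_versioning,
)

def bucket_lifecycle(access_key_id: str, secret_access_key: str) -> None:
    client = config_s3_client(access_key_id, secret_access_key)
    bucket = create_bucket_s3(client)
    set_bucket_versioning(client, bucket, VersioningStatus.ENABLED)
    assert bucket in list_buckets_s3(client), f"Bucket {bucket} is not listed"
    delete_bucket_s3(client, bucket)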
@ -1,15 +1,15 @@
#!/usr/bin/python3.9 #!/usr/bin/python3.9
import allure import logging
import os import os
import uuid import uuid
from enum import Enum from enum import Enum
from time import sleep from time import sleep
from typing import Optional from typing import Optional
import allure
import urllib3 import urllib3
from botocore.exceptions import ClientError from botocore.exceptions import ClientError
from robot.api import logger
from cli_helpers import log_command_execution from cli_helpers import log_command_execution
from python_keywords.aws_cli_client import AwsCliClient from python_keywords.aws_cli_client import AwsCliClient
from python_keywords.s3_gate_bucket import S3_SYNC_WAIT_TIME from python_keywords.s3_gate_bucket import S3_SYNC_WAIT_TIME
@ -19,128 +19,143 @@ from python_keywords.s3_gate_bucket import S3_SYNC_WAIT_TIME
# boto library produces on requests to S3-gate in dev-env. # boto library produces on requests to S3-gate in dev-env.
urllib3.disable_warnings() urllib3.disable_warnings()
########################################################## ##########################################################
logger = logging.getLogger("NeoLogger")
ROBOT_AUTO_KEYWORDS = False ROBOT_AUTO_KEYWORDS = False
CREDENTIALS_CREATE_TIMEOUT = '30s' CREDENTIALS_CREATE_TIMEOUT = "30s"
ASSETS_DIR = os.getenv('ASSETS_DIR', 'TemporaryDir/') ASSETS_DIR = os.getenv("ASSETS_DIR", "TemporaryDir/")
class VersioningStatus(Enum): class VersioningStatus(Enum):
ENABLED = 'Enabled' ENABLED = "Enabled"
SUSPENDED = 'Suspended' SUSPENDED = "Suspended"
@allure.step('List objects S3 v2') @allure.step("List objects S3 v2")
def list_objects_s3_v2(s3_client, bucket: str) -> list: def list_objects_s3_v2(s3_client, bucket: str) -> list:
try: try:
response = s3_client.list_objects_v2(Bucket=bucket) response = s3_client.list_objects_v2(Bucket=bucket)
content = response.get('Contents', []) content = response.get("Contents", [])
log_command_execution('S3 v2 List objects result', response) log_command_execution("S3 v2 List objects result", response)
obj_list = [] obj_list = []
for obj in content: for obj in content:
obj_list.append(obj['Key']) obj_list.append(obj["Key"])
logger.info(f'Found s3 objects: {obj_list}') logger.info(f"Found s3 objects: {obj_list}")
return obj_list return obj_list
except ClientError as err: except ClientError as err:
raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n' raise Exception(
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err f'Error Message: {err.response["Error"]["Message"]}\n'
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}'
) from err
@allure.step('List objects S3') @allure.step("List objects S3")
def list_objects_s3(s3_client, bucket: str) -> list: def list_objects_s3(s3_client, bucket: str) -> list:
try: try:
response = s3_client.list_objects(Bucket=bucket) response = s3_client.list_objects(Bucket=bucket)
content = response.get('Contents', []) content = response.get("Contents", [])
log_command_execution('S3 List objects result', response) log_command_execution("S3 List objects result", response)
obj_list = [] obj_list = []
for obj in content: for obj in content:
obj_list.append(obj['Key']) obj_list.append(obj["Key"])
logger.info(f'Found s3 objects: {obj_list}') logger.info(f"Found s3 objects: {obj_list}")
return obj_list return obj_list
except ClientError as err: except ClientError as err:
raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n' raise Exception(
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err f'Error Message: {err.response["Error"]["Message"]}\n'
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}'
) from err
@allure.step('List objects versions S3') @allure.step("List objects versions S3")
def list_objects_versions_s3(s3_client, bucket: str) -> list: def list_objects_versions_s3(s3_client, bucket: str) -> list:
try: try:
response = s3_client.list_object_versions(Bucket=bucket) response = s3_client.list_object_versions(Bucket=bucket)
versions = response.get('Versions', []) versions = response.get("Versions", [])
log_command_execution('S3 List objects versions result', response) log_command_execution("S3 List objects versions result", response)
return versions return versions
except ClientError as err: except ClientError as err:
raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n' raise Exception(
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err f'Error Message: {err.response["Error"]["Message"]}\n'
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}'
) from err
@allure.step('Put object S3') @allure.step("Put object S3")
def put_object_s3(s3_client, bucket: str, filepath: str): def put_object_s3(s3_client, bucket: str, filepath: str):
filename = os.path.basename(filepath) filename = os.path.basename(filepath)
if isinstance(s3_client, AwsCliClient): if isinstance(s3_client, AwsCliClient):
file_content = filepath file_content = filepath
else: else:
with open(filepath, 'rb') as put_file: with open(filepath, "rb") as put_file:
file_content = put_file.read() file_content = put_file.read()
try: try:
response = s3_client.put_object(Body=file_content, Bucket=bucket, Key=filename) response = s3_client.put_object(Body=file_content, Bucket=bucket, Key=filename)
log_command_execution('S3 Put object result', response) log_command_execution("S3 Put object result", response)
return response.get('VersionId') return response.get("VersionId")
except ClientError as err: except ClientError as err:
raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n' raise Exception(
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err f'Error Message: {err.response["Error"]["Message"]}\n'
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}'
) from err
@allure.step('Head object S3') @allure.step("Head object S3")
def head_object_s3(s3_client, bucket: str, object_key: str, version_id: str = None): def head_object_s3(s3_client, bucket: str, object_key: str, version_id: str = None):
try: try:
params = {'Bucket': bucket, 'Key': object_key} params = {"Bucket": bucket, "Key": object_key}
if version_id: if version_id:
params['VersionId'] = version_id params["VersionId"] = version_id
response = s3_client.head_object(**params) response = s3_client.head_object(**params)
log_command_execution('S3 Head object result', response) log_command_execution("S3 Head object result", response)
return response return response
except ClientError as err: except ClientError as err:
raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n' raise Exception(
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err f'Error Message: {err.response["Error"]["Message"]}\n'
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}'
) from err
@allure.step('Delete object S3') @allure.step("Delete object S3")
def delete_object_s3(s3_client, bucket, object_key, version_id: str = None): def delete_object_s3(s3_client, bucket, object_key, version_id: str = None):
try: try:
params = {'Bucket': bucket, 'Key': object_key} params = {"Bucket": bucket, "Key": object_key}
if version_id: if version_id:
params['VersionId'] = version_id params["VersionId"] = version_id
response = s3_client.delete_object(**params) response = s3_client.delete_object(**params)
log_command_execution('S3 Delete object result', response) log_command_execution("S3 Delete object result", response)
sleep(S3_SYNC_WAIT_TIME) sleep(S3_SYNC_WAIT_TIME)
return response return response
except ClientError as err: except ClientError as err:
raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n' raise Exception(
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err f'Error Message: {err.response["Error"]["Message"]}\n'
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}'
) from err
@allure.step('Delete objects S3') @allure.step("Delete objects S3")
def delete_objects_s3(s3_client, bucket: str, object_keys: list): def delete_objects_s3(s3_client, bucket: str, object_keys: list):
try: try:
response = s3_client.delete_objects(Bucket=bucket, Delete=_make_objs_dict(object_keys)) response = s3_client.delete_objects(Bucket=bucket, Delete=_make_objs_dict(object_keys))
log_command_execution('S3 Delete objects result', response) log_command_execution("S3 Delete objects result", response)
sleep(S3_SYNC_WAIT_TIME) sleep(S3_SYNC_WAIT_TIME)
return response return response
except ClientError as err: except ClientError as err:
raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n' raise Exception(
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err f'Error Message: {err.response["Error"]["Message"]}\n'
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}'
) from err
@allure.step('Delete object versions S3')
@allure.step("Delete object versions S3")
def delete_object_versions_s3(s3_client, bucket: str, object_versions: list): def delete_object_versions_s3(s3_client, bucket: str, object_versions: list):
try: try:
# Build deletion list in S3 format # Build deletion list in S3 format
@ -154,187 +169,230 @@ def delete_object_versions_s3(s3_client, bucket: str, object_versions: list):
] ]
} }
response = s3_client.delete_objects(Bucket=bucket, Delete=delete_list) response = s3_client.delete_objects(Bucket=bucket, Delete=delete_list)
log_command_execution('S3 Delete objects result', response) log_command_execution("S3 Delete objects result", response)
return response return response
except ClientError as err: except ClientError as err:
raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n' raise Exception(
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err f'Error Message: {err.response["Error"]["Message"]}\n'
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}'
) from err
@allure.step('Copy object S3') @allure.step("Copy object S3")
def copy_object_s3(s3_client, bucket, object_key, bucket_dst=None): def copy_object_s3(s3_client, bucket, object_key, bucket_dst=None):
filename = f'{os.getcwd()}/{uuid.uuid4()}' filename = f"{os.getcwd()}/{uuid.uuid4()}"
try: try:
response = s3_client.copy_object(Bucket=bucket_dst or bucket, response = s3_client.copy_object(
CopySource=f'{bucket}/{object_key}', Bucket=bucket_dst or bucket, CopySource=f"{bucket}/{object_key}", Key=filename
Key=filename) )
log_command_execution('S3 Copy objects result', response) log_command_execution("S3 Copy objects result", response)
return filename return filename
except ClientError as err: except ClientError as err:
raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n' raise Exception(
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err f'Error Message: {err.response["Error"]["Message"]}\n'
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}'
) from err
@allure.step('Get object S3') @allure.step("Get object S3")
def get_object_s3(s3_client, bucket: str, object_key: str, version_id: str = None): def get_object_s3(s3_client, bucket: str, object_key: str, version_id: str = None):
filename = f'{ASSETS_DIR}/{uuid.uuid4()}' filename = f"{ASSETS_DIR}/{uuid.uuid4()}"
try: try:
params = {'Bucket': bucket, 'Key': object_key} params = {"Bucket": bucket, "Key": object_key}
if version_id: if version_id:
params['VersionId'] = version_id params["VersionId"] = version_id
if isinstance(s3_client, AwsCliClient): if isinstance(s3_client, AwsCliClient):
params['file_path'] = filename params["file_path"] = filename
response = s3_client.get_object(**params) response = s3_client.get_object(**params)
log_command_execution('S3 Get objects result', response) log_command_execution("S3 Get objects result", response)
if not isinstance(s3_client, AwsCliClient): if not isinstance(s3_client, AwsCliClient):
with open(f'{filename}', 'wb') as get_file: with open(f"{filename}", "wb") as get_file:
chunk = response['Body'].read(1024) chunk = response["Body"].read(1024)
while chunk: while chunk:
get_file.write(chunk) get_file.write(chunk)
chunk = response['Body'].read(1024) chunk = response["Body"].read(1024)
return filename return filename
except ClientError as err: except ClientError as err:
raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n' raise Exception(
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err f'Error Message: {err.response["Error"]["Message"]}\n'
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}'
) from err
@allure.step('Create multipart upload S3') @allure.step("Create multipart upload S3")
def create_multipart_upload_s3(s3_client, bucket_name: str, object_key: str) -> str: def create_multipart_upload_s3(s3_client, bucket_name: str, object_key: str) -> str:
try: try:
response = s3_client.create_multipart_upload(Bucket=bucket_name, Key=object_key) response = s3_client.create_multipart_upload(Bucket=bucket_name, Key=object_key)
log_command_execution('S3 Created multipart upload', response) log_command_execution("S3 Created multipart upload", response)
assert response.get('UploadId'), f'Expected UploadId in response:\n{response}' assert response.get("UploadId"), f"Expected UploadId in response:\n{response}"
return response.get('UploadId') return response.get("UploadId")
except ClientError as err: except ClientError as err:
raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n' raise Exception(
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err f'Error Message: {err.response["Error"]["Message"]}\n'
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}'
) from err
@allure.step('List multipart uploads S3') @allure.step("List multipart uploads S3")
def list_multipart_uploads_s3(s3_client, bucket_name: str) -> Optional[list[dict]]: def list_multipart_uploads_s3(s3_client, bucket_name: str) -> Optional[list[dict]]:
try: try:
response = s3_client.list_multipart_uploads(Bucket=bucket_name) response = s3_client.list_multipart_uploads(Bucket=bucket_name)
log_command_execution('S3 List multipart upload', response) log_command_execution("S3 List multipart upload", response)
return response.get('Uploads') return response.get("Uploads")
except ClientError as err: except ClientError as err:
raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n' raise Exception(
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err f'Error Message: {err.response["Error"]["Message"]}\n'
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}'
) from err
@allure.step('Abort multipart upload S3') @allure.step("Abort multipart upload S3")
def abort_multipart_uploads_s3(s3_client, bucket_name: str, object_key: str, upload_id: str): def abort_multipart_uploads_s3(s3_client, bucket_name: str, object_key: str, upload_id: str):
try: try:
response = s3_client.abort_multipart_upload(Bucket=bucket_name, Key=object_key, UploadId=upload_id) response = s3_client.abort_multipart_upload(
log_command_execution('S3 Abort multipart upload', response) Bucket=bucket_name, Key=object_key, UploadId=upload_id
)
log_command_execution("S3 Abort multipart upload", response)
except ClientError as err: except ClientError as err:
raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n' raise Exception(
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err f'Error Message: {err.response["Error"]["Message"]}\n'
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}'
) from err
@allure.step('Upload part S3') @allure.step("Upload part S3")
def upload_part_s3(s3_client, bucket_name: str, object_key: str, upload_id: str, part_num: int, filepath: str) -> str: def upload_part_s3(
s3_client, bucket_name: str, object_key: str, upload_id: str, part_num: int, filepath: str
) -> str:
if isinstance(s3_client, AwsCliClient): if isinstance(s3_client, AwsCliClient):
file_content = filepath file_content = filepath
else: else:
with open(filepath, 'rb') as put_file: with open(filepath, "rb") as put_file:
file_content = put_file.read() file_content = put_file.read()
try: try:
response = s3_client.upload_part(UploadId=upload_id, Bucket=bucket_name, Key=object_key, PartNumber=part_num, response = s3_client.upload_part(
Body=file_content) UploadId=upload_id,
log_command_execution('S3 Upload part', response) Bucket=bucket_name,
assert response.get('ETag'), f'Expected ETag in response:\n{response}' Key=object_key,
PartNumber=part_num,
Body=file_content,
)
log_command_execution("S3 Upload part", response)
assert response.get("ETag"), f"Expected ETag in response:\n{response}"
return response.get('ETag') return response.get("ETag")
except ClientError as err: except ClientError as err:
raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n' raise Exception(
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err f'Error Message: {err.response["Error"]["Message"]}\n'
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}'
) from err
@allure.step('List parts S3') @allure.step("List parts S3")
def list_parts_s3(s3_client, bucket_name: str, object_key: str, upload_id: str) -> list[dict]: def list_parts_s3(s3_client, bucket_name: str, object_key: str, upload_id: str) -> list[dict]:
try: try:
response = s3_client.list_parts(UploadId=upload_id, Bucket=bucket_name, Key=object_key) response = s3_client.list_parts(UploadId=upload_id, Bucket=bucket_name, Key=object_key)
log_command_execution('S3 List part', response) log_command_execution("S3 List part", response)
assert response.get('Parts'), f'Expected Parts in response:\n{response}' assert response.get("Parts"), f"Expected Parts in response:\n{response}"
return response.get('Parts') return response.get("Parts")
except ClientError as err: except ClientError as err:
raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n' raise Exception(
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err f'Error Message: {err.response["Error"]["Message"]}\n'
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}'
) from err
@allure.step('Complete multipart upload S3') @allure.step("Complete multipart upload S3")
def complete_multipart_upload_s3(s3_client, bucket_name: str, object_key: str, upload_id: str, def complete_multipart_upload_s3(
parts: list): s3_client, bucket_name: str, object_key: str, upload_id: str, parts: list
):
try: try:
parts = [{'ETag': etag, 'PartNumber': part_num} for part_num, etag in parts] parts = [{"ETag": etag, "PartNumber": part_num} for part_num, etag in parts]
response = s3_client.complete_multipart_upload(Bucket=bucket_name, Key=object_key, UploadId=upload_id, response = s3_client.complete_multipart_upload(
MultipartUpload={'Parts': parts}) Bucket=bucket_name, Key=object_key, UploadId=upload_id, MultipartUpload={"Parts": parts}
log_command_execution('S3 Complete multipart upload', response) )
log_command_execution("S3 Complete multipart upload", response)
except ClientError as err: except ClientError as err:
raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n' raise Exception(
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err f'Error Message: {err.response["Error"]["Message"]}\n'
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}'
) from err
@allure.step('Put object tagging') @allure.step("Put object tagging")
def put_object_tagging(s3_client, bucket_name: str, object_key: str, tags: list): def put_object_tagging(s3_client, bucket_name: str, object_key: str, tags: list):
try: try:
tags = [{'Key': tag_key, 'Value': tag_value} for tag_key, tag_value in tags] tags = [{"Key": tag_key, "Value": tag_value} for tag_key, tag_value in tags]
tagging = {'TagSet': tags} tagging = {"TagSet": tags}
s3_client.put_object_tagging(Bucket=bucket_name, Key=object_key, Tagging=tagging) s3_client.put_object_tagging(Bucket=bucket_name, Key=object_key, Tagging=tagging)
log_command_execution('S3 Put object tagging', str(tags)) log_command_execution("S3 Put object tagging", str(tags))
except ClientError as err: except ClientError as err:
raise Exception(f'Got error during put object tagging: {err}') from err raise Exception(f"Got error during put object tagging: {err}") from err
@allure.step('Get object tagging') @allure.step("Get object tagging")
def get_object_tagging(s3_client, bucket_name: str, object_key: str) -> list: def get_object_tagging(s3_client, bucket_name: str, object_key: str) -> list:
try: try:
response = s3_client.get_object_tagging(Bucket=bucket_name, Key=object_key) response = s3_client.get_object_tagging(Bucket=bucket_name, Key=object_key)
log_command_execution('S3 Get object tagging', response) log_command_execution("S3 Get object tagging", response)
return response.get('TagSet') return response.get("TagSet")
except ClientError as err: except ClientError as err:
raise Exception(f'Got error during get object tagging: {err}') from err raise Exception(f"Got error during get object tagging: {err}") from err
@allure.step('Delete object tagging') @allure.step("Delete object tagging")
def delete_object_tagging(s3_client, bucket_name: str, object_key: str): def delete_object_tagging(s3_client, bucket_name: str, object_key: str):
try: try:
response = s3_client.delete_object_tagging(Bucket=bucket_name, Key=object_key) response = s3_client.delete_object_tagging(Bucket=bucket_name, Key=object_key)
log_command_execution('S3 Delete object tagging', response) log_command_execution("S3 Delete object tagging", response)
except ClientError as err: except ClientError as err:
raise Exception(f'Got error during delete object tagging: {err}') from err raise Exception(f"Got error during delete object tagging: {err}") from err
@allure.step('Get object tagging') @allure.step("Get object tagging")
def get_object_attributes(s3_client, bucket_name: str, object_key: str, *attributes: str, version_id: str = None, def get_object_attributes(
max_parts: int = None, part_number: int = None, get_full_resp=True) -> dict: s3_client,
bucket_name: str,
object_key: str,
*attributes: str,
version_id: str = None,
max_parts: int = None,
part_number: int = None,
get_full_resp=True,
) -> dict:
try: try:
if not isinstance(s3_client, AwsCliClient): if not isinstance(s3_client, AwsCliClient):
logger.warn('Method get_object_attributes is not supported by boto3 client') logger.warning("Method get_object_attributes is not supported by boto3 client")
return {} return {}
response = s3_client.get_object_attributes(bucket_name, object_key, *attributes, version_id=version_id, response = s3_client.get_object_attributes(
max_parts=max_parts, part_number=part_number) bucket_name,
log_command_execution('S3 Get object attributes', response) object_key,
*attributes,
version_id=version_id,
max_parts=max_parts,
part_number=part_number,
)
log_command_execution("S3 Get object attributes", response)
for attr in attributes: for attr in attributes:
assert attr in response, f'Expected attribute {attr} in {response}' assert attr in response, f"Expected attribute {attr} in {response}"
if get_full_resp: if get_full_resp:
return response return response
@ -342,13 +400,13 @@ def get_object_attributes(s3_client, bucket_name: str, object_key: str, *attribu
return response.get(attributes[0]) return response.get(attributes[0])
except ClientError as err: except ClientError as err:
raise Exception(f'Got error during get object attributes: {err}') from err raise Exception(f"Got error during get object attributes: {err}") from err
def _make_objs_dict(key_names): def _make_objs_dict(key_names):
objs_list = [] objs_list = []
for key in key_names: for key in key_names:
obj_dict = {'Key': key} obj_dict = {"Key": key}
objs_list.append(obj_dict) objs_list.append(obj_dict)
objs_dict = {'Objects': objs_list} objs_dict = {"Objects": objs_list}
return objs_dict return objs_dict
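A sketch of the multipart flow these helpers support, assuming they are in scope (same module); the part files and key are placeholders supplied by the caller.

def multipart_upload(s3_client, bucket: str, key: str, part_files: list[str]) -> None:
    upload_id = create_multipart_upload_s3(s3_client, bucket, key)
    parts = []
    for part_num, path in enumerate(part_files, start=1):
        # upload_part_s3 returns the ETag, which complete_multipart_upload_s3
        # expects as (part_number, etag) pairs.
        etag = upload_part_s3(s3_client, bucket, key, upload_id, part_num, path)
        parts.append((part_num, etag))
    complete_multipart_upload_s3(s3_client, bucket, key, upload_id, parts)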
@ -7,15 +7,17 @@
import allure import allure
import base64 import base64
import json import json
import logging
import os import os
import uuid import uuid
import allure
import json_transformers import json_transformers
from cli_helpers import _cmd_run, _run_with_passwd from cli_helpers import _cmd_run, _run_with_passwd
from common import ASSETS_DIR, NEOFS_ENDPOINT, WALLET_CONFIG from common import ASSETS_DIR, NEOFS_ENDPOINT, WALLET_CONFIG
from neo3 import wallet from neo3 import wallet
from robot.api import logger
logger = logging.getLogger("NeoLogger")
ROBOT_AUTO_KEYWORDS = False ROBOT_AUTO_KEYWORDS = False
# path to neofs-cli executable # path to neofs-cli executable
@ -45,9 +47,9 @@ def generate_session_token(owner: str, session_wallet: str, cid: str = "") -> st
with open(session_wallet) as fout: with open(session_wallet) as fout:
session_wlt_content = json.load(fout) session_wlt_content = json.load(fout)
session_wlt = wallet.Wallet.from_json(session_wlt_content, password="") session_wlt = wallet.Wallet.from_json(session_wlt_content, password="")
pub_key_64 = base64.b64encode( pub_key_64 = base64.b64encode(bytes.fromhex(str(session_wlt.accounts[0].public_key))).decode(
bytes.fromhex(str(session_wlt.accounts[0].public_key)) "utf-8"
).decode("utf-8") )
session_token = { session_token = {
"body": { "body": {
@ -5,61 +5,62 @@
that storage policies are respected. that storage policies are respected.
""" """
import allure import logging
from typing import Optional from typing import Optional
from robot.api import logger import allure
import complex_object_actions import complex_object_actions
import neofs_verbs import neofs_verbs
from common import NEOFS_NETMAP from common import NEOFS_NETMAP
from grpc_responses import OBJECT_NOT_FOUND, error_matches_status from grpc_responses import OBJECT_NOT_FOUND, error_matches_status
logger = logging.getLogger("NeoLogger")
ROBOT_AUTO_KEYWORDS = False ROBOT_AUTO_KEYWORDS = False
@allure.step('Get Object Copies') @allure.step("Get Object Copies")
def get_object_copies(complexity: str, wallet: str, cid: str, oid: str): def get_object_copies(complexity: str, wallet: str, cid: str, oid: str):
""" """
The function performs requests to all nodes of the container and The function performs requests to all nodes of the container and
finds out if they store a copy of the object. The procedure is finds out if they store a copy of the object. The procedure is
different for simple and complex objects, so the function requires different for simple and complex objects, so the function requires
an indication of the object complexity. an indication of the object complexity.
Args: Args:
complexity (str): the tag of object size and complexity, complexity (str): the tag of object size and complexity,
[Simple|Complex] [Simple|Complex]
wallet (str): the path to the wallet on whose behalf the wallet (str): the path to the wallet on whose behalf the
copies are got copies are got
cid (str): ID of the container cid (str): ID of the container
oid (str): ID of the Object oid (str): ID of the Object
Returns: Returns:
(int): the number of object copies in the container (int): the number of object copies in the container
""" """
return (get_simple_object_copies(wallet, cid, oid) if complexity == "Simple" return (
else get_complex_object_copies(wallet, cid, oid)) get_simple_object_copies(wallet, cid, oid)
if complexity == "Simple"
else get_complex_object_copies(wallet, cid, oid)
)
@allure.step('Get Simple Object Copies') @allure.step("Get Simple Object Copies")
def get_simple_object_copies(wallet: str, cid: str, oid: str): def get_simple_object_copies(wallet: str, cid: str, oid: str):
""" """
To figure out the number of copies of a simple object, only direct To figure out the number of copies of a simple object, only direct
HEAD requests should be made to every node of the container. HEAD requests should be made to every node of the container.
We consider a non-empty HEAD response as a stored object copy. We consider a non-empty HEAD response as a stored object copy.
Args: Args:
wallet (str): the path to the wallet on whose behalf the wallet (str): the path to the wallet on whose behalf the
copies are got copies are got
cid (str): ID of the container cid (str): ID of the container
oid (str): ID of the Object oid (str): ID of the Object
Returns: Returns:
(int): the number of object copies in the container (int): the number of object copies in the container
""" """
copies = 0 copies = 0
for node in NEOFS_NETMAP: for node in NEOFS_NETMAP:
try: try:
response = neofs_verbs.head_object(wallet, cid, oid, response = neofs_verbs.head_object(wallet, cid, oid, endpoint=node, is_direct=True)
endpoint=node,
is_direct=True)
if response: if response:
logger.info(f"Found object {oid} on node {node}") logger.info(f"Found object {oid} on node {node}")
copies += 1 copies += 1
@ -69,39 +70,41 @@ def get_simple_object_copies(wallet: str, cid: str, oid: str):
return copies return copies
@allure.step('Get Complex Object Copies') @allure.step("Get Complex Object Copies")
def get_complex_object_copies(wallet: str, cid: str, oid: str): def get_complex_object_copies(wallet: str, cid: str, oid: str):
""" """
To figure out the number of copies of a complex object, we first To figure out the number of copies of a complex object, we first
need to retrieve its Last object. We consider that the number of need to retrieve its Last object. We consider that the number of
complex object copies is equal to the number of its last object complex object copies is equal to the number of its last object
copies. When we have the Last object ID, the task is reduced copies. When we have the Last object ID, the task is reduced
to getting simple object copies. to getting simple object copies.
Args: Args:
wallet (str): the path to the wallet on whose behalf the wallet (str): the path to the wallet on whose behalf the
copies are got copies are got
cid (str): ID of the container cid (str): ID of the container
oid (str): ID of the Object oid (str): ID of the Object
Returns: Returns:
(int): the number of object copies in the container (int): the number of object copies in the container
""" """
last_oid = complex_object_actions.get_last_object(wallet, cid, oid) last_oid = complex_object_actions.get_last_object(wallet, cid, oid)
return get_simple_object_copies(wallet, cid, last_oid) return get_simple_object_copies(wallet, cid, last_oid)
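A hedged sketch of how the copy counters above are typically consumed: comparing against an expected replication factor, which depends on the container placement policy and is a placeholder here.

def check_replication(complexity: str, wallet: str, cid: str, oid: str, expected_copies: int) -> None:
    copies = get_object_copies(complexity, wallet, cid, oid)
    assert copies == expected_copies, f"Expected {expected_copies} copies, got {copies}"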
@allure.step('Get Nodes With Object') @allure.step("Get Nodes With Object")
def get_nodes_with_object(wallet: str, cid: str, oid: str, skip_nodes: Optional[list[str]] = None) -> list[str]: def get_nodes_with_object(
wallet: str, cid: str, oid: str, skip_nodes: Optional[list[str]] = None
) -> list[str]:
""" """
The function returns a list of nodes which store The function returns a list of nodes which store
the given object. the given object.
Args: Args:
wallet (str): the path to the wallet on whose behalf wallet (str): the path to the wallet on whose behalf
we request the nodes we request the nodes
cid (str): ID of the container which stores the object cid (str): ID of the container which stores the object
oid (str): object ID oid (str): object ID
skip_nodes (list): list of nodes that should be excluded from check skip_nodes (list): list of nodes that should be excluded from check
Returns: Returns:
(list): nodes which store the object (list): nodes which store the object
""" """
nodes_to_search = NEOFS_NETMAP nodes_to_search = NEOFS_NETMAP
if skip_nodes: if skip_nodes:
@ -110,9 +113,7 @@ def get_nodes_with_object(wallet: str, cid: str, oid: str, skip_nodes: Optional[
nodes_list = [] nodes_list = []
for node in nodes_to_search: for node in nodes_to_search:
try: try:
res = neofs_verbs.head_object(wallet, cid, oid, res = neofs_verbs.head_object(wallet, cid, oid, endpoint=node, is_direct=True)
endpoint=node,
is_direct=True)
if res is not None: if res is not None:
logger.info(f"Found object {oid} on node {node}") logger.info(f"Found object {oid} on node {node}")
nodes_list.append(node) nodes_list.append(node)
@ -122,30 +123,28 @@ def get_nodes_with_object(wallet: str, cid: str, oid: str, skip_nodes: Optional[
return nodes_list return nodes_list
@allure.step('Get Nodes Without Object') @allure.step("Get Nodes Without Object")
def get_nodes_without_object(wallet: str, cid: str, oid: str): def get_nodes_without_object(wallet: str, cid: str, oid: str):
""" """
The function returns a list of nodes which do not store The function returns a list of nodes which do not store
the given object. the given object.
Args: Args:
wallet (str): the path to the wallet on whose behalf wallet (str): the path to the wallet on whose behalf
we request the nodes we request the nodes
cid (str): ID of the container which stores the object cid (str): ID of the container which stores the object
oid (str): object ID oid (str): object ID
Returns: Returns:
(list): nodes which do not store the object (list): nodes which do not store the object
""" """
nodes_list = [] nodes_list = []
for node in NEOFS_NETMAP: for node in NEOFS_NETMAP:
try: try:
res = neofs_verbs.head_object(wallet, cid, oid, res = neofs_verbs.head_object(wallet, cid, oid, endpoint=node, is_direct=True)
endpoint=node,
is_direct=True)
if res is None: if res is None:
nodes_list.append(node) nodes_list.append(node)
except Exception as err: except Exception as err:
if error_matches_status(err, OBJECT_NOT_FOUND): if error_matches_status(err, OBJECT_NOT_FOUND):
nodes_list.append(node) nodes_list.append(node)
else: else:
raise Exception(f'Got error {err} on head object command') from err raise Exception(f"Got error {err} on head object command") from err
return nodes_list return nodes_list
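Sketch only (assumes the two helpers above are in scope): the node sets they return should be disjoint for any given object, which makes for a cheap sanity check in tests.

def check_node_sets_disjoint(wallet: str, cid: str, oid: str) -> None:
    with_object = set(get_nodes_with_object(wallet, cid, oid))
    without_object = set(get_nodes_without_object(wallet, cid, oid))
    overlap = with_object & without_object
    assert not overlap, f"Nodes {overlap} reported both storing and missing object {oid}"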
@ -3,46 +3,33 @@
import allure import allure
import json import json
import allure
import neofs_verbs import neofs_verbs
from neo3 import wallet from neo3 import wallet
from robot.libraries.BuiltIn import BuiltIn
ROBOT_AUTO_KEYWORDS = False ROBOT_AUTO_KEYWORDS = False
@allure.step('Verify Head Tombstone') @allure.step("Verify Head Tombstone")
def verify_head_tombstone(wallet_path: str, cid: str, oid_ts: str, oid: str): def verify_head_tombstone(wallet_path: str, cid: str, oid_ts: str, oid: str):
header = neofs_verbs.head_object(wallet_path, cid, oid_ts) header = neofs_verbs.head_object(wallet_path, cid, oid_ts)
header = header['header'] header = header["header"]
assert header["containerID"] == cid, "Tombstone Header CID is wrong"
BuiltIn().should_be_equal(header["containerID"], cid,
msg="Tombstone Header CID is wrong")
wlt_data = dict() wlt_data = dict()
with open(wallet_path, 'r') as fout: with open(wallet_path, "r") as fout:
wlt_data = json.loads(fout.read()) wlt_data = json.loads(fout.read())
wlt = wallet.Wallet.from_json(wlt_data, password='') wlt = wallet.Wallet.from_json(wlt_data, password="")
addr = wlt.accounts[0].address addr = wlt.accounts[0].address
BuiltIn().should_be_equal(header["ownerID"], addr, assert header["ownerID"] == addr, "Tombstone Owner ID is wrong"
msg="Tombstone Owner ID is wrong") assert header["objectType"] == "TOMBSTONE", "Header Type isn't Tombstone"
assert (
BuiltIn().should_be_equal(header["objectType"], 'TOMBSTONE', header["sessionToken"]["body"]["object"]["verb"] == "DELETE"
msg="Header Type isn't Tombstone") ), "Header Session Type isn't DELETE"
assert (
BuiltIn().should_be_equal( header["sessionToken"]["body"]["object"]["address"]["containerID"] == cid
header["sessionToken"]["body"]["object"]["verb"], 'DELETE', ), "Header Session ID is wrong"
msg="Header Session Type isn't DELETE" assert (
) header["sessionToken"]["body"]["object"]["address"]["objectID"] == oid
), "Header Session OID is wrong"
BuiltIn().should_be_equal(
header["sessionToken"]["body"]["object"]["address"]["containerID"],
cid,
msg="Header Session ID is wrong"
)
BuiltIn().should_be_equal(
header["sessionToken"]["body"]["object"]["address"]["objectID"],
oid,
msg="Header Session OID is wrong"
)
@ -2,18 +2,19 @@
import allure import allure
import hashlib import hashlib
import logging
import os import os
import tarfile import tarfile
from typing import Tuple
import uuid import uuid
from typing import Tuple
import allure
import docker import docker
import wallet import wallet
from common import ASSETS_DIR, SIMPLE_OBJ_SIZE
from cli_helpers import _cmd_run from cli_helpers import _cmd_run
from robot.api import logger from common import ASSETS_DIR, SIMPLE_OBJ_SIZE
from robot.libraries.BuiltIn import BuiltIn
logger = logging.getLogger("NeoLogger")
ROBOT_AUTO_KEYWORDS = False ROBOT_AUTO_KEYWORDS = False
@ -26,14 +27,14 @@ def generate_file(size: int = SIMPLE_OBJ_SIZE) -> str:
(str): the path to the generated file (str): the path to the generated file
""" """
file_path = f"{os.getcwd()}/{ASSETS_DIR}/{str(uuid.uuid4())}" file_path = f"{os.getcwd()}/{ASSETS_DIR}/{str(uuid.uuid4())}"
with open(file_path, 'wb') as fout: with open(file_path, "wb") as fout:
fout.write(os.urandom(size)) fout.write(os.urandom(size))
logger.info(f"file with size {size} bytes has been generated: {file_path}") logger.info(f"file with size {size} bytes has been generated: {file_path}")
return file_path return file_path
@allure.step('Generate file') @allure.step("Generate file")
def generate_file_and_file_hash(size: int) -> Tuple[str, str]: def generate_file_and_file_hash(size: int) -> Tuple[str, str]:
""" """
Function generates a binary file with the specified size in bytes Function generates a binary file with the specified size in bytes
@ -50,7 +51,7 @@ def generate_file_and_file_hash(size: int) -> Tuple[str, str]:
return file_path, file_hash return file_path, file_hash
@allure.step('Get File Hash') @allure.step("Get File Hash")
def get_file_hash(filename: str, len: int = None): def get_file_hash(filename: str, len: int = None):
""" """
This function generates hash for the specified file. This function generates hash for the specified file.
@ -69,23 +70,22 @@ def get_file_hash(filename: str, len: int = None):
return file_hash.hexdigest() return file_hash.hexdigest()
@allure.step('Generate Wallet') @allure.step("Generate Wallet")
def generate_wallet(): def generate_wallet():
return wallet.init_wallet(ASSETS_DIR) return wallet.init_wallet(ASSETS_DIR)
@allure.step('Get Docker Logs') @allure.step("Get Docker Logs")
def get_container_logs(testcase_name: str) -> None: def get_container_logs(testcase_name: str) -> None:
client = docker.APIClient(base_url='unix://var/run/docker.sock') client = docker.APIClient(base_url="unix://var/run/docker.sock")
logs_dir = BuiltIn().get_variable_value("${OUTPUT_DIR}") logs_dir = os.getenv("OUTPUT_DIR")
tar_name = f"{logs_dir}/dockerlogs({testcase_name}).tar.gz" tar_name = f"{logs_dir}/dockerlogs({testcase_name}).tar.gz"
tar = tarfile.open(tar_name, "w:gz") tar = tarfile.open(tar_name, "w:gz")
for container in client.containers(): for container in client.containers():
container_name = container['Names'][0][1:] container_name = container["Names"][0][1:]
if (client.inspect_container(container_name)['Config']['Domainname'] if client.inspect_container(container_name)["Config"]["Domainname"] == "neofs.devenv":
== "neofs.devenv"):
file_name = f"{logs_dir}/docker_log_{container_name}" file_name = f"{logs_dir}/docker_log_{container_name}"
with open(file_name, 'wb') as out: with open(file_name, "wb") as out:
out.write(client.logs(container_name)) out.write(client.logs(container_name))
logger.info(f"Collected logs from container {container_name}") logger.info(f"Collected logs from container {container_name}")
tar.add(file_name) tar.add(file_name)
@ -93,10 +93,10 @@ def get_container_logs(testcase_name: str) -> None:
tar.close() tar.close()
@allure.step('Make Up') @allure.step("Make Up")
def make_up(services: list = [], config_dict: dict = {}): def make_up(services: list = [], config_dict: dict = {}):
test_path = os.getcwd() test_path = os.getcwd()
dev_path = os.getenv('DEVENV_PATH', '../neofs-dev-env') dev_path = os.getenv("DEVENV_PATH", "../neofs-dev-env")
os.chdir(dev_path) os.chdir(dev_path)
if len(services) > 0: if len(services) > 0:
@ -104,33 +104,30 @@ def make_up(services: list = [], config_dict: dict = {}):
if config_dict != {}: if config_dict != {}:
with open(f"{dev_path}/.int_test.env", "a") as out: with open(f"{dev_path}/.int_test.env", "a") as out:
for key, value in config_dict.items(): for key, value in config_dict.items():
out.write(f'{key}={value}') out.write(f"{key}={value}")
cmd = f'make up/{service}' cmd = f"make up/{service}"
_cmd_run(cmd) _cmd_run(cmd)
else: else:
cmd = ( cmd = f"make up/basic;" f"make update.max_object_size val={SIMPLE_OBJ_SIZE}"
f'make up/basic;'
f'make update.max_object_size val={SIMPLE_OBJ_SIZE}'
)
_cmd_run(cmd, timeout=120) _cmd_run(cmd, timeout=120)
os.chdir(test_path) os.chdir(test_path)
@allure.step('Make Down') @allure.step("Make Down")
def make_down(services: list = []): def make_down(services: list = []):
test_path = os.getcwd() test_path = os.getcwd()
dev_path = os.getenv('DEVENV_PATH', '../neofs-dev-env') dev_path = os.getenv("DEVENV_PATH", "../neofs-dev-env")
os.chdir(dev_path) os.chdir(dev_path)
if len(services) > 0: if len(services) > 0:
for service in services: for service in services:
cmd = f'make down/{service}' cmd = f"make down/{service}"
_cmd_run(cmd) _cmd_run(cmd)
with open(f"{dev_path}/.int_test.env", "w"): with open(f"{dev_path}/.int_test.env", "w"):
pass pass
else: else:
cmd = 'make down; make clean' cmd = "make down; make clean"
_cmd_run(cmd, timeout=60) _cmd_run(cmd, timeout=60)
os.chdir(test_path) os.chdir(test_path)
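Illustrative fixture (not introduced by this commit) showing how make_up/make_down might wrap a test that needs a service restarted with an extra env override; the service name and variable are placeholders.

import pytest

@pytest.fixture
def restarted_service():
    # "storage" and the logger-level override are purely illustrative values.
    make_up(services=["storage"], config_dict={"NEOFS_LOGGER_LEVEL": "debug"})
    yield
    make_down(services=["storage"])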