forked from TrueCloudLab/frostfs-testcases

commit 0e27ea02c1 (parent e086d0d62b)

Some linter fixes.

Signed-off-by: a.y.volkov <a.y.volkov@yadro.com>

19 changed files with 291 additions and 275 deletions
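The hunks below are routine PEP 8 / isort-style cleanups rather than behavioral changes: imports are regrouped (standard library first, third-party and local modules after), annotated keyword arguments get spaces around `=`, `'''` docstrings become `"""`, and a few typos are fixed. A minimal before/after sketch of the recurring pattern (illustrative only, not taken verbatim from any single file):

    # before: unordered imports, no spaces around '=' in annotated defaults, ''' docstrings
    import re
    import json
    from robot.api.deco import keyword
    from robot.api import logger

    def head_object(wallet: str, oid: str, bearer_token: str=""):
        '''HEAD an Object.'''

    # after: stdlib imports first and alphabetized, third-party imports grouped separately,
    # spaces around '=' when the parameter is annotated, and """ docstrings
    import json
    import re

    from robot.api import logger
    from robot.api.deco import keyword

    def head_object(wallet: str, oid: str, bearer_token: str = ""):
        """HEAD an Object."""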
@@ -1,34 +1,34 @@
 #!/usr/bin/python3.8
 
-from enum import Enum, auto
+import base64
 import json
 import os
 import re
 import uuid
+from enum import Enum, auto
 
-import base64
 import base58
 from cli_helpers import _cmd_run
 from common import ASSETS_DIR, NEOFS_ENDPOINT, WALLET_PASS
-from robot.api.deco import keyword
 from robot.api import logger
+from robot.api.deco import keyword
 
 """
     Robot Keywords and helper functions for work with NeoFS ACL.
 """
 
 
 ROBOT_AUTO_KEYWORDS = False
 
 # path to neofs-cli executable
 NEOFS_CLI_EXEC = os.getenv('NEOFS_CLI_EXEC', 'neofs-cli')
 EACL_LIFETIME = 100500
 
 
 class AutoName(Enum):
     def _generate_next_value_(name, start, count, last_values):
         return name
 
 
 class Role(AutoName):
     USER = auto()
     SYSTEM = auto()
@@ -65,12 +65,13 @@ def _encode_cid_for_eacl(cid: str) -> str:
     cid_base58 = base58.b58decode(cid)
     return base64.b64encode(cid_base58).decode("utf-8")
 
 
 @keyword('Create eACL')
 def create_eacl(cid: str, rules_list: list):
     table = f"{os.getcwd()}/{ASSETS_DIR}/eacl_table_{str(uuid.uuid4())}.json"
     rules = ""
     for rule in rules_list:
         # TODO: check if $Object: is still necessary for filtering in the newest releases
         rules += f"--rule '{rule}' "
     cmd = (
         f"{NEOFS_CLI_EXEC} acl extended create --cid {cid} "
@@ -99,47 +100,47 @@ def form_bearertoken_file(wif: str, cid: str, eacl_records: list) -> str:
     json_eacl = json.loads(eacl)
     logger.info(json_eacl)
     eacl_result = {
         "body":
             {
                 "eaclTable":
                     {
                         "containerID":
                             {
                                 "value": enc_cid
                             },
                         "records": []
                     },
                 "lifetime":
                     {
                         "exp": EACL_LIFETIME,
                         "nbf": "1",
                         "iat": "0"
                     }
             }
     }
 
     if not eacl_records:
-        raise(f"Got empty eacl_records list: {eacl_records}")
+        raise (f"Got empty eacl_records list: {eacl_records}")
     for record in eacl_records:
         op_data = {
             "operation": record['Operation'],
             "action": record['Access'],
             "filters": [],
             "targets": []
         }
 
         if Role(record['Role']):
             op_data['targets'] = [
                 {
                     "role": record['Role']
                 }
             ]
         else:
             op_data['targets'] = [
                 {
-                    "keys": [ record['Role'] ]
+                    "keys": [record['Role']]
                 }
             ]
 
         if 'Filters' in record.keys():
             op_data["filters"].append(record['Filters'])
@@ -6,8 +6,8 @@ and other CLIs.
 """
 
 import subprocess
-import pexpect
 
+import pexpect
 from robot.api import logger
 
 ROBOT_AUTO_KEYWORDS = False
@@ -21,20 +21,21 @@ def _cmd_run(cmd, timeout=30):
     try:
         logger.info(f"Executing command: {cmd}")
         compl_proc = subprocess.run(cmd, check=True, universal_newlines=True,
                                     stdout=subprocess.PIPE, stderr=subprocess.STDOUT, timeout=timeout,
                                     shell=True)
         output = compl_proc.stdout
         logger.info(f"Output: {output}")
         return output
     except subprocess.CalledProcessError as exc:
         raise RuntimeError(f"Error:\nreturn code: {exc.returncode} "
                            f"\nOutput: {exc.output}") from exc
     except Exception as exc:
         return_code, _ = subprocess.getstatusoutput(cmd)
         logger.info(f"Error:\nreturn code: {return_code}\nOutput: "
                     f"{exc.output.decode('utf-8') if type(exc.output) is bytes else exc.output}")
         raise
 
 
 def _run_with_passwd(cmd):
     child = pexpect.spawn(cmd)
     child.expect(".*")
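For orientation, `_cmd_run` above is the shared shell runner that most keyword modules touched by this commit import; a trivial usage sketch (assuming the module is importable as `cli_helpers` inside the test environment):

    from cli_helpers import _cmd_run

    # Runs the command through the shell and returns combined stdout/stderr,
    # raising RuntimeError with the captured output on a non-zero exit code.
    output = _cmd_run("echo hello", timeout=5)
    print(output)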
@@ -6,6 +6,7 @@ from robot.api.deco import keyword
 
 ROBOT_AUTO_KEYWORDS = False
 
 
 @keyword('Run Process And Enter Empty Password')
 def run_proccess_and_interact(cmd: str) -> str:
     p = pexpect.spawn(cmd)
@@ -10,18 +10,18 @@
     first non-null response.
 """
 
-from common import NEOFS_NETMAP
-import neofs_verbs
 
-from robot.api.deco import keyword
 from robot.api import logger
+from robot.api.deco import keyword
 from robot.libraries.BuiltIn import BuiltIn
 
+import neofs_verbs
+from common import NEOFS_NETMAP
 
 ROBOT_AUTO_KEYWORDS = False
 
 
 @keyword('Get Link Object')
-def get_link_object(wallet: str, cid: str, oid: str, bearer_token: str=""):
+def get_link_object(wallet: str, cid: str, oid: str, bearer_token: str = ""):
     """
     Args:
         wallet (str): path to the wallet on whose behalf the Storage Nodes
@@ -37,10 +37,10 @@ def get_link_object(wallet: str, cid: str, oid: str, bearer_token: str=""):
     for node in NEOFS_NETMAP:
         try:
             resp = neofs_verbs.head_object(wallet, cid, oid,
                                            endpoint=node,
                                            is_raw=True,
                                            is_direct=True,
                                            bearer_token=bearer_token)
             if resp['link']:
                 return resp['link']
         except Exception:
@@ -65,9 +65,9 @@ def get_last_object(wallet: str, cid: str, oid: str):
     for node in NEOFS_NETMAP:
         try:
             resp = neofs_verbs.head_object(wallet, cid, oid,
                                            endpoint=node,
                                            is_raw=True,
                                            is_direct=True)
             if resp['lastPart']:
                 return resp['lastPart']
         except Exception:
@@ -8,22 +8,21 @@
 import json
 import time
 
-from common import NEOFS_ENDPOINT, COMMON_PLACEMENT_RULE, NEOFS_CLI_EXEC, WALLET_PASS
-from cli_helpers import _cmd_run
-from data_formatters import dict_to_attrs
-import json_transformers
 
-from robot.api.deco import keyword
 from robot.api import logger
+from robot.api.deco import keyword
 
+import json_transformers
+from cli_helpers import _cmd_run
+from common import NEOFS_ENDPOINT, COMMON_PLACEMENT_RULE, NEOFS_CLI_EXEC, WALLET_PASS
+from data_formatters import dict_to_attrs
 
 ROBOT_AUTO_KEYWORDS = False
 
 
 @keyword('Create Container')
-def create_container(wallet: str, rule: str=COMMON_PLACEMENT_RULE, basic_acl: str='',
-                     attributes: dict={}, session_token: str='', session_wallet: str='',
-                     options: str=''):
+def create_container(wallet: str, rule: str = COMMON_PLACEMENT_RULE, basic_acl: str = '',
+                     attributes: dict = {}, session_token: str = '', session_wallet: str = '',
+                     options: str = ''):
     """
     A wrapper for `neofs-cli container create` call.
 
@@ -58,18 +57,18 @@ def create_container(wallet: str, rule: str=COMMON_PLACEMENT_RULE, basic_acl: st
 
     logger.info("Container created; waiting until it is persisted in sidechain")
 
     deadline_to_persist = 15  # seconds
     for i in range(0, deadline_to_persist):
         time.sleep(1)
         containers = list_containers(wallet)
         if cid in containers:
             break
         logger.info(f"There is no {cid} in {containers} yet; continue")
-        if i+1 == deadline_to_persist:
+        if i + 1 == deadline_to_persist:
             raise RuntimeError(
                 f"After {deadline_to_persist} seconds the container "
                 f"{cid} hasn't been persisted; exiting"
             )
     return cid
 
 
@@ -113,7 +112,7 @@ def get_container(wallet: str, cid: str):
         attributes[attr['key']] = attr['value']
     container_info['attributes'] = attributes
     container_info['ownerID'] = json_transformers.json_reencode(
         container_info['ownerID']['value'])
     return container_info
 
 
@@ -7,15 +7,15 @@ from functools import reduce
 
 
 def dict_to_attrs(attrs: dict):
-    '''
+    """
     This function takes dictionary of object attributes and converts them
-    into the string. The string is passed to `--attibutes` key of the
+    into the string. The string is passed to `--attributes` key of the
     neofs-cli.
 
     Args:
-        attrs (dict): object attirbutes in {"a": "b", "c": "d"} format.
+        attrs (dict): object attributes in {"a": "b", "c": "d"} format.
 
     Returns:
         (str): string in "a=b,c=d" format.
-    '''
-    return reduce(lambda a,b: f"{a},{b}", map(lambda i: f"{i}={attrs[i]}", attrs))
+    """
+    return reduce(lambda a, b: f"{a},{b}", map(lambda i: f"{i}={attrs[i]}", attrs))
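The helper above is what turns a Python dict into the comma-separated string that `--attributes` expects; a standalone sketch of the same reduce-based conversion with a sample input:

    from functools import reduce

    def dict_to_attrs(attrs: dict) -> str:
        # {"a": "b", "c": "d"} -> "a=b,c=d"
        return reduce(lambda a, b: f"{a},{b}", map(lambda i: f"{i}={attrs[i]}", attrs))

    print(dict_to_attrs({"a": "b", "c": "d"}))  # a=b,c=d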
@@ -3,15 +3,16 @@
 import shutil
 
 import requests
+from robot.api import logger
+from robot.api.deco import keyword
+from robot.libraries.BuiltIn import BuiltIn
 
 from common import HTTP_GATE
-from robot.api.deco import keyword
-from robot.api import logger
-from robot.libraries.BuiltIn import BuiltIn
 
 ROBOT_AUTO_KEYWORDS = False
 ASSETS_DIR = BuiltIn().get_variable_value("${ASSETS_DIR}")
 
 
 @keyword('Get via HTTP Gate')
 def get_via_http_gate(cid: str, oid: str):
     """
@@ -1,24 +1,25 @@
 #!/usr/bin/python3
 
-'''
+"""
 When doing requests to NeoFS, we get JSON output as an automatically decoded
 structure from protobuf. Some fields are decoded with boilerplates and binary
 values are Base64-encoded.
 
 This module contains functions which rearrange the structure and reencode binary
 data from Base64 to Base58.
-'''
+"""
 
 
 import base64
 
 import base58
 
 ROBOT_AUTO_KEYWORDS = False
 
 
 def decode_simple_header(data: dict):
-    '''
+    """
     This function reencodes Simple Object header and its attributes.
-    '''
+    """
     try:
         data = decode_common_fields(data)
 
@@ -34,32 +35,33 @@ def decode_simple_header(data: dict):
 
 
 def decode_split_header(data: dict):
-    '''
+    """
     This function rearranges Complex Object header.
     The header holds SplitID, a random unique
     number, which is common among all splitted objects, and IDs of the Linking
     Object and the last splitted Object.
-    '''
+    """
     try:
         data["splitId"] = json_reencode(data["splitId"])
         data["lastPart"] = (
             json_reencode(data["lastPart"]["value"])
             if data["lastPart"] else None
         )
         data["link"] = (
             json_reencode(data["link"]["value"])
             if data["link"] else None
         )
     except Exception as exc:
         raise ValueError(f"failed to decode JSON output: {exc}") from exc
 
     return data
 
 
 def decode_linking_object(data: dict):
-    '''
+    """
     This function reencodes Linking Object header.
     It contains IDs of child Objects and Split Chain data.
-    '''
+    """
     try:
         data = decode_simple_header(data)
         # reencoding Child Object IDs
@@ -68,13 +70,13 @@ def decode_linking_object(data: dict):
             data['header']['split']['children'][ind] = json_reencode(val['value'])
         data['header']['split']['splitID'] = json_reencode(data['header']['split']['splitID'])
         data['header']['split']['previous'] = (
             json_reencode(data['header']['split']['previous']['value'])
             if data['header']['split']['previous'] else None
         )
         data['header']['split']['parent'] = (
             json_reencode(data['header']['split']['parent']['value'])
             if data['header']['split']['parent'] else None
         )
     except Exception as exc:
         raise ValueError(f"failed to decode JSON output: {exc}") from exc
 
@@ -82,9 +84,9 @@ def decode_linking_object(data: dict):
 
 
 def decode_storage_group(data: dict):
-    '''
+    """
     This function reencodes Storage Group header.
-    '''
+    """
     try:
         data = decode_common_fields(data)
     except Exception as exc:
@@ -92,53 +94,56 @@ def decode_storage_group(data: dict):
 
     return data
 
 
 def decode_tombstone(data: dict):
-    '''
+    """
     This function reencodes Tombstone header.
-    '''
+    """
     try:
         data = decode_simple_header(data)
         data['header']['sessionToken'] = decode_session_token(
             data['header']['sessionToken'])
     except Exception as exc:
         raise ValueError(f"failed to decode JSON output: {exc}") from exc
     return data
 
 
 def decode_session_token(data: dict):
-    '''
+    """
     This function reencodes a fragment of header which contains
     information about session token.
-    '''
+    """
     data['body']['object']['address']['containerID'] = json_reencode(
         data['body']['object']['address']['containerID']['value'])
     data['body']['object']['address']['objectID'] = json_reencode(
         data['body']['object']['address']['objectID']['value'])
     return data
 
 
 def json_reencode(data: str):
-    '''
+    """
     According to JSON protocol, binary data (Object/Container/Storage Group IDs, etc)
     is converted to string via Base58 encoder. But we usually operate with Base64-encoded
     format.
     This function reencodes given Base58 string into the Base64 one.
-    '''
+    """
     return base58.b58encode(base64.b64decode(data)).decode("utf-8")
 
 
 def encode_for_json(data: str):
-    '''
+    """
     This function encodes binary data for sending them as protobuf
     structures.
-    '''
+    """
     return base64.b64encode(base58.b58decode(data)).decode('utf-8')
 
 
 def decode_common_fields(data: dict):
-    '''
+    """
     Despite of type (simple/complex Object, Storage Group, etc) every Object
     header contains several common fields.
     This function rearranges these fields.
-    '''
+    """
     # reencoding binary IDs
     data["objectID"] = json_reencode(data["objectID"]["value"])
     data["header"]["containerID"] = json_reencode(data["header"]["containerID"]["value"])
@@ -146,6 +151,6 @@ def decode_common_fields(data: dict):
     data["header"]["homomorphicHash"] = json_reencode(data["header"]["homomorphicHash"]["sum"])
     data["header"]["payloadHash"] = json_reencode(data["header"]["payloadHash"]["sum"])
     data["header"]["version"] = (
         f"{data['header']['version']['major']}{data['header']['version']['minor']}"
     )
     return data
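Note that `json_reencode` and `encode_for_json` above are inverses of each other (Base64 to Base58 and back); a small self-contained sketch, where the 32-byte ID is a dummy value chosen only for illustration:

    import base64

    import base58

    def json_reencode(data: str) -> str:
        # Base64 (protobuf JSON output) -> Base58 (human-readable NeoFS IDs)
        return base58.b58encode(base64.b64decode(data)).decode("utf-8")

    def encode_for_json(data: str) -> str:
        # Base58 -> Base64, for sending data back as protobuf JSON structures
        return base64.b64encode(base58.b58decode(data)).decode('utf-8')

    dummy_id = base58.b58encode(b"\x01" * 32).decode("utf-8")
    assert json_reencode(encode_for_json(dummy_id)) == dummy_id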
@@ -5,12 +5,13 @@ import os
 import random
 
 from neo3 import wallet
-from common import NEOFS_NETMAP_DICT
-import neofs_verbs
-from robot.api.deco import keyword
 from robot.api import logger
+from robot.api.deco import keyword
 from robot.libraries.BuiltIn import BuiltIn
 
+import neofs_verbs
+from common import NEOFS_NETMAP_DICT
 
 ROBOT_AUTO_KEYWORDS = False
 
 # path to neofs-cli executable
@@ -30,7 +31,7 @@ def verify_head_tombstone(wallet_path: str, cid: str, oid_ts: str, oid: str):
     header = header['header']
 
     BuiltIn().should_be_equal(header["containerID"], cid,
                               msg="Tombstone Header CID is wrong")
 
     wlt_data = dict()
     with open(wallet_path, 'r') as fout:
@@ -39,21 +40,21 @@ def verify_head_tombstone(wallet_path: str, cid: str, oid_ts: str, oid: str):
     addr = wlt.accounts[0].address
 
     BuiltIn().should_be_equal(header["ownerID"], addr,
                               msg="Tombstone Owner ID is wrong")
 
     BuiltIn().should_be_equal(header["objectType"], 'TOMBSTONE',
                               msg="Header Type isn't Tombstone")
 
     BuiltIn().should_be_equal(header["sessionToken"]["body"]["object"]["verb"], 'DELETE',
                               msg="Header Session Type isn't DELETE")
 
     BuiltIn().should_be_equal(header["sessionToken"]["body"]["object"]["address"]["containerID"],
                               cid,
                               msg="Header Session ID is wrong")
 
     BuiltIn().should_be_equal(header["sessionToken"]["body"]["object"]["address"]["objectID"],
                               oid,
                               msg="Header Session OID is wrong")
 
 
 @keyword('Get control endpoint with wif')
@@ -6,17 +6,17 @@
 
 import json
 import os
-import re
 import random
+import re
 import uuid
 
-from common import NEOFS_ENDPOINT, ASSETS_DIR, NEOFS_NETMAP, WALLET_PASS
-from cli_helpers import _cmd_run
-import json_transformers
-from data_formatters import dict_to_attrs
 
-from robot.api.deco import keyword
 from robot.api import logger
+from robot.api.deco import keyword
 
+import json_transformers
+from cli_helpers import _cmd_run
+from common import NEOFS_ENDPOINT, ASSETS_DIR, NEOFS_NETMAP, WALLET_PASS
+from data_formatters import dict_to_attrs
 
 ROBOT_AUTO_KEYWORDS = False
 
@@ -25,9 +25,9 @@ NEOFS_CLI_EXEC = os.getenv('NEOFS_CLI_EXEC', 'neofs-cli')
 
 
 @keyword('Get object')
-def get_object(wallet: str, cid: str, oid: str, bearer_token: str="",
-               write_object: str="", endpoint: str="", options: str="" ):
-    '''
+def get_object(wallet: str, cid: str, oid: str, bearer_token: str = "",
+               write_object: str = "", endpoint: str = "", options: str = ""):
+    """
     GET from NeoFS.
 
     Args:
@@ -40,7 +40,7 @@ def get_object(wallet: str, cid: str, oid: str, bearer_token: str="",
         options (optional, str): any options which `neofs-cli object get` accepts
     Returns:
         (str): path to downloaded file
-    '''
+    """
 
     if not write_object:
         write_object = str(uuid.uuid4())
@@ -62,8 +62,8 @@ def get_object(wallet: str, cid: str, oid: str, bearer_token: str="",
 # TODO: make `bearer_token` optional
 @keyword('Get Range Hash')
 def get_range_hash(wallet: str, cid: str, oid: str, bearer_token: str, range_cut: str,
-                   options: str=""):
-    '''
+                   options: str = ""):
+    """
     GETRANGEHASH of given Object.
 
     Args:
@@ -76,7 +76,7 @@ def get_range_hash(wallet: str, cid: str, oid: str, bearer_token: str, range_cut
         options (optional, str): any options which `neofs-cli object hash` accepts
     Returns:
         None
-    '''
+    """
     cmd = (
         f'{NEOFS_CLI_EXEC} --rpc-endpoint {NEOFS_ENDPOINT} --wallet {wallet} '
         f'object hash --cid {cid} --oid {oid} --range {range_cut} --config {WALLET_PASS} '
@@ -89,9 +89,9 @@ def get_range_hash(wallet: str, cid: str, oid: str, bearer_token: str, range_cut
 
 
 @keyword('Put object')
-def put_object(wallet: str, path: str, cid: str, bearer: str="", user_headers: dict={},
-               endpoint: str="", options: str="" ):
-    '''
+def put_object(wallet: str, path: str, cid: str, bearer: str = "", user_headers: dict = {},
+               endpoint: str = "", options: str = ""):
+    """
     PUT of given file.
 
     Args:
@@ -104,7 +104,7 @@ def put_object(wallet: str, path: str, cid: str, bearer: str="", user_headers: d
         options (optional, str): any options which `neofs-cli object put` accepts
     Returns:
         (str): ID of uploaded Object
-    '''
+    """
     if not endpoint:
         endpoint = random.sample(NEOFS_NETMAP, 1)[0]
     cmd = (
@@ -121,8 +121,8 @@ def put_object(wallet: str, path: str, cid: str, bearer: str="", user_headers: d
 
 
 @keyword('Delete object')
-def delete_object(wallet: str, cid: str, oid: str, bearer: str="", options: str=""):
-    '''
+def delete_object(wallet: str, cid: str, oid: str, bearer: str = "", options: str = ""):
+    """
     DELETE an Object.
 
     Args:
@@ -133,7 +133,7 @@ def delete_object(wallet: str, cid: str, oid: str, bearer: str="", options: str=
         options (optional, str): any options which `neofs-cli object delete` accepts
     Returns:
         (str): Tombstone ID
-    '''
+    """
     cmd = (
         f'{NEOFS_CLI_EXEC} --rpc-endpoint {NEOFS_ENDPOINT} --wallet {wallet} '
         f'object delete --cid {cid} --oid {oid} {options} --config {WALLET_PASS} '
@@ -149,20 +149,21 @@ def delete_object(wallet: str, cid: str, oid: str, bearer: str="", options: str=
 # TODO: make `bearer` an optional parameter
 @keyword('Get Range')
 def get_range(wallet: str, cid: str, oid: str, file_path: str, bearer: str, range_cut: str,
-              options:str=""):
-    '''
+              options: str = ""):
+    """
     GETRANGE an Object.
 
     Args:
         wallet (str): wallet on whose behalf GETRANGE is done
         cid (str): ID of Container where we get the Object from
         oid (str): ID of Object we are going to request
+        file_path (str): file path
         range_cut (str): range to take data from in the form offset:length
         bearer (optional, str): path to Bearer Token file, appends to `--bearer` key
         options (optional, str): any options which `neofs-cli object range` accepts
     Returns:
         (void)
-    '''
+    """
     range_file = f"{ASSETS_DIR}/{uuid.uuid4()}"
     cmd = (
         f'{NEOFS_CLI_EXEC} --rpc-endpoint {NEOFS_ENDPOINT} --wallet {wallet} '
@@ -178,9 +179,9 @@ def get_range(wallet: str, cid: str, oid: str, file_path: str, bearer: str, rang
 
 
 @keyword('Search object')
-def search_object(wallet: str, cid: str, keys: str="", bearer: str="", filters: dict={},
+def search_object(wallet: str, cid: str, keys: str = "", bearer: str = "", filters: dict = {},
                   expected_objects_list=[]):
-    '''
+    """
     SEARCH an Object.
 
     Args:
@@ -193,7 +194,7 @@ def search_object(wallet: str, cid: str, keys: str="", bearer: str="", filters:
         expected_objects_list (optional, list): a list of ObjectIDs to compare found Objects with
     Returns:
         (list): list of found ObjectIDs
-    '''
+    """
     filters_result = ""
     if filters:
         filters_result += "--filters "
@@ -215,16 +216,16 @@ def search_object(wallet: str, cid: str, keys: str="", bearer: str="", filters:
                     f"is equal for expected list '{expected_objects_list}'")
     else:
         logger.warn(f"Found object list {found_objects} ",
                     f"is not equal to expected list '{expected_objects_list}'")
 
     return found_objects
 
 
 @keyword('Head object')
-def head_object(wallet: str, cid: str, oid: str, bearer_token: str="",
-                options:str="", endpoint: str="", json_output: bool = True,
+def head_object(wallet: str, cid: str, oid: str, bearer_token: str = "",
+                options: str = "", endpoint: str = "", json_output: bool = True,
                 is_raw: bool = False, is_direct: bool = False):
-    '''
+    """
     HEAD an Object.
 
     Args:
@@ -245,7 +246,7 @@ def head_object(wallet: str, cid: str, oid: str, bearer_token: str="",
         (dict): HEAD response in JSON format
         or
         (str): HEAD response as a plain text
-    '''
+    """
     cmd = (
         f'{NEOFS_CLI_EXEC} --rpc-endpoint {endpoint if endpoint else NEOFS_ENDPOINT} '
         f'--wallet {wallet} --config {WALLET_PASS} '
@@ -8,11 +8,11 @@
 import random
 
 import docker
 
 from robot.api.deco import keyword
 
 ROBOT_AUTO_KEYWORDS = False
 
 
 @keyword('Stop Nodes')
 def stop_nodes(number: int, nodes: list):
     """
@@ -1,16 +1,15 @@
 #!/usr/bin/python3
 
-import os
-import pexpect
 import re
 
-from robot.api.deco import keyword
-from robot.api import logger
+import pexpect
 from neo3 import wallet
+from robot.api import logger
+from robot.api.deco import keyword
 
-from common import *
-import rpc_client
 import contract
+import rpc_client
+from common import *
 from wrappers import run_sh_with_passwd_contract
 
 ROBOT_AUTO_KEYWORDS = False
@@ -65,20 +64,20 @@ def get_balance(wif: str):
 
     acc = wallet.Account.from_wif(wif, '')
     payload = [
         {
             'type': 'Hash160',
             'value': str(acc.script_hash)
         }
     ]
     try:
         resp = morph_rpc_cli.invoke_function(
             contract.get_balance_contract_hash(NEOFS_NEO_API_ENDPOINT),
             'balanceOf',
             payload
         )
         logger.info(resp)
         value = int(resp['stack'][0]['value'])
-        return value/(10**MORPH_TOKEN_POWER)
+        return value / (10 ** MORPH_TOKEN_POWER)
     except Exception as e:
         logger.error(f"failed to get {wif} balance: {e}")
         raise e
@@ -6,12 +6,12 @@ import uuid
 
 import boto3
 import botocore
-from cli_helpers import _run_with_passwd
 
-from common import GATE_PUB_KEY, NEOFS_ENDPOINT, S3_GATE
 import urllib3
-from robot.api.deco import keyword
 from robot.api import logger
+from robot.api.deco import keyword
 
+from cli_helpers import _run_with_passwd
+from common import GATE_PUB_KEY, NEOFS_ENDPOINT, S3_GATE
 
 ##########################################################
 # Disabling warnings on self-signed certificate which the
@@ -24,6 +24,7 @@ CREDENTIALS_CREATE_TIMEOUT = '30s'
 
 NEOFS_EXEC = os.getenv('NEOFS_EXEC', 'neofs-authmate')
 
 
 @keyword('Init S3 Credentials')
 def init_s3_credentials(wallet):
     bucket = str(uuid.uuid4())
@@ -71,7 +72,7 @@ def config_s3_client(access_key_id, secret_access_key):
 
     except botocore.exceptions.ClientError as err:
         raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
                         f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
 
 
 @keyword('List objects S3 v2')
@@ -87,7 +88,7 @@ def list_objects_s3_v2(s3_client, bucket):
 
     except botocore.exceptions.ClientError as err:
         raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
                         f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
 
 
 @keyword('List objects S3')
@@ -103,7 +104,7 @@ def list_objects_s3(s3_client, bucket):
 
     except botocore.exceptions.ClientError as err:
         raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
                         f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
 
 
 @keyword('Create bucket S3')
@@ -117,7 +118,7 @@ def create_bucket_s3(s3_client):
 
     except botocore.exceptions.ClientError as err:
         raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
                         f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
 
 
 @keyword('List buckets S3')
@@ -134,7 +135,7 @@ def list_buckets_s3(s3_client):
 
     except botocore.exceptions.ClientError as err:
         raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
                         f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
 
 
 @keyword('Delete bucket S3')
@@ -147,7 +148,7 @@ def delete_bucket_s3(s3_client, bucket):
 
     except botocore.exceptions.ClientError as err:
         raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
                         f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
 
 
 @keyword('Head bucket S3')
@@ -159,7 +160,7 @@ def head_bucket(s3_client, bucket):
 
     except botocore.exceptions.ClientError as err:
         raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
                         f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
 
 
 @keyword('Put object S3')
@@ -174,12 +175,11 @@ def put_object_s3(s3_client, bucket, filepath):
         logger.info(f"S3 Put object result: {response}")
     except botocore.exceptions.ClientError as err:
         raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
                         f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
 
 
 @keyword('Head object S3')
 def head_object_s3(s3_client, bucket, object_key):
 
     try:
         response = s3_client.head_object(Bucket=bucket, Key=object_key)
         logger.info(f"S3 Head object result: {response}")
@@ -187,7 +187,7 @@ def head_object_s3(s3_client, bucket, object_key):
 
     except botocore.exceptions.ClientError as err:
         raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
                         f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
 
 
 @keyword('Delete object S3')
@@ -199,7 +199,7 @@ def delete_object_s3(s3_client, bucket, object_key):
 
     except botocore.exceptions.ClientError as err:
         raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
                         f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
 
 
 @keyword('Copy object S3')
@@ -207,14 +207,14 @@ def copy_object_s3(s3_client, bucket, object_key):
     filename = f"{os.getcwd()}/{uuid.uuid4()}"
     try:
         response = s3_client.copy_object(Bucket=bucket,
                                          CopySource=f"{bucket}/{object_key}",
                                          Key=filename)
         logger.info(f"S3 Copy object result: {response}")
         return filename
 
     except botocore.exceptions.ClientError as err:
         raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
                         f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
 
 
 @keyword('Get object S3')
@@ -233,4 +233,4 @@ def get_object_s3(s3_client, bucket, object_key):
 
     except botocore.exceptions.ClientError as err:
         raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
                         f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
@@ -10,12 +10,12 @@ import os
 import uuid
 
 from neo3 import wallet
-from common import WALLET_PASS, ASSETS_DIR
-from cli_helpers import _cmd_run
-import json_transformers
 
-from robot.api.deco import keyword
 from robot.api import logger
+from robot.api.deco import keyword
 
+import json_transformers
+from cli_helpers import _cmd_run
+from common import WALLET_PASS, ASSETS_DIR
 
 ROBOT_AUTO_KEYWORDS = False
 
@@ -24,7 +24,7 @@ NEOFS_CLI_EXEC = os.getenv('NEOFS_CLI_EXEC', 'neofs-cli')
 
 
 @keyword('Generate Session Token')
-def generate_session_token(owner: str, session_wallet: str, cid: str='') -> str:
+def generate_session_token(owner: str, session_wallet: str, cid: str = '') -> str:
     """
     This function generates session token for ContainerSessionContext
     and writes it to the file. It is able to prepare session token file
@@ -47,34 +47,33 @@ def generate_session_token(owner: str, session_wallet: str, cid: str='') -> str:
         session_wlt_content = json.load(fout)
     session_wlt = wallet.Wallet.from_json(session_wlt_content, password="")
     pub_key_64 = base64.b64encode(
         bytes.fromhex(
             str(session_wlt.accounts[0].public_key)
         )
     ).decode('utf-8')
 
     session_token = {
-        "body":{
-            "id":f"{base64.b64encode(uuid.uuid4().bytes).decode('utf-8')}",
-            "ownerID":{
-                "value":f"{json_transformers.encode_for_json(owner)}"
-            },
-            "lifetime":{
-                "exp":"100000000",
-                "nbf":"0",
-                "iat":"0"
-            },
-            "sessionKey":f"{pub_key_64}",
-            "container":{
-                "verb":"PUT",
-                "wildcard": cid != '',
-                **({ "containerID":
-                        {"value":
-                            f"{base64.b64encode(cid.encode('utf-8')).decode('utf-8')}"}
-                    } if cid != '' else {}
-                )
-            }
-        }
-    }
+        "body": {
+            "id": f"{base64.b64encode(uuid.uuid4().bytes).decode('utf-8')}",
+            "ownerID": {
+                "value": f"{json_transformers.encode_for_json(owner)}"
+            },
+            "lifetime": {
+                "exp": "100000000",
+                "nbf": "0",
+                "iat": "0"
+            },
+            "sessionKey": f"{pub_key_64}",
+            "container": {
+                "verb": "PUT",
+                "wildcard": cid != '',
+                **({"containerID":
+                        {"value": f"{base64.b64encode(cid.encode('utf-8')).decode('utf-8')}"}
+                    } if cid != '' else {}
+                   )
+            }
+        }
+    }
 
     logger.info(f"Got this Session Token: {session_token}")
     with open(file_path, 'w', encoding='utf-8') as session_token_file:
@@ -83,7 +82,7 @@ def generate_session_token(owner: str, session_wallet: str, cid: str='') -> str:
     return file_path
 
 
-@keyword ('Sign Session Token')
+@keyword('Sign Session Token')
 def sign_session_token(session_token: str, wlt: str):
     """
     This function signs the session token by the given wallet.
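The reformatted literal above keeps the `**({...} if cid != '' else {})` trick: the `containerID` entry is only included when a container ID was passed. A standalone sketch of just that idiom, with a dummy `cid` value for illustration:

    import base64
    import json

    cid = "someContainerId"  # dummy value for illustration

    container = {
        "verb": "PUT",
        "wildcard": cid != '',
        # unpack an extra key only when cid is non-empty, otherwise unpack nothing
        **({"containerID": {"value": base64.b64encode(cid.encode('utf-8')).decode('utf-8')}}
           if cid != '' else {})
    }

    print(json.dumps(container, indent=2))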
@@ -5,16 +5,16 @@
 It contains wrappers for `neofs-cli storagegroup` verbs.
 """
 
-from cli_helpers import _cmd_run
 
-from common import NEOFS_CLI_EXEC, NEOFS_ENDPOINT, WALLET_PASS
 from robot.api.deco import keyword
 
+from cli_helpers import _cmd_run
+from common import NEOFS_CLI_EXEC, NEOFS_ENDPOINT, WALLET_PASS
 
 ROBOT_AUTO_KEYWORDS = False
 
 
 @keyword('Put Storagegroup')
-def put_storagegroup(wallet: str, cid: str, objects: list, bearer_token: str=""):
+def put_storagegroup(wallet: str, cid: str, objects: list, bearer_token: str = ""):
     """
     Wrapper for `neofs-cli storagegroup put`. Before the SG is created,
     neofs-cli performs HEAD on `objects`, so this verb must be allowed
@@ -40,7 +40,7 @@ def put_storagegroup(wallet: str, cid: str, objects: list, bearer_token: str="")
 
 
 @keyword('List Storagegroup')
-def list_storagegroup(wallet: str, cid: str, bearer_token: str=""):
+def list_storagegroup(wallet: str, cid: str, bearer_token: str = ""):
     """
     Wrapper for `neofs-cli storagegroup list`. This operation
     requires SEARCH allowed for `wallet` in `cid`.
@@ -64,7 +64,7 @@ def list_storagegroup(wallet: str, cid: str, bearer_token: str=""):
 
 
 @keyword('Get Storagegroup')
-def get_storagegroup(wallet: str, cid: str, oid: str, bearer_token: str=''):
+def get_storagegroup(wallet: str, cid: str, oid: str, bearer_token: str = ''):
     """
     Wrapper for `neofs-cli storagegroup get`.
     Args:
@@ -103,7 +103,7 @@ def get_storagegroup(wallet: str, cid: str, oid: str, bearer_token: str=''):
 
 
 @keyword('Delete Storagegroup')
-def delete_storagegroup(wallet: str, cid: str, oid: str, bearer_token: str=""):
+def delete_storagegroup(wallet: str, cid: str, oid: str, bearer_token: str = ""):
     """
     Wrapper for `neofs-cli storagegroup delete`.
     Args:
@@ -5,12 +5,12 @@
that storage policies are kept.
'''

-from common import NEOFS_NETMAP
+from robot.api import logger
+from robot.api.deco import keyword

import complex_object_actions
import neofs_verbs
-from robot.api.deco import keyword
-from robot.api import logger
+from common import NEOFS_NETMAP

ROBOT_AUTO_KEYWORDS = False
@@ -54,8 +54,8 @@ def get_simple_object_copies(wallet: str, cid: str, oid: str):
    for node in NEOFS_NETMAP:
        try:
            response = neofs_verbs.head_object(wallet, cid, oid,
                                               endpoint=node,
                                               is_direct=True)
            if response:
                copies += 1
        except Exception as exc:
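Only the middle of get_simple_object_copies is visible here. A self-contained sketch of the counting logic it implements, where the docstring, the `copies = 0` initialisation and the logging in the except branch are reconstructions rather than the repository's exact lines:

    def get_simple_object_copies_sketch(wallet: str, cid: str, oid: str) -> int:
        """
        Count how many storage nodes hold a copy of a simple object by sending
        a direct HEAD request to every node in the netmap.
        """
        copies = 0
        for node in NEOFS_NETMAP:
            try:
                response = neofs_verbs.head_object(wallet, cid, oid,
                                                   endpoint=node,
                                                   is_direct=True)
                if response:
                    copies += 1
            except Exception as exc:
                # A node that does not store the object is expected to fail HEAD.
                logger.info(f"No copy on {node}: {exc}")
        return copies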
@@ -101,8 +101,8 @@ def get_nodes_with_object(wallet: str, cid: str, oid: str):
    for node in NEOFS_NETMAP:
        try:
            res = neofs_verbs.head_object(wallet, cid, oid,
                                          endpoint=node,
                                          is_direct=True)
            if res is not None:
                nodes_list.append(node)
        except Exception as exc:
@@ -127,8 +127,8 @@ def get_nodes_without_object(wallet: str, cid: str, oid: str):
    nodes_list = []
    for node in NEOFS_NETMAP:
        res = neofs_verbs.head_object(wallet, cid, oid,
                                      endpoint=node,
                                      is_direct=True)
        if res is None:
            nodes_list.append(node)
    return nodes_list
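These two helpers split the netmap into nodes that do and do not hold the object. A typical policy assertion built on top of them might look as follows (illustrative only; COMMON_PLACEMENT_RULE in common.py requests REP 2, so at least two holders are expected in the four-node devenv):

    holders = get_nodes_with_object(wallet, cid, oid)
    others = get_nodes_without_object(wallet, cid, oid)
    assert len(holders) >= 2, f"Expected at least 2 replicas, got {len(holders)}"
    assert len(holders) + len(others) == len(NEOFS_NETMAP)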
@@ -1,20 +1,21 @@
#!/usr/bin/python3.8

+import hashlib
import os
import tarfile
import uuid
-import hashlib
-import docker

-from common import SIMPLE_OBJ_SIZE, ASSETS_DIR
-from cli_helpers import _cmd_run
-from robot.api.deco import keyword
+import docker
+
from robot.api import logger
+from robot.api.deco import keyword
from robot.libraries.BuiltIn import BuiltIn

+from cli_helpers import _cmd_run
+from common import SIMPLE_OBJ_SIZE, ASSETS_DIR

ROBOT_AUTO_KEYWORDS = False


@keyword('Generate file')
def generate_file_and_file_hash(size: int) -> str:
    """
@@ -60,15 +61,16 @@ def get_container_logs(testcase_name: str) -> None:
        container_name = container['Names'][0][1:]
        if client.inspect_container(container_name)['Config']['Domainname'] == "neofs.devenv":
            file_name = f"{logs_dir}/docker_log_{container_name}"
-            with open(file_name,'wb') as out:
+            with open(file_name, 'wb') as out:
                out.write(client.logs(container_name))
            logger.info(f"Collected logs from container {container_name}")
            tar.add(file_name)
            os.remove(file_name)
    tar.close()
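The hunk shows only the per-container loop of get_container_logs. A self-contained sketch of the surrounding collection logic, assuming `client` is a low-level docker.APIClient and that the archive is written under ASSETS_DIR; the archive name and logs_dir value are guesses, not the repository's code:

    import os
    import tarfile

    import docker

    from common import ASSETS_DIR
    from robot.api import logger


    def get_container_logs_sketch(testcase_name: str) -> None:
        client = docker.APIClient(base_url='unix://var/run/docker.sock')
        logs_dir = ASSETS_DIR                                    # assumed location
        tar = tarfile.open(f"{logs_dir}/dockerlogs_{testcase_name}.tar.gz", "w:gz")
        for container in client.containers():
            container_name = container['Names'][0][1:]
            if client.inspect_container(container_name)['Config']['Domainname'] == "neofs.devenv":
                file_name = f"{logs_dir}/docker_log_{container_name}"
                with open(file_name, 'wb') as out:
                    out.write(client.logs(container_name))
                logger.info(f"Collected logs from container {container_name}")
                tar.add(file_name)
                os.remove(file_name)
        tar.close()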
@keyword('Make Up')
-def make_up(services: list=[], config_dict: dict={}):
+def make_up(services: list = [], config_dict: dict = {}):
    test_path = os.getcwd()
    dev_path = os.getenv('DEVENV_PATH', '../neofs-dev-env')
    os.chdir(dev_path)

@@ -87,8 +89,9 @@ def make_up(services: list=[], config_dict: dict={}):

    os.chdir(test_path)


@keyword('Make Down')
-def make_down(services: list=[]):
+def make_down(services: list = []):
    test_path = os.getcwd()
    dev_path = os.getenv('DEVENV_PATH', '../neofs-dev-env')
    os.chdir(dev_path)
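One thing the spacing fix leaves untouched: services: list = [] and config_dict: dict = {} are mutable default arguments, evaluated once at definition time and shared between calls, so any in-place modification would leak state across test runs. A safer pattern, shown only as a possible follow-up and not as part of this commit:

    from typing import Optional


    def make_up(services: Optional[list] = None, config_dict: Optional[dict] = None):
        # Re-create the defaults on every call so no list/dict instance is shared.
        services = services if services is not None else []
        config_dict = config_dict if config_dict is not None else {}
        # ... the existing devenv `make up` logic would follow here ...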
@@ -1,7 +1,7 @@
import os

-ROOT='../..'
-CERT="%s/../../ca" % ROOT
+ROOT = '../..'
+CERT = "%s/../../ca" % ROOT

# Common NeoFS variables can be declared from neofs-dev-env env variables.
# High priority is accepted for those envs.
@@ -9,7 +9,7 @@ CERT="%s/../../ca" % ROOT
CONTAINER_WAIT_INTERVAL = "1m"

NEOFS_EPOCH_TIMEOUT = (os.getenv("NEOFS_EPOCH_TIMEOUT") if os.getenv("NEOFS_EPOCH_TIMEOUT")
                       else os.getenv("NEOFS_IR_TIMERS_EPOCH", "300s"))

SIMPLE_OBJ_SIZE = 1000
COMPLEX_OBJ_SIZE = 2000
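The NEOFS_EPOCH_TIMEOUT expression queries the same variable twice. An equivalent and shorter form, offered only as a possible simplification (it behaves identically as long as NEOFS_EPOCH_TIMEOUT is never set to an empty string):

    import os

    NEOFS_EPOCH_TIMEOUT = os.getenv("NEOFS_EPOCH_TIMEOUT") or os.getenv("NEOFS_IR_TIMERS_EPOCH", "300s")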
@@ -19,7 +19,7 @@ MAINNET_TIMEOUT = os.getenv('MAINNET_TIMEOUT', "1min")
MORPH_BLOCK_TIME = os.getenv("MORPH_BLOCK_TIME", '1s')
NEOFS_CONTRACT_CACHE_TIMEOUT = os.getenv("NEOFS_CONTRACT_CACHE_TIMEOUT", "30s")

-#TODO: change to NEOFS_STORAGE_DEFAULT_GC_REMOVER_SLEEP_INTERVAL
+# TODO: change to NEOFS_STORAGE_DEFAULT_GC_REMOVER_SLEEP_INTERVAL
SHARD_0_GC_SLEEP = os.getenv("NEOFS_STORAGE_SHARD_0_GC_REMOVER_SLEEP_INTERVAL", "1m")
@@ -37,32 +37,37 @@ S3_GATE = os.getenv("S3_GATE", 'https://s3.neofs.devenv:8080')
GAS_HASH = '0xd2a4cff31913016155e38e474a2c06d08be276cf'

NEOFS_CONTRACT = (os.getenv("NEOFS_CONTRACT") if os.getenv("NEOFS_CONTRACT")
                  else os.getenv("NEOFS_IR_CONTRACTS_NEOFS", '008b43d3de8741b896015f79ac0fbfa4055b4574'))

COMMON_PLACEMENT_RULE = "REP 2 IN X CBF 1 SELECT 4 FROM * AS X"

ASSETS_DIR = os.getenv("ASSETS_DIR", "TemporaryDir/")

-MORPH_MAGIC = os.environ["MORPH_MAGIC"]
+MORPH_MAGIC = os.getenv("MORPH_MAGIC")
GATE_PUB_KEY = '0313b1ac3a8076e155a7e797b24f0b650cccad5941ea59d7cfd51a024a8b2a06bf'
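Note that the MORPH_MAGIC change is behavioural, not cosmetic: os.environ["MORPH_MAGIC"] raises KeyError at import time when the variable is missing, while os.getenv("MORPH_MAGIC") silently returns None, so a misconfigured environment now fails only at the point of first use. A small illustration:

    import os

    os.environ.pop("MORPH_MAGIC", None)       # simulate an environment without the variable

    print(os.getenv("MORPH_MAGIC"))           # prints: None (new behaviour, fails later if at all)
    try:
        os.environ["MORPH_MAGIC"]
    except KeyError:
        print("KeyError at lookup")           # old behaviour, failed immediately at import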
-NEOFS_NETMAP_DICT = {'s01': {'rpc': 's01.neofs.devenv:8080',
-                             'control': 's01.neofs.devenv:8081',
-                             'wif': 'Kwk6k2eC3L3QuPvD8aiaNyoSXgQ2YL1bwS5CP1oKoA9waeAze97s',
-                             'UN-LOCODE': 'RU MOW'},
-                     's02': {'rpc': 's02.neofs.devenv:8080',
-                             'control': 's02.neofs.devenv:8081',
-                             'wif': 'L1NdHdnrTNGQZH1fJSrdUZJyeYFHvaQSSHZHxhK3udiGFdr5YaZ6',
-                             'UN-LOCODE': 'RU LED'},
-                     's03': {'rpc': 's03.neofs.devenv:8080',
-                             'control': 's03.neofs.devenv:8081',
-                             'wif': 'KzN38k39af6ACWJjK8YrnARWo86ddcc1EuBWz7xFEdcELcP3ZTym',
-                             'UN-LOCODE': 'SE STO'},
-                     's04': {'rpc': 's04.neofs.devenv:8080',
-                             'control': 's04.neofs.devenv:8081',
-                             'wif': 'Kzk1Z3dowAqfNyjqeYKWenZMduFV3NAKgXg9K1sA4jRKYxEc8HEW',
-                             'UN-LOCODE': 'FI HEL'}
-                     }
+STORAGE_NODE_1 = os.getenv('DATA_NODE_1', 's01.neofs.devenv:8080')
+STORAGE_NODE_2 = os.getenv('DATA_NODE_2', 's02.neofs.devenv:8080')
+STORAGE_NODE_3 = os.getenv('DATA_NODE_3', 's03.neofs.devenv:8080')
+STORAGE_NODE_4 = os.getenv('DATA_NODE_4', 's04.neofs.devenv:8080')
+
+NEOFS_NETMAP_DICT = {'s01': {'rpc': STORAGE_NODE_1,
+                             'control': 's01.neofs.devenv:8081',
+                             'wif': 'Kwk6k2eC3L3QuPvD8aiaNyoSXgQ2YL1bwS5CP1oKoA9waeAze97s',
+                             'UN-LOCODE': 'RU MOW'},
+                     's02': {'rpc': STORAGE_NODE_2,
+                             'control': 's02.neofs.devenv:8081',
+                             'wif': 'L1NdHdnrTNGQZH1fJSrdUZJyeYFHvaQSSHZHxhK3udiGFdr5YaZ6',
+                             'UN-LOCODE': 'RU LED'},
+                     's03': {'rpc': STORAGE_NODE_3,
+                             'control': 's03.neofs.devenv:8081',
+                             'wif': 'KzN38k39af6ACWJjK8YrnARWo86ddcc1EuBWz7xFEdcELcP3ZTym',
+                             'UN-LOCODE': 'SE STO'},
+                     's04': {'rpc': STORAGE_NODE_4,
+                             'control': 's04.neofs.devenv:8081',
+                             'wif': 'Kzk1Z3dowAqfNyjqeYKWenZMduFV3NAKgXg9K1sA4jRKYxEc8HEW',
+                             'UN-LOCODE': 'FI HEL'}
+                     }
NEOFS_NETMAP = [i['rpc'] for i in NEOFS_NETMAP_DICT.values()]
NEOGO_EXECUTABLE = os.getenv('NEOGO_EXECUTABLE', 'neo-go')
NEOFS_CLI_EXEC = os.getenv('NEOFS_CLI_EXEC', 'neofs-cli')
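With the rpc endpoints now taken from DATA_NODE_1..DATA_NODE_4, a test run can be pointed at a different cluster without editing common.py, as long as the variables are exported before the module is first imported. An illustration with made-up endpoint values:

    import os

    # Placeholders, not real hosts; set them before `common` is imported.
    os.environ['DATA_NODE_1'] = 'node1.example.test:8080'
    os.environ['DATA_NODE_2'] = 'node2.example.test:8080'

    import common  # noqa: E402  (imported after the environment is prepared)
    print(common.NEOFS_NETMAP)   # the rpc endpoints now reflect the overrides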
@@ -1,9 +1,9 @@
# ACLs with set F flag
PUBLIC_ACL_F = "0x1FBFBFFF"
PRIVATE_ACL_F = "0x1C8C8CCC"
READONLY_ACL_F = "0x1FBF8CFF"

# ACLs without F flag set
PUBLIC_ACL = "0x0FBFBFFF"
INACCESSIBLE_ACL = "0x40000000"
STICKYBIT_PUB_ACL = "0x3FFFFFFF"
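These basic-ACL bitmasks are passed verbatim when containers are created. A minimal sketch of feeding one of them to the CLI; the helper below is hypothetical and the container create flag names (--policy, --basic-acl, --await) are assumptions, not confirmed by this diff:

    from cli_helpers import _cmd_run
    from common import NEOFS_CLI_EXEC, NEOFS_ENDPOINT

    PUBLIC_ACL_F = "0x1FBFBFFF"   # value from the table above, F flag set


    def create_public_container_sketch(wallet: str, placement_rule: str) -> str:
        # PUBLIC_ACL_F grants the public permission set with the F (final) bit on.
        cmd = (
            f"{NEOFS_CLI_EXEC} --rpc-endpoint {NEOFS_ENDPOINT} --wallet {wallet} "
            f"container create --policy '{placement_rule}' --basic-acl {PUBLIC_ACL_F} --await"
        )
        return _cmd_run(cmd)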