forked from TrueCloudLab/frostfs-testcases
Some linter fixes.
Signed-off-by: a.y.volkov <a.y.volkov@yadro.com>
parent e086d0d62b
commit 0e27ea02c1
19 changed files with 291 additions and 275 deletions
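For orientation, most of the diff below applies the same few lint fixes over and over: spaces around `=` in annotated keyword defaults, `'''` docstrings replaced with `"""`, re-aligned continuation lines, and reordered imports. A minimal sketch of the target style follows; the helper function and its arguments are illustrative only and are not part of the commit.

#!/usr/bin/python3.8
"""Illustrative module: shows the post-lint style used throughout this commit."""
import os

NEOFS_CLI_EXEC = os.getenv('NEOFS_CLI_EXEC', 'neofs-cli')


def build_head_cmd(cid: str, oid: str, bearer_token: str = "",
                   endpoint: str = "") -> str:
    """
    Assemble a `neofs-cli object head` command line. This is a hypothetical
    helper, shown only to illustrate spacing and docstring conventions.
    """
    bearer = f'--bearer {bearer_token} ' if bearer_token else ''
    return (
        f'{NEOFS_CLI_EXEC} --rpc-endpoint {endpoint} '
        f'object head --cid {cid} --oid {oid} {bearer}'
    )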
@@ -1,34 +1,34 @@
#!/usr/bin/python3.8
from enum import Enum, auto
import base64
import json
import os
import re
import uuid
from enum import Enum, auto

import base64
import base58
from cli_helpers import _cmd_run
from common import ASSETS_DIR, NEOFS_ENDPOINT, WALLET_PASS
from robot.api.deco import keyword
from robot.api import logger

from robot.api.deco import keyword

"""
Robot Keywords and helper functions for work with NeoFS ACL.
"""

ROBOT_AUTO_KEYWORDS = False

# path to neofs-cli executable
NEOFS_CLI_EXEC = os.getenv('NEOFS_CLI_EXEC', 'neofs-cli')
EACL_LIFETIME = 100500

class AutoName(Enum):
def _generate_next_value_(name, start, count, last_values):
return name

class Role(AutoName):
USER = auto()
SYSTEM = auto()
@@ -65,12 +65,13 @@ def _encode_cid_for_eacl(cid: str) -> str:
cid_base58 = base58.b58decode(cid)
return base64.b64encode(cid_base58).decode("utf-8")

@keyword('Create eACL')
def create_eacl(cid: str, rules_list: list):
table = f"{os.getcwd()}/{ASSETS_DIR}/eacl_table_{str(uuid.uuid4())}.json"
rules = ""
for rule in rules_list:
# TODO: check if $Object: is still necessary for filtering in the newest releases
# TODO: check if $Object: is still necessary for filtering in the newest releases
rules += f"--rule '{rule}' "
cmd = (
f"{NEOFS_CLI_EXEC} acl extended create --cid {cid} "
@@ -99,47 +100,47 @@ def form_bearertoken_file(wif: str, cid: str, eacl_records: list) -> str:
json_eacl = json.loads(eacl)
logger.info(json_eacl)
eacl_result = {
"body":
"body":
{
"eaclTable":
{
"eaclTable":
{
"containerID":
"containerID":
{
"value": enc_cid
},
"records": []
},
"lifetime":
{
"exp": EACL_LIFETIME,
"nbf": "1",
"iat": "0"
}
"records": []
},
"lifetime":
{
"exp": EACL_LIFETIME,
"nbf": "1",
"iat": "0"
}
}
}
}

if not eacl_records:
raise(f"Got empty eacl_records list: {eacl_records}")
raise (f"Got empty eacl_records list: {eacl_records}")
for record in eacl_records:
op_data = {
"operation": record['Operation'],
"action": record['Access'],
"filters": [],
"targets": []
}
"operation": record['Operation'],
"action": record['Access'],
"filters": [],
"targets": []
}

if Role(record['Role']):
op_data['targets'] = [
{
"role": record['Role']
}
]
{
"role": record['Role']
}
]
else:
op_data['targets'] = [
{
"keys": [ record['Role'] ]
}
]
{
"keys": [record['Role']]
}
]

if 'Filters' in record.keys():
op_data["filters"].append(record['Filters'])
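Judging from the keys read in the loop above (`Operation`, `Access`, `Role`, and the optional `Filters`), a record passed to `form_bearertoken_file` would look roughly like the sketch below; the concrete values are assumptions, not taken from the commit.

eacl_records = [
    {
        'Operation': 'GET',    # NeoFS operation the rule applies to
        'Access': 'DENY',      # action, e.g. ALLOW or DENY
        'Role': 'OTHERS',      # role name, or a public key for key-based targets
        # 'Filters': {...}     # optional, appended to op_data["filters"] when present
    },
]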
@@ -6,8 +6,8 @@ and other CLIs.
"""

import subprocess
import pexpect

import pexpect
from robot.api import logger

ROBOT_AUTO_KEYWORDS = False
@@ -21,20 +21,21 @@ def _cmd_run(cmd, timeout=30):
try:
logger.info(f"Executing command: {cmd}")
compl_proc = subprocess.run(cmd, check=True, universal_newlines=True,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT, timeout=timeout,
shell=True)
stdout=subprocess.PIPE, stderr=subprocess.STDOUT, timeout=timeout,
shell=True)
output = compl_proc.stdout
logger.info(f"Output: {output}")
return output
except subprocess.CalledProcessError as exc:
raise RuntimeError(f"Error:\nreturn code: {exc.returncode} "
f"\nOutput: {exc.output}") from exc
f"\nOutput: {exc.output}") from exc
except Exception as exc:
return_code, _ = subprocess.getstatusoutput(cmd)
logger.info(f"Error:\nreturn code: {return_code}\nOutput: "
f"{exc.output.decode('utf-8') if type(exc.output) is bytes else exc.output}")
f"{exc.output.decode('utf-8') if type(exc.output) is bytes else exc.output}")
raise


def _run_with_passwd(cmd):
child = pexpect.spawn(cmd)
child.expect(".*")
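`_cmd_run` is the shell-out helper the other keywords in this commit rely on; a minimal usage sketch (the command string is just an example):

from cli_helpers import _cmd_run

# Runs the command through the shell, logs stdout, and raises RuntimeError
# with the captured output if the process exits with a non-zero code.
version_output = _cmd_run("neofs-cli --version", timeout=30)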
@@ -6,6 +6,7 @@ from robot.api.deco import keyword

ROBOT_AUTO_KEYWORDS = False


@keyword('Run Process And Enter Empty Password')
def run_proccess_and_interact(cmd: str) -> str:
p = pexpect.spawn(cmd)
@@ -10,18 +10,18 @@
first non-null response.
"""

from common import NEOFS_NETMAP
import neofs_verbs

from robot.api.deco import keyword
from robot.api import logger
from robot.api.deco import keyword
from robot.libraries.BuiltIn import BuiltIn

import neofs_verbs
from common import NEOFS_NETMAP

ROBOT_AUTO_KEYWORDS = False


@keyword('Get Link Object')
def get_link_object(wallet: str, cid: str, oid: str, bearer_token: str=""):
def get_link_object(wallet: str, cid: str, oid: str, bearer_token: str = ""):
"""
Args:
wallet (str): path to the wallet on whose behalf the Storage Nodes
@@ -37,10 +37,10 @@ def get_link_object(wallet: str, cid: str, oid: str, bearer_token: str=""):
for node in NEOFS_NETMAP:
try:
resp = neofs_verbs.head_object(wallet, cid, oid,
endpoint=node,
is_raw=True,
is_direct=True,
bearer_token=bearer_token)
endpoint=node,
is_raw=True,
is_direct=True,
bearer_token=bearer_token)
if resp['link']:
return resp['link']
except Exception:
@@ -65,9 +65,9 @@ def get_last_object(wallet: str, cid: str, oid: str):
for node in NEOFS_NETMAP:
try:
resp = neofs_verbs.head_object(wallet, cid, oid,
endpoint=node,
is_raw=True,
is_direct=True)
endpoint=node,
is_raw=True,
is_direct=True)
if resp['lastPart']:
return resp['lastPart']
except Exception:
@@ -8,22 +8,21 @@
import json
import time

from common import NEOFS_ENDPOINT, COMMON_PLACEMENT_RULE, NEOFS_CLI_EXEC, WALLET_PASS
from cli_helpers import _cmd_run
from data_formatters import dict_to_attrs
import json_transformers

from robot.api.deco import keyword
from robot.api import logger
from robot.api.deco import keyword

import json_transformers
from cli_helpers import _cmd_run
from common import NEOFS_ENDPOINT, COMMON_PLACEMENT_RULE, NEOFS_CLI_EXEC, WALLET_PASS
from data_formatters import dict_to_attrs

ROBOT_AUTO_KEYWORDS = False


@keyword('Create Container')
def create_container(wallet: str, rule: str=COMMON_PLACEMENT_RULE, basic_acl: str='',
attributes: dict={}, session_token: str='', session_wallet: str='',
options: str=''):
def create_container(wallet: str, rule: str = COMMON_PLACEMENT_RULE, basic_acl: str = '',
attributes: dict = {}, session_token: str = '', session_wallet: str = '',
options: str = ''):
"""
A wrapper for `neofs-cli container create` call.

@@ -58,18 +57,18 @@ def create_container(wallet: str, rule: str=COMMON_PLACEMENT_RULE, basic_acl: st

logger.info("Container created; waiting until it is persisted in sidechain")

deadline_to_persist = 15 # seconds
deadline_to_persist = 15  # seconds
for i in range(0, deadline_to_persist):
time.sleep(1)
containers = list_containers(wallet)
if cid in containers:
break
logger.info(f"There is no {cid} in {containers} yet; continue")
if i+1 == deadline_to_persist:
if i + 1 == deadline_to_persist:
raise RuntimeError(
f"After {deadline_to_persist} seconds the container "
f"{cid} hasn't been persisted; exiting"
)
f"After {deadline_to_persist} seconds the container "
f"{cid} hasn't been persisted; exiting"
)
return cid


@@ -113,7 +112,7 @@ def get_container(wallet: str, cid: str):
attributes[attr['key']] = attr['value']
container_info['attributes'] = attributes
container_info['ownerID'] = json_transformers.json_reencode(
container_info['ownerID']['value'])
container_info['ownerID']['value'])
return container_info
@@ -7,15 +7,15 @@ from functools import reduce


def dict_to_attrs(attrs: dict):
'''
"""
This function takes dictionary of object attributes and converts them
into the string. The string is passed to `--attibutes` key of the
into the string. The string is passed to `--attributes` key of the
neofs-cli.

Args:
attrs (dict): object attirbutes in {"a": "b", "c": "d"} format.
attrs (dict): object attributes in {"a": "b", "c": "d"} format.

Returns:
(str): string in "a=b,c=d" format.
'''
return reduce(lambda a,b: f"{a},{b}", map(lambda i: f"{i}={attrs[i]}", attrs))
"""
return reduce(lambda a, b: f"{a},{b}", map(lambda i: f"{i}={attrs[i]}", attrs))
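Per the docstring above, `dict_to_attrs` flattens an attribute dict into the comma-separated form expected by the CLI's `--attributes` key; for example:

from data_formatters import dict_to_attrs

assert dict_to_attrs({"a": "b", "c": "d"}) == "a=b,c=d"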
@@ -3,15 +3,16 @@
import shutil

import requests
from robot.api import logger
from robot.api.deco import keyword
from robot.libraries.BuiltIn import BuiltIn

from common import HTTP_GATE
from robot.api.deco import keyword
from robot.api import logger
from robot.libraries.BuiltIn import BuiltIn

ROBOT_AUTO_KEYWORDS = False
ASSETS_DIR = BuiltIn().get_variable_value("${ASSETS_DIR}")


@keyword('Get via HTTP Gate')
def get_via_http_gate(cid: str, oid: str):
"""
@@ -1,24 +1,25 @@
#!/usr/bin/python3

'''
"""
When doing requests to NeoFS, we get JSON output as an automatically decoded
structure from protobuf. Some fields are decoded with boilerplates and binary
values are Base64-encoded.

This module contains functions which rearrange the structure and reencode binary
data from Base64 to Base58.
'''

"""

import base64

import base58

ROBOT_AUTO_KEYWORDS = False


def decode_simple_header(data: dict):
'''
"""
This function reencodes Simple Object header and its attributes.
'''
"""
try:
data = decode_common_fields(data)

@@ -34,32 +35,33 @@ def decode_simple_header(data: dict):


def decode_split_header(data: dict):
'''
"""
This function rearranges Complex Object header.
The header holds SplitID, a random unique
number, which is common among all splitted objects, and IDs of the Linking
Object and the last splitted Object.
'''
"""
try:
data["splitId"] = json_reencode(data["splitId"])
data["lastPart"] = (
json_reencode(data["lastPart"]["value"])
if data["lastPart"] else None
)
json_reencode(data["lastPart"]["value"])
if data["lastPart"] else None
)
data["link"] = (
json_reencode(data["link"]["value"])
if data["link"] else None
)
json_reencode(data["link"]["value"])
if data["link"] else None
)
except Exception as exc:
raise ValueError(f"failed to decode JSON output: {exc}") from exc

return data


def decode_linking_object(data: dict):
'''
"""
This function reencodes Linking Object header.
It contains IDs of child Objects and Split Chain data.
'''
"""
try:
data = decode_simple_header(data)
# reencoding Child Object IDs
@@ -68,13 +70,13 @@ def decode_linking_object(data: dict):
data['header']['split']['children'][ind] = json_reencode(val['value'])
data['header']['split']['splitID'] = json_reencode(data['header']['split']['splitID'])
data['header']['split']['previous'] = (
json_reencode(data['header']['split']['previous']['value'])
if data['header']['split']['previous'] else None
)
json_reencode(data['header']['split']['previous']['value'])
if data['header']['split']['previous'] else None
)
data['header']['split']['parent'] = (
json_reencode(data['header']['split']['parent']['value'])
if data['header']['split']['parent'] else None
)
json_reencode(data['header']['split']['parent']['value'])
if data['header']['split']['parent'] else None
)
except Exception as exc:
raise ValueError(f"failed to decode JSON output: {exc}") from exc

@@ -82,9 +84,9 @@ def decode_linking_object(data: dict):


def decode_storage_group(data: dict):
'''
"""
This function reencodes Storage Group header.
'''
"""
try:
data = decode_common_fields(data)
except Exception as exc:
@@ -92,53 +94,56 @@ def decode_storage_group(data: dict):

return data


def decode_tombstone(data: dict):
'''
"""
This function reencodes Tombstone header.
'''
"""
try:
data = decode_simple_header(data)
data['header']['sessionToken'] = decode_session_token(
data['header']['sessionToken'])
data['header']['sessionToken'])
except Exception as exc:
raise ValueError(f"failed to decode JSON output: {exc}") from exc
return data


def decode_session_token(data: dict):
'''
"""
This function reencodes a fragment of header which contains
information about session token.
'''
"""
data['body']['object']['address']['containerID'] = json_reencode(
data['body']['object']['address']['containerID']['value'])
data['body']['object']['address']['containerID']['value'])
data['body']['object']['address']['objectID'] = json_reencode(
data['body']['object']['address']['objectID']['value'])
data['body']['object']['address']['objectID']['value'])
return data


def json_reencode(data: str):
'''
"""
According to JSON protocol, binary data (Object/Container/Storage Group IDs, etc)
is converted to string via Base58 encoder. But we usually operate with Base64-encoded
format.
This function reencodes given Base58 string into the Base64 one.
'''
"""
return base58.b58encode(base64.b64decode(data)).decode("utf-8")


def encode_for_json(data: str):
'''
"""
This function encodes binary data for sending them as protobuf
structures.
'''
"""
return base64.b64encode(base58.b58decode(data)).decode('utf-8')


def decode_common_fields(data: dict):
'''
"""
Despite of type (simple/complex Object, Storage Group, etc) every Object
header contains several common fields.
This function rearranges these fields.
'''
"""
# reencoding binary IDs
data["objectID"] = json_reencode(data["objectID"]["value"])
data["header"]["containerID"] = json_reencode(data["header"]["containerID"]["value"])
@@ -146,6 +151,6 @@ def decode_common_fields(data: dict):
data["header"]["homomorphicHash"] = json_reencode(data["header"]["homomorphicHash"]["sum"])
data["header"]["payloadHash"] = json_reencode(data["header"]["payloadHash"]["sum"])
data["header"]["version"] = (
f"{data['header']['version']['major']}{data['header']['version']['minor']}"
)
f"{data['header']['version']['major']}{data['header']['version']['minor']}"
)
return data
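As the function bodies above show, `json_reencode` and `encode_for_json` are inverse transcodings between the Base64 strings found in the CLI's JSON output and the Base58 IDs used elsewhere. A small round-trip sketch; the container ID value is made up:

import json_transformers

cid_b58 = "7eJLgXDdnFzHtVmnCKNjrcUfPbmWNuV1caKGGxtAbjwd"   # made-up Base58 ID
cid_b64 = json_transformers.encode_for_json(cid_b58)        # Base58 -> Base64, as sent in JSON
assert json_transformers.json_reencode(cid_b64) == cid_b58  # Base64 -> Base58, as read back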
@@ -5,12 +5,13 @@ import os
import random

from neo3 import wallet
from common import NEOFS_NETMAP_DICT
import neofs_verbs
from robot.api.deco import keyword
from robot.api import logger
from robot.api.deco import keyword
from robot.libraries.BuiltIn import BuiltIn

import neofs_verbs
from common import NEOFS_NETMAP_DICT

ROBOT_AUTO_KEYWORDS = False

# path to neofs-cli executable
@@ -30,7 +31,7 @@ def verify_head_tombstone(wallet_path: str, cid: str, oid_ts: str, oid: str):
header = header['header']

BuiltIn().should_be_equal(header["containerID"], cid,
msg="Tombstone Header CID is wrong")
msg="Tombstone Header CID is wrong")

wlt_data = dict()
with open(wallet_path, 'r') as fout:
@@ -39,21 +40,21 @@ def verify_head_tombstone(wallet_path: str, cid: str, oid_ts: str, oid: str):
addr = wlt.accounts[0].address

BuiltIn().should_be_equal(header["ownerID"], addr,
msg="Tombstone Owner ID is wrong")
msg="Tombstone Owner ID is wrong")

BuiltIn().should_be_equal(header["objectType"], 'TOMBSTONE',
msg="Header Type isn't Tombstone")
msg="Header Type isn't Tombstone")

BuiltIn().should_be_equal(header["sessionToken"]["body"]["object"]["verb"], 'DELETE',
msg="Header Session Type isn't DELETE")
msg="Header Session Type isn't DELETE")

BuiltIn().should_be_equal(header["sessionToken"]["body"]["object"]["address"]["containerID"],
cid,
msg="Header Session ID is wrong")
cid,
msg="Header Session ID is wrong")

BuiltIn().should_be_equal(header["sessionToken"]["body"]["object"]["address"]["objectID"],
oid,
msg="Header Session OID is wrong")
oid,
msg="Header Session OID is wrong")


@keyword('Get control endpoint with wif')
@@ -6,17 +6,17 @@

import json
import os
import re
import random
import re
import uuid

from common import NEOFS_ENDPOINT, ASSETS_DIR, NEOFS_NETMAP, WALLET_PASS
from cli_helpers import _cmd_run
import json_transformers
from data_formatters import dict_to_attrs

from robot.api.deco import keyword
from robot.api import logger
from robot.api.deco import keyword

import json_transformers
from cli_helpers import _cmd_run
from common import NEOFS_ENDPOINT, ASSETS_DIR, NEOFS_NETMAP, WALLET_PASS
from data_formatters import dict_to_attrs

ROBOT_AUTO_KEYWORDS = False

@@ -25,9 +25,9 @@ NEOFS_CLI_EXEC = os.getenv('NEOFS_CLI_EXEC', 'neofs-cli')


@keyword('Get object')
def get_object(wallet: str, cid: str, oid: str, bearer_token: str="",
write_object: str="", endpoint: str="", options: str="" ):
'''
def get_object(wallet: str, cid: str, oid: str, bearer_token: str = "",
write_object: str = "", endpoint: str = "", options: str = ""):
"""
GET from NeoFS.

Args:
@@ -40,7 +40,7 @@ def get_object(wallet: str, cid: str, oid: str, bearer_token: str="",
options (optional, str): any options which `neofs-cli object get` accepts
Returns:
(str): path to downloaded file
'''
"""

if not write_object:
write_object = str(uuid.uuid4())
@@ -62,8 +62,8 @@ def get_object(wallet: str, cid: str, oid: str, bearer_token: str="",
# TODO: make `bearer_token` optional
@keyword('Get Range Hash')
def get_range_hash(wallet: str, cid: str, oid: str, bearer_token: str, range_cut: str,
options: str=""):
'''
options: str = ""):
"""
GETRANGEHASH of given Object.

Args:
@@ -76,7 +76,7 @@ def get_range_hash(wallet: str, cid: str, oid: str, bearer_token: str, range_cut
options (optional, str): any options which `neofs-cli object hash` accepts
Returns:
None
'''
"""
cmd = (
f'{NEOFS_CLI_EXEC} --rpc-endpoint {NEOFS_ENDPOINT} --wallet {wallet} '
f'object hash --cid {cid} --oid {oid} --range {range_cut} --config {WALLET_PASS} '
@@ -89,9 +89,9 @@ def get_range_hash(wallet: str, cid: str, oid: str, bearer_token: str, range_cut


@keyword('Put object')
def put_object(wallet: str, path: str, cid: str, bearer: str="", user_headers: dict={},
endpoint: str="", options: str="" ):
'''
def put_object(wallet: str, path: str, cid: str, bearer: str = "", user_headers: dict = {},
endpoint: str = "", options: str = ""):
"""
PUT of given file.

Args:
@@ -104,7 +104,7 @@ def put_object(wallet: str, path: str, cid: str, bearer: str="", user_headers: d
options (optional, str): any options which `neofs-cli object put` accepts
Returns:
(str): ID of uploaded Object
'''
"""
if not endpoint:
endpoint = random.sample(NEOFS_NETMAP, 1)[0]
cmd = (
@@ -121,8 +121,8 @@ def put_object(wallet: str, path: str, cid: str, bearer: str="", user_headers: d


@keyword('Delete object')
def delete_object(wallet: str, cid: str, oid: str, bearer: str="", options: str=""):
'''
def delete_object(wallet: str, cid: str, oid: str, bearer: str = "", options: str = ""):
"""
DELETE an Object.

Args:
@@ -133,7 +133,7 @@ def delete_object(wallet: str, cid: str, oid: str, bearer: str="", options: str=
options (optional, str): any options which `neofs-cli object delete` accepts
Returns:
(str): Tombstone ID
'''
"""
cmd = (
f'{NEOFS_CLI_EXEC} --rpc-endpoint {NEOFS_ENDPOINT} --wallet {wallet} '
f'object delete --cid {cid} --oid {oid} {options} --config {WALLET_PASS} '
@@ -149,20 +149,21 @@ def delete_object(wallet: str, cid: str, oid: str, bearer: str="", options: str=
# TODO: make `bearer` an optional parameter
@keyword('Get Range')
def get_range(wallet: str, cid: str, oid: str, file_path: str, bearer: str, range_cut: str,
options:str=""):
'''
options: str = ""):
"""
GETRANGE an Object.

Args:
wallet (str): wallet on whose behalf GETRANGE is done
cid (str): ID of Container where we get the Object from
oid (str): ID of Object we are going to request
file_path (str): file path
range_cut (str): range to take data from in the form offset:length
bearer (optional, str): path to Bearer Token file, appends to `--bearer` key
options (optional, str): any options which `neofs-cli object range` accepts
Returns:
(void)
'''
"""
range_file = f"{ASSETS_DIR}/{uuid.uuid4()}"
cmd = (
f'{NEOFS_CLI_EXEC} --rpc-endpoint {NEOFS_ENDPOINT} --wallet {wallet} '
@@ -178,9 +179,9 @@ def get_range(wallet: str, cid: str, oid: str, file_path: str, bearer: str, rang


@keyword('Search object')
def search_object(wallet: str, cid: str, keys: str="", bearer: str="", filters: dict={},
expected_objects_list=[]):
'''
def search_object(wallet: str, cid: str, keys: str = "", bearer: str = "", filters: dict = {},
expected_objects_list=[]):
"""
SEARCH an Object.

Args:
@@ -193,7 +194,7 @@ def search_object(wallet: str, cid: str, keys: str="", bearer: str="", filters:
expected_objects_list (optional, list): a list of ObjectIDs to compare found Objects with
Returns:
(list): list of found ObjectIDs
'''
"""
filters_result = ""
if filters:
filters_result += "--filters "
@@ -215,16 +216,16 @@ def search_object(wallet: str, cid: str, keys: str="", bearer: str="", filters:
f"is equal for expected list '{expected_objects_list}'")
else:
logger.warn(f"Found object list {found_objects} ",
f"is not equal to expected list '{expected_objects_list}'")
f"is not equal to expected list '{expected_objects_list}'")

return found_objects


@keyword('Head object')
def head_object(wallet: str, cid: str, oid: str, bearer_token: str="",
options:str="", endpoint: str="", json_output: bool = True,
is_raw: bool = False, is_direct: bool = False):
'''
def head_object(wallet: str, cid: str, oid: str, bearer_token: str = "",
options: str = "", endpoint: str = "", json_output: bool = True,
is_raw: bool = False, is_direct: bool = False):
"""
HEAD an Object.

Args:
@@ -245,7 +246,7 @@ def head_object(wallet: str, cid: str, oid: str, bearer_token: str="",
(dict): HEAD response in JSON format
or
(str): HEAD response as a plain text
'''
"""
cmd = (
f'{NEOFS_CLI_EXEC} --rpc-endpoint {endpoint if endpoint else NEOFS_ENDPOINT} '
f'--wallet {wallet} --config {WALLET_PASS} '
@@ -8,11 +8,11 @@
import random

import docker

from robot.api.deco import keyword

ROBOT_AUTO_KEYWORDS = False


@keyword('Stop Nodes')
def stop_nodes(number: int, nodes: list):
"""
@@ -1,16 +1,15 @@
#!/usr/bin/python3

import os
import pexpect
import re

from robot.api.deco import keyword
from robot.api import logger
import pexpect
from neo3 import wallet
from robot.api import logger
from robot.api.deco import keyword

from common import *
import rpc_client
import contract
import rpc_client
from common import *
from wrappers import run_sh_with_passwd_contract

ROBOT_AUTO_KEYWORDS = False
@@ -65,20 +64,20 @@ def get_balance(wif: str):

acc = wallet.Account.from_wif(wif, '')
payload = [
{
'type': 'Hash160',
'value': str(acc.script_hash)
}
]
{
'type': 'Hash160',
'value': str(acc.script_hash)
}
]
try:
resp = morph_rpc_cli.invoke_function(
contract.get_balance_contract_hash(NEOFS_NEO_API_ENDPOINT),
'balanceOf',
payload
)
contract.get_balance_contract_hash(NEOFS_NEO_API_ENDPOINT),
'balanceOf',
payload
)
logger.info(resp)
value = int(resp['stack'][0]['value'])
return value/(10**MORPH_TOKEN_POWER)
return value / (10 ** MORPH_TOKEN_POWER)
except Exception as e:
logger.error(f"failed to get {wif} balance: {e}")
raise e
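The `value / (10 ** MORPH_TOKEN_POWER)` line above scales the raw integer returned on the VM stack into a human-readable balance. A tiny sketch, assuming a token precision of 12 (the real constant is defined elsewhere in the module's configuration):

MORPH_TOKEN_POWER = 12                 # assumed precision, not taken from this commit
raw_stack_value = 50_000_000_000_000   # example integer from resp['stack'][0]['value']
balance = raw_stack_value / (10 ** MORPH_TOKEN_POWER)  # -> 50.0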
@@ -6,12 +6,12 @@ import uuid

import boto3
import botocore
from cli_helpers import _run_with_passwd

from common import GATE_PUB_KEY, NEOFS_ENDPOINT, S3_GATE
import urllib3
from robot.api.deco import keyword
from robot.api import logger
from robot.api.deco import keyword

from cli_helpers import _run_with_passwd
from common import GATE_PUB_KEY, NEOFS_ENDPOINT, S3_GATE

##########################################################
# Disabling warnings on self-signed certificate which the
@@ -24,6 +24,7 @@ CREDENTIALS_CREATE_TIMEOUT = '30s'

NEOFS_EXEC = os.getenv('NEOFS_EXEC', 'neofs-authmate')


@keyword('Init S3 Credentials')
def init_s3_credentials(wallet):
bucket = str(uuid.uuid4())
@@ -71,7 +72,7 @@ def config_s3_client(access_key_id, secret_access_key):

except botocore.exceptions.ClientError as err:
raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err


@keyword('List objects S3 v2')
@@ -87,7 +88,7 @@ def list_objects_s3_v2(s3_client, bucket):

except botocore.exceptions.ClientError as err:
raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err


@keyword('List objects S3')
@@ -103,7 +104,7 @@ def list_objects_s3(s3_client, bucket):

except botocore.exceptions.ClientError as err:
raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err


@keyword('Create bucket S3')
@@ -117,7 +118,7 @@ def create_bucket_s3(s3_client):

except botocore.exceptions.ClientError as err:
raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err


@keyword('List buckets S3')
@@ -134,7 +135,7 @@ def list_buckets_s3(s3_client):

except botocore.exceptions.ClientError as err:
raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err


@keyword('Delete bucket S3')
@@ -147,7 +148,7 @@ def delete_bucket_s3(s3_client, bucket):

except botocore.exceptions.ClientError as err:
raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err


@keyword('Head bucket S3')
@@ -159,7 +160,7 @@ def head_bucket(s3_client, bucket):

except botocore.exceptions.ClientError as err:
raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err


@keyword('Put object S3')
@@ -174,12 +175,11 @@ def put_object_s3(s3_client, bucket, filepath):
logger.info(f"S3 Put object result: {response}")
except botocore.exceptions.ClientError as err:
raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err


@keyword('Head object S3')
def head_object_s3(s3_client, bucket, object_key):

try:
response = s3_client.head_object(Bucket=bucket, Key=object_key)
logger.info(f"S3 Head object result: {response}")
@@ -187,7 +187,7 @@ def head_object_s3(s3_client, bucket, object_key):

except botocore.exceptions.ClientError as err:
raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err


@keyword('Delete object S3')
@@ -199,7 +199,7 @@ def delete_object_s3(s3_client, bucket, object_key):

except botocore.exceptions.ClientError as err:
raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err


@keyword('Copy object S3')
@@ -207,14 +207,14 @@ def copy_object_s3(s3_client, bucket, object_key):
filename = f"{os.getcwd()}/{uuid.uuid4()}"
try:
response = s3_client.copy_object(Bucket=bucket,
CopySource=f"{bucket}/{object_key}",
Key=filename)
CopySource=f"{bucket}/{object_key}",
Key=filename)
logger.info(f"S3 Copy object result: {response}")
return filename

except botocore.exceptions.ClientError as err:
raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err


@keyword('Get object S3')
@@ -233,4 +233,4 @@ def get_object_s3(s3_client, bucket, object_key):

except botocore.exceptions.ClientError as err:
raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
@@ -10,12 +10,12 @@ import os
import uuid

from neo3 import wallet
from common import WALLET_PASS, ASSETS_DIR
from cli_helpers import _cmd_run
import json_transformers

from robot.api.deco import keyword
from robot.api import logger
from robot.api.deco import keyword

import json_transformers
from cli_helpers import _cmd_run
from common import WALLET_PASS, ASSETS_DIR

ROBOT_AUTO_KEYWORDS = False

@@ -24,7 +24,7 @@ NEOFS_CLI_EXEC = os.getenv('NEOFS_CLI_EXEC', 'neofs-cli')


@keyword('Generate Session Token')
def generate_session_token(owner: str, session_wallet: str, cid: str='') -> str:
def generate_session_token(owner: str, session_wallet: str, cid: str = '') -> str:
"""
This function generates session token for ContainerSessionContext
and writes it to the file. It is able to prepare session token file
@@ -47,34 +47,33 @@ def generate_session_token(owner: str, session_wallet: str, cid: str='') -> str:
session_wlt_content = json.load(fout)
session_wlt = wallet.Wallet.from_json(session_wlt_content, password="")
pub_key_64 = base64.b64encode(
bytes.fromhex(
str(session_wlt.accounts[0].public_key)
)
).decode('utf-8')
bytes.fromhex(
str(session_wlt.accounts[0].public_key)
)
).decode('utf-8')

session_token = {
"body":{
"id":f"{base64.b64encode(uuid.uuid4().bytes).decode('utf-8')}",
"ownerID":{
"value":f"{json_transformers.encode_for_json(owner)}"
},
"lifetime":{
"exp":"100000000",
"nbf":"0",
"iat":"0"
},
"sessionKey":f"{pub_key_64}",
"container":{
"verb":"PUT",
"wildcard": cid != '',
**({ "containerID":
{"value":
f"{base64.b64encode(cid.encode('utf-8')).decode('utf-8')}"}
} if cid != '' else {}
)
}
}
}
"body": {
"id": f"{base64.b64encode(uuid.uuid4().bytes).decode('utf-8')}",
"ownerID": {
"value": f"{json_transformers.encode_for_json(owner)}"
},
"lifetime": {
"exp": "100000000",
"nbf": "0",
"iat": "0"
},
"sessionKey": f"{pub_key_64}",
"container": {
"verb": "PUT",
"wildcard": cid != '',
**({"containerID":
{"value": f"{base64.b64encode(cid.encode('utf-8')).decode('utf-8')}"}
} if cid != '' else {}
)
}
}
}

logger.info(f"Got this Session Token: {session_token}")
with open(file_path, 'w', encoding='utf-8') as session_token_file:
@@ -83,7 +82,7 @@ def generate_session_token(owner: str, session_wallet: str, cid: str='') -> str:
return file_path


@keyword ('Sign Session Token')
@keyword('Sign Session Token')
def sign_session_token(session_token: str, wlt: str):
"""
This function signs the session token by the given wallet.
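A usage sketch of the two keywords above, generating a container session token and then signing it; the owner address and file paths are placeholders, not values from the commit:

token_path = generate_session_token(
    owner="NiGqBpUdMvAC68SVFeFg6zyzr5qMZdHMDY",      # placeholder Base58 owner address
    session_wallet="/path/to/session_wallet.json",   # placeholder wallet path
    cid="",                                          # empty cid omits the containerID block
)
sign_session_token(token_path, "/path/to/owner_wallet.json")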
@@ -5,16 +5,16 @@
It contains wrappers for `neofs-cli storagegroup` verbs.
"""

from cli_helpers import _cmd_run

from common import NEOFS_CLI_EXEC, NEOFS_ENDPOINT, WALLET_PASS
from robot.api.deco import keyword

from cli_helpers import _cmd_run
from common import NEOFS_CLI_EXEC, NEOFS_ENDPOINT, WALLET_PASS

ROBOT_AUTO_KEYWORDS = False


@keyword('Put Storagegroup')
def put_storagegroup(wallet: str, cid: str, objects: list, bearer_token: str=""):
def put_storagegroup(wallet: str, cid: str, objects: list, bearer_token: str = ""):
"""
Wrapper for `neofs-cli storagegroup put`. Before the SG is created,
neofs-cli performs HEAD on `objects`, so this verb must be allowed
@@ -40,7 +40,7 @@ def put_storagegroup(wallet: str, cid: str, objects: list, bearer_token: str="")


@keyword('List Storagegroup')
def list_storagegroup(wallet: str, cid: str, bearer_token: str=""):
def list_storagegroup(wallet: str, cid: str, bearer_token: str = ""):
"""
Wrapper for `neofs-cli storagegroup list`. This operation
requires SEARCH allowed for `wallet` in `cid`.
@@ -64,7 +64,7 @@ def list_storagegroup(wallet: str, cid: str, bearer_token: str=""):


@keyword('Get Storagegroup')
def get_storagegroup(wallet: str, cid: str, oid: str, bearer_token: str=''):
def get_storagegroup(wallet: str, cid: str, oid: str, bearer_token: str = ''):
"""
Wrapper for `neofs-cli storagegroup get`.
Args:
@@ -103,7 +103,7 @@ def get_storagegroup(wallet: str, cid: str, oid: str, bearer_token: str=''):


@keyword('Delete Storagegroup')
def delete_storagegroup(wallet: str, cid: str, oid: str, bearer_token: str=""):
def delete_storagegroup(wallet: str, cid: str, oid: str, bearer_token: str = ""):
"""
Wrapper for `neofs-cli storagegroup delete`.
Args:
@@ -5,12 +5,12 @@
that storage policies are kept.
'''

from common import NEOFS_NETMAP
from robot.api import logger
from robot.api.deco import keyword

import complex_object_actions
import neofs_verbs

from robot.api.deco import keyword
from robot.api import logger
from common import NEOFS_NETMAP

ROBOT_AUTO_KEYWORDS = False

@@ -54,8 +54,8 @@ def get_simple_object_copies(wallet: str, cid: str, oid: str):
for node in NEOFS_NETMAP:
try:
response = neofs_verbs.head_object(wallet, cid, oid,
endpoint=node,
is_direct=True)
endpoint=node,
is_direct=True)
if response:
copies += 1
except Exception as exc:
@@ -101,8 +101,8 @@ def get_nodes_with_object(wallet: str, cid: str, oid: str):
for node in NEOFS_NETMAP:
try:
res = neofs_verbs.head_object(wallet, cid, oid,
endpoint=node,
is_direct=True)
endpoint=node,
is_direct=True)
if res is not None:
nodes_list.append(node)
except Exception as exc:
@@ -127,8 +127,8 @@ def get_nodes_without_object(wallet: str, cid: str, oid: str):
nodes_list = []
for node in NEOFS_NETMAP:
res = neofs_verbs.head_object(wallet, cid, oid,
endpoint=node,
is_direct=True)
endpoint=node,
is_direct=True)
if res is None:
nodes_list.append(node)
return nodes_list
@@ -1,20 +1,21 @@
#!/usr/bin/python3.8

import hashlib
import os
import tarfile
import uuid
import hashlib
import docker

from common import SIMPLE_OBJ_SIZE, ASSETS_DIR
from cli_helpers import _cmd_run
from robot.api.deco import keyword
import docker
from robot.api import logger
from robot.api.deco import keyword
from robot.libraries.BuiltIn import BuiltIn

from cli_helpers import _cmd_run
from common import SIMPLE_OBJ_SIZE, ASSETS_DIR

ROBOT_AUTO_KEYWORDS = False


@keyword('Generate file')
def generate_file_and_file_hash(size: int) -> str:
"""
@@ -60,15 +61,16 @@ def get_container_logs(testcase_name: str) -> None:
container_name = container['Names'][0][1:]
if client.inspect_container(container_name)['Config']['Domainname'] == "neofs.devenv":
file_name = f"{logs_dir}/docker_log_{container_name}"
with open(file_name,'wb') as out:
with open(file_name, 'wb') as out:
out.write(client.logs(container_name))
logger.info(f"Collected logs from container {container_name}")
tar.add(file_name)
os.remove(file_name)
tar.close()


@keyword('Make Up')
def make_up(services: list=[], config_dict: dict={}):
def make_up(services: list = [], config_dict: dict = {}):
test_path = os.getcwd()
dev_path = os.getenv('DEVENV_PATH', '../neofs-dev-env')
os.chdir(dev_path)
@@ -87,8 +89,9 @@ def make_up(services: list=[], config_dict: dict={}):

os.chdir(test_path)


@keyword('Make Down')
def make_down(services: list=[]):
def make_down(services: list = []):
test_path = os.getcwd()
dev_path = os.getenv('DEVENV_PATH', '../neofs-dev-env')
os.chdir(dev_path)
@@ -1,7 +1,7 @@
import os

ROOT='../..'
CERT="%s/../../ca" % ROOT
ROOT = '../..'
CERT = "%s/../../ca" % ROOT

# Common NeoFS variables can be declared from neofs-dev-env env variables.
# High priority is accepted for those envs.
@@ -9,7 +9,7 @@ CERT="%s/../../ca" % ROOT
CONTAINER_WAIT_INTERVAL = "1m"

NEOFS_EPOCH_TIMEOUT = (os.getenv("NEOFS_EPOCH_TIMEOUT") if os.getenv("NEOFS_EPOCH_TIMEOUT")
else os.getenv("NEOFS_IR_TIMERS_EPOCH", "300s"))
else os.getenv("NEOFS_IR_TIMERS_EPOCH", "300s"))

SIMPLE_OBJ_SIZE = 1000
COMPLEX_OBJ_SIZE = 2000
@@ -19,7 +19,7 @@ MAINNET_TIMEOUT = os.getenv('MAINNET_TIMEOUT', "1min")
MORPH_BLOCK_TIME = os.getenv("MORPH_BLOCK_TIME", '1s')
NEOFS_CONTRACT_CACHE_TIMEOUT = os.getenv("NEOFS_CONTRACT_CACHE_TIMEOUT", "30s")

#TODO: change to NEOFS_STORAGE_DEFAULT_GC_REMOVER_SLEEP_INTERVAL
# TODO: change to NEOFS_STORAGE_DEFAULT_GC_REMOVER_SLEEP_INTERVAL

SHARD_0_GC_SLEEP = os.getenv("NEOFS_STORAGE_SHARD_0_GC_REMOVER_SLEEP_INTERVAL", "1m")
@@ -37,32 +37,37 @@ S3_GATE = os.getenv("S3_GATE", 'https://s3.neofs.devenv:8080')
GAS_HASH = '0xd2a4cff31913016155e38e474a2c06d08be276cf'

NEOFS_CONTRACT = (os.getenv("NEOFS_CONTRACT") if os.getenv("NEOFS_CONTRACT")
else os.getenv("NEOFS_IR_CONTRACTS_NEOFS", '008b43d3de8741b896015f79ac0fbfa4055b4574'))
else os.getenv("NEOFS_IR_CONTRACTS_NEOFS", '008b43d3de8741b896015f79ac0fbfa4055b4574'))

COMMON_PLACEMENT_RULE = "REP 2 IN X CBF 1 SELECT 4 FROM * AS X"

ASSETS_DIR = os.getenv("ASSETS_DIR", "TemporaryDir/")

MORPH_MAGIC = os.environ["MORPH_MAGIC"]
MORPH_MAGIC = os.getenv("MORPH_MAGIC")
GATE_PUB_KEY = '0313b1ac3a8076e155a7e797b24f0b650cccad5941ea59d7cfd51a024a8b2a06bf'

NEOFS_NETMAP_DICT = {'s01': {'rpc': 's01.neofs.devenv:8080',
'control': 's01.neofs.devenv:8081',
'wif': 'Kwk6k2eC3L3QuPvD8aiaNyoSXgQ2YL1bwS5CP1oKoA9waeAze97s',
'UN-LOCODE': 'RU MOW'},
's02': {'rpc': 's02.neofs.devenv:8080',
'control': 's02.neofs.devenv:8081',
'wif': 'L1NdHdnrTNGQZH1fJSrdUZJyeYFHvaQSSHZHxhK3udiGFdr5YaZ6',
'UN-LOCODE': 'RU LED'},
's03': {'rpc': 's03.neofs.devenv:8080',
'control': 's03.neofs.devenv:8081',
'wif': 'KzN38k39af6ACWJjK8YrnARWo86ddcc1EuBWz7xFEdcELcP3ZTym',
'UN-LOCODE': 'SE STO'},
's04': {'rpc': 's04.neofs.devenv:8080',
'control': 's04.neofs.devenv:8081',
'wif': 'Kzk1Z3dowAqfNyjqeYKWenZMduFV3NAKgXg9K1sA4jRKYxEc8HEW',
'UN-LOCODE': 'FI HEL'}
}
STORAGE_NODE_1 = os.getenv('DATA_NODE_1', 's01.neofs.devenv:8080')
STORAGE_NODE_2 = os.getenv('DATA_NODE_2', 's02.neofs.devenv:8080')
STORAGE_NODE_3 = os.getenv('DATA_NODE_3', 's03.neofs.devenv:8080')
STORAGE_NODE_4 = os.getenv('DATA_NODE_4', 's04.neofs.devenv:8080')

NEOFS_NETMAP_DICT = {'s01': {'rpc': STORAGE_NODE_1,
'control': 's01.neofs.devenv:8081',
'wif': 'Kwk6k2eC3L3QuPvD8aiaNyoSXgQ2YL1bwS5CP1oKoA9waeAze97s',
'UN-LOCODE': 'RU MOW'},
's02': {'rpc': STORAGE_NODE_2,
'control': 's02.neofs.devenv:8081',
'wif': 'L1NdHdnrTNGQZH1fJSrdUZJyeYFHvaQSSHZHxhK3udiGFdr5YaZ6',
'UN-LOCODE': 'RU LED'},
's03': {'rpc': STORAGE_NODE_3,
'control': 's03.neofs.devenv:8081',
'wif': 'KzN38k39af6ACWJjK8YrnARWo86ddcc1EuBWz7xFEdcELcP3ZTym',
'UN-LOCODE': 'SE STO'},
's04': {'rpc': STORAGE_NODE_4,
'control': 's04.neofs.devenv:8081',
'wif': 'Kzk1Z3dowAqfNyjqeYKWenZMduFV3NAKgXg9K1sA4jRKYxEc8HEW',
'UN-LOCODE': 'FI HEL'}
}
NEOFS_NETMAP = [i['rpc'] for i in NEOFS_NETMAP_DICT.values()]
NEOGO_EXECUTABLE = os.getenv('NEOGO_EXECUTABLE', 'neo-go')
NEOFS_CLI_EXEC = os.getenv('NEOFS_CLI_EXEC', 'neofs-cli')
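The netmap constants above are what the object keywords in this commit iterate over. A quick sketch of how a test helper picks endpoints from them, mirroring the `random.sample(NEOFS_NETMAP, 1)[0]` pattern used in `put_object`:

import random

from common import NEOFS_NETMAP, NEOFS_NETMAP_DICT

endpoint = random.sample(NEOFS_NETMAP, 1)[0]      # any storage node RPC endpoint
control = NEOFS_NETMAP_DICT['s01']['control']     # control endpoint of a specific node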
@@ -1,9 +1,9 @@
# ACLs with set F flag
PUBLIC_ACL_F = "0x1FBFBFFF"
PUBLIC_ACL_F = "0x1FBFBFFF"
PRIVATE_ACL_F = "0x1C8C8CCC"
READONLY_ACL_F = "0x1FBF8CFF"

# ACLs without F flag set
PUBLIC_ACL = "0x0FBFBFFF"
PUBLIC_ACL = "0x0FBFBFFF"
INACCESSIBLE_ACL = "0x40000000"
STICKYBIT_PUB_ACL = "0x3FFFFFFF"