2022-03-15 11:58:59 +00:00
|
|
|
#!/usr/bin/python3
|
2020-07-01 02:28:31 +00:00
|
|
|
|
2021-08-25 17:08:54 +00:00
|
|
|
import base64
|
|
|
|
from datetime import datetime
|
|
|
|
import json
|
2020-07-01 02:28:31 +00:00
|
|
|
import os
|
|
|
|
import re
|
2021-08-25 17:08:54 +00:00
|
|
|
import random
|
2021-11-03 12:48:31 +00:00
|
|
|
import uuid
|
|
|
|
import docker
|
|
|
|
import base58
|
2021-08-25 17:08:54 +00:00
|
|
|
|
|
|
|
from neo3 import wallet
|
2022-05-20 11:18:14 +00:00
|
|
|
from common import (NEOFS_NETMAP, WALLET_PASS, NEOFS_ENDPOINT,
|
|
|
|
NEOFS_NETMAP_DICT, ASSETS_DIR)
|
|
|
|
from cli_helpers import _cmd_run
|
|
|
|
import json_transformers
|
2020-07-01 02:28:31 +00:00
|
|
|
from robot.api.deco import keyword
|
|
|
|
from robot.api import logger
|
2022-03-15 11:58:59 +00:00
|
|
|
|
2020-07-01 02:28:31 +00:00
|
|
|
ROBOT_AUTO_KEYWORDS = False
|
|
|
|
|
2021-01-17 11:55:10 +00:00
|
|
|
# path to neofs-cli executable
|
|
|
|
NEOFS_CLI_EXEC = os.getenv('NEOFS_CLI_EXEC', 'neofs-cli')
|
2020-11-18 15:15:57 +00:00
|
|
|
|
|
|
|
|
2022-03-15 11:58:59 +00:00
|
|
|
# TODO: move to neofs-keywords
@keyword('Get ScriptHash')
def get_scripthash(wif: str):
    """
    Return the script hash (as a string) of the account derived
    from the given WIF-encoded private key.
    """
    return str(wallet.Account.from_wif(wif, '').script_hash)
|
2020-07-01 02:28:31 +00:00
|
|
|
|
2020-07-14 00:05:22 +00:00
|
|
|
|
2020-11-29 03:44:38 +00:00
|
|
|
@keyword('Stop nodes')
def stop_nodes(down_num: int, *nodes_list):
    """
    Stop the docker containers of `down_num` randomly chosen storage nodes.

    Parameters:
    - down_num: how many nodes to stop
    - nodes_list: node endpoints to choose from (e.g. 's01.neofs.devenv:8080');
      the container name is the leading 's<number>' part of each endpoint

    Returns the list of node endpoints that were stopped, so the caller
    can start them again later.
    """
    # select nodes to stop from list
    nodes = random.sample(nodes_list, down_num)

    # one docker client serves all stop calls; no need to
    # re-create it on every iteration
    client = docker.APIClient()
    for node in nodes:
        # extract the container name ('s01', 's02', ...) from the endpoint;
        # assumes every endpoint matches — TODO confirm inputs always do
        m = re.search(r'(s\d+).', node)
        container = m.group(1)
        client.stop(container)

    # fixed: the original returned the function object itself
    # (`return stop_nodes`); callers need the stopped nodes instead
    return nodes
|
|
|
|
|
|
|
|
|
|
|
|
@keyword('Start nodes')
def start_nodes(*nodes_list):
    """
    Start the docker containers of the given storage nodes.

    Parameters:
    - nodes_list: node endpoints (e.g. 's01.neofs.devenv:8080'); the
      container name is the leading 's<number>' part of each endpoint
    """
    # one docker client serves all start calls; hoisted out of the loop
    client = docker.APIClient()
    for node in nodes_list:
        # extract the container name ('s01', 's02', ...) from the endpoint;
        # assumes every endpoint matches — TODO confirm inputs always do
        m = re.search(r'(s\d+).', node)
        container = m.group(1)
        client.start(container)
|
2022-03-11 16:08:14 +00:00
|
|
|
|
2021-01-17 11:55:10 +00:00
|
|
|
|
2020-07-14 00:05:22 +00:00
|
|
|
@keyword('Get nodes with object')
def get_nodes_with_object(wallet: str, cid: str, oid: str):
    """
    Return the list of netmap nodes on which the given object is stored.

    Parameters:
    - wallet: path to the wallet used for the search requests
    - cid: container ID
    - oid: object ID to look for
    """
    nodes_list = []

    for node in NEOFS_NETMAP:
        search_output = _search_object(node, wallet, cid, oid)
        # the node holds the object only when the search output mentions the OID
        if search_output and oid in search_output:
            nodes_list.append(node)

    logger.info(f"Nodes with object: {nodes_list}")
    return nodes_list
|
2020-07-14 00:05:22 +00:00
|
|
|
|
|
|
|
|
|
|
|
@keyword('Get nodes without object')
def get_nodes_without_object(wallet: str, cid: str, oid: str):
    """
    Return the list of netmap nodes on which the given object is NOT stored.

    Parameters:
    - wallet: path to the wallet used for the search requests
    - cid: container ID
    - oid: object ID to look for
    """
    nodes_list = []

    for node in NEOFS_NETMAP:
        search_res = _search_object(node, wallet, cid, oid)
        # A node is "without" the object when the search output is empty
        # or does not mention the OID. Plain substring membership replaces
        # the original re.search(fr'({oid})', ...): the OID was interpolated
        # into the pattern unescaped, and this also matches the sibling
        # get_nodes_with_object() check.
        if not search_res or oid not in search_res:
            nodes_list.append(node)

    logger.info(f"Nodes without object: {nodes_list}")
    return nodes_list
|
2020-07-14 00:05:22 +00:00
|
|
|
|
2020-07-01 02:28:31 +00:00
|
|
|
|
|
|
|
@keyword('Validate storage policy for object')
def validate_storage_policy_for_object(wallet: str, expected_copies: int, cid, oid,
                                       expected_node_list=None, storage_nodes=None):
    """
    Check that the object is stored on exactly `expected_copies` nodes and,
    optionally, on exactly the expected set of nodes.

    Parameters:
    - wallet: path to the wallet used for the search requests
    - cid: container ID
    - oid: object ID to validate
    - expected_copies: number of object copies the storage policy requires
    - expected_node_list: optional exact list of nodes that must hold the object
    - storage_nodes: nodes to search on; defaults to the whole NEOFS_NETMAP

    Raises an Exception when the copy count or the node set does not match.
    """
    # None defaults instead of mutable `[]` defaults (shared across calls);
    # an explicitly passed empty list still falls back to NEOFS_NETMAP,
    # as in the original `len(storage_nodes) != 0` check
    storage_nodes = storage_nodes if storage_nodes else NEOFS_NETMAP
    expected_node_list = expected_node_list if expected_node_list else []
    copies = 0
    found_nodes = []
    oid = oid.strip()

    for node in storage_nodes:
        res = _search_object(node, wallet, cid, oid)
        if res and oid in res:
            copies += 1
            found_nodes.append(node)

    if copies != expected_copies:
        # fixed missing space between the two message parts
        raise Exception("Object copies is not match storage policy. "
                        f"Found: {copies}, expected: {expected_copies}.")
    else:
        logger.info(f"Found copies: {copies}, expected: {expected_copies}")

    logger.info(f"Found nodes: {found_nodes}")

    if expected_node_list:
        # order-insensitive comparison of the node sets
        if sorted(found_nodes) == sorted(expected_node_list):
            logger.info(f"Found node list '{found_nodes}' "
                        f"is equal for expected list '{expected_node_list}'")
        else:
            raise Exception(f"Found node list '{found_nodes}' "
                            f"is not equal to expected list '{expected_node_list}'")
|
2020-09-01 03:23:17 +00:00
|
|
|
|
2020-07-14 00:05:22 +00:00
|
|
|
|
2020-12-23 22:38:16 +00:00
|
|
|
@keyword('Verify Head Tombstone')
def verify_head_tombstone(wallet: str, cid: str, oid_ts: str, oid: str, addr: str):
    """
    HEAD the object `oid_ts` and verify it is a valid tombstone for the
    deleted object `oid`.

    Parameters:
    - wallet: path to the wallet used for the HEAD request
    - cid: container ID both objects belong to
    - oid_ts: object ID of the tombstone to inspect
    - oid: object ID of the deleted object the tombstone must point to
    - addr: expected owner address of the tombstone

    Raises an Exception as soon as any header field does not match.
    """
    # TODO: replace with HEAD from neofs_verbs.py
    object_cmd = (
        f'{NEOFS_CLI_EXEC} --rpc-endpoint {NEOFS_ENDPOINT} --wallet {wallet} '
        f'--config {WALLET_PASS} object head --cid {cid} --oid {oid_ts} --json'
    )
    output = _cmd_run(object_cmd)
    full_headers = json.loads(output)
    logger.info(f"Output: {full_headers}")

    # Header verification
    header_cid = full_headers["header"]["containerID"]["value"]
    if json_transformers.json_reencode(header_cid) == cid:
        logger.info(f"Header CID is expected: {cid} ({header_cid} in the output)")
    else:
        raise Exception("Header CID is not expected.")

    header_owner = full_headers["header"]["ownerID"]["value"]
    if json_transformers.json_reencode(header_owner) == addr:
        logger.info(f"Header ownerID is expected: {addr} ({header_owner} in the output)")
    else:
        raise Exception("Header ownerID is not expected.")

    header_type = full_headers["header"]["objectType"]
    if header_type == "TOMBSTONE":
        logger.info(f"Header Type is expected: {header_type}")
    else:
        raise Exception("Header Type is not expected.")

    header_session_type = full_headers["header"]["sessionToken"]["body"]["object"]["verb"]
    if header_session_type == "DELETE":
        logger.info(f"Header Session Type is expected: {header_session_type}")
    else:
        raise Exception("Header Session Type is not expected.")

    header_session_cid = full_headers["header"]["sessionToken"]["body"]["object"]["address"]["containerID"]["value"]
    if json_transformers.json_reencode(header_session_cid) == cid:
        # fixed log message: this check validates the session token's
        # container ID against `cid`, not the ownerID
        logger.info(f"Header Session CID is expected: {cid} ({header_session_cid} in the output)")
    else:
        raise Exception("Header Session CID is not expected.")

    header_session_oid = full_headers["header"]["sessionToken"]["body"]["object"]["address"]["objectID"]["value"]
    if json_transformers.json_reencode(header_session_oid) == oid:
        logger.info(f"Header Session OID (deleted object) is expected: {oid} ({header_session_oid} in the output)")
    else:
        raise Exception("Header Session OID (deleted object) is not expected.")
|
2020-07-01 02:28:31 +00:00
|
|
|
|
2020-11-28 03:41:35 +00:00
|
|
|
|
2021-09-09 08:13:37 +00:00
|
|
|
@keyword('Get control endpoint with wif')
def get_control_endpoint_with_wif(endpoint_number: str = ''):
    """
    Resolve a storage node's control endpoint and WIF key.

    Parameters:
    - endpoint_number: key of the node in NEOFS_NETMAP_DICT; when empty,
      a random node is chosen

    Returns a tuple (node key, control endpoint, WIF key).
    """
    if endpoint_number == '':
        # random.choice over the keys replaces the original manual
        # key-copy loop + random.sample(netmap, 1)[0]
        endpoint_num = random.choice(list(NEOFS_NETMAP_DICT.keys()))
        logger.info(f'Random node chosen: {endpoint_num}')
    else:
        endpoint_num = endpoint_number

    endpoint_values = NEOFS_NETMAP_DICT[f'{endpoint_num}']
    endpoint_control = endpoint_values['control']
    wif = endpoint_values['wif']

    return endpoint_num, endpoint_control, wif
|
|
|
|
|
2022-04-25 09:53:20 +00:00
|
|
|
|
2021-10-04 16:16:49 +00:00
|
|
|
@keyword('Get Locode')
def get_locode():
    """Pick a random node from NEOFS_NETMAP_DICT and return its UN/LOCODE."""
    node_values = random.choice(list(NEOFS_NETMAP_DICT.values()))
    locode = node_values['UN-LOCODE']
    logger.info(f'Random locode chosen: {locode}')
    return locode
|
|
|
|
|
|
|
|
|
2021-02-08 05:05:17 +00:00
|
|
|
@keyword('Get Nodes Log Latest Timestamp')
def get_logs_latest_timestamp():
    """
    Collect the timestamp of the most recent log line of every netmap node.

    Keyword return:
    nodes_logs_time -- structure (dict) of nodes container name (key) and latest logs timestamp (value)
    """
    client_api = docker.APIClient()

    nodes_logs_time = {}

    for node in NEOFS_NETMAP:
        # container name is the host part of the endpoint ('s01.neofs.devenv' -> 's01')
        container = node.split('.')[0]
        log_line = client_api.logs(container, tail=1)

        # log lines are timestamped like 2021-02-08T05:05:17.123456Z
        m = re.search(r'(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z)', str(log_line))
        if m is not None:  # fixed: identity check instead of `!= None`
            timestamp = m.group(1)

            # strip the trailing 'Z': fromisoformat() cannot parse it on
            # Python versions before 3.11
            timestamp_date = datetime.fromisoformat(timestamp[:-1])

            nodes_logs_time[container] = timestamp_date

    logger.info(f"Latest logs timestamp list: {nodes_logs_time}")

    return nodes_logs_time
|
|
|
|
|
|
|
|
|
2021-04-26 10:30:40 +00:00
|
|
|
@keyword('Find in Nodes Log')
def find_in_nodes_log(line: str, nodes_logs_time: dict):
    """
    Search every node's docker log (since the node's recorded timestamp)
    for the given pattern.

    Parameters:
    - line: regex pattern to look for (passed to re.findall)
    - nodes_logs_time: dict of container name -> timestamp to search from,
      as produced by `Get Nodes Log Latest Timestamp`

    Raises an Exception when the pattern is found in no node's log;
    returns 1 otherwise.
    """
    client_api = docker.APIClient()
    # set of running container names for O(1) membership tests;
    # docker prefixes names with '/', hence the [1:] slice
    container_names = {
        docker_container['Names'][0][1:]
        for docker_container in client_api.containers()
    }

    global_count = 0

    for container, timestamp_date in nodes_logs_time.items():
        # check if container exists
        if container in container_names:
            # get the log only since the recorded timestamp
            log_lines = client_api.logs(container, since=timestamp_date)
            logger.info(f"Timestamp since: {timestamp_date}")
            found_count = len(re.findall(line, log_lines.decode("utf-8")))
            logger.info(f"Node {container} log - found counter: {found_count}")
            global_count += found_count
        else:
            logger.info(f"Container {container} has not been found.")

    if global_count > 0:
        logger.info(f"Expected line '{line}' has been found in the logs.")
    else:
        raise Exception(f"Expected line '{line}' has not been found in the logs.")

    return 1
|
|
|
|
|
|
|
|
|
2021-11-03 12:48:31 +00:00
|
|
|
@keyword('Generate Session Token')
def generate_session_token(owner: str, pub_key: str, cid: str = "", wildcard: bool = False) -> str:
    """
    Build an unsigned container session token, write it to a file in
    ASSETS_DIR and return the file path.

    Parameters:
    - owner: base58-encoded owner ID
    - pub_key: hex-encoded session public key
    - cid: container ID; omitted from the token when wildcard is True
    - wildcard: whether the token applies to all containers
    """
    file_path = f"{os.getcwd()}/{ASSETS_DIR}/{str(uuid.uuid4())}"

    # the token format expects base64-encoded binary values
    owner_64 = base64.b64encode(base58.b58decode(owner)).decode('utf-8')
    cid_64 = base64.b64encode(cid.encode('utf-8')).decode('utf-8')
    pub_key_64 = base64.b64encode(bytes.fromhex(pub_key)).decode('utf-8')
    id_64 = base64.b64encode(uuid.uuid4().bytes).decode('utf-8')

    # a wildcard token carries no containerID at all
    container_ctx = {
        "verb": "PUT",
        "wildcard": wildcard,
    }
    if not wildcard:
        container_ctx["containerID"] = {"value": f"{cid_64}"}

    session_token = {
        "body": {
            "id": f"{id_64}",
            "ownerID": {
                "value": f"{owner_64}"
            },
            "lifetime": {
                "exp": "100000000",
                "nbf": "0",
                "iat": "0"
            },
            "sessionKey": f"{pub_key_64}",
            "container": container_ctx,
        }
    }

    logger.info(f"Got this Session Token: {session_token}")

    with open(file_path, 'w', encoding='utf-8') as session_token_file:
        json.dump(session_token, session_token_file, ensure_ascii=False, indent=4)

    return file_path
|
|
|
|
|
|
|
|
|
|
|
|
@keyword ('Sign Session Token')
def sign_session_token(session_token: str, wallet: str, to_file: str=''):
    """
    Sign a session token file with `neofs-cli util sign session-token`.

    Parameters:
    - session_token: path to the unsigned session token file
    - wallet: path to the wallet to sign with
    - to_file: optional output path for the signed token
    """
    to_file_arg = f'--to {to_file}' if to_file else ''
    cmd = (
        f'{NEOFS_CLI_EXEC} util sign session-token --from {session_token} '
        f'-w {wallet} {to_file_arg} --config {WALLET_PASS}'
    )
    logger.info(f"cmd: {cmd}")
    _cmd_run(cmd)
|
2021-11-03 12:48:31 +00:00
|
|
|
|
2021-02-10 18:53:26 +00:00
|
|
|
|
2021-05-04 09:27:43 +00:00
|
|
|
def _parse_oid(input_str: str):
    """
    This function parses OID from given CLI output. The input string we
    expect:
        Object successfully stored
        ID: 4MhrLA7RXTBXCsaNnbahYVAPuoQdiUPuyNEWnywvoSEs
        CID: HeZu2DXBuPve6HXbuHZx64knS7KcGtfSj2L59Li72kkg
    We want to take 'ID' value from the string.

    Parameters:
    - input_str: a string with command run output

    Raises an Exception when no OID can be extracted.
    """
    try:
        # taking second string from command output
        snd_str = input_str.split('\n')[1]
    except IndexError:
        # fixed: the original used a bare `except:` that only logged and
        # then fell through to use the unassigned `snd_str` (NameError);
        # fail explicitly instead
        logger.error(f"Got empty input: {input_str}")
        raise Exception(f"no OID was parsed from command output: \t{input_str}")
    splitted = snd_str.split(": ")
    if len(splitted) != 2:
        raise Exception(f"no OID was parsed from command output: \t{snd_str}")
    return splitted[1]
|
|
|
|
|
2020-07-01 02:28:31 +00:00
|
|
|
|
2022-02-01 13:42:41 +00:00
|
|
|
def _search_object(node: str, wallet: str, cid: str, oid: str):
    """
    Run `neofs-cli object search` for the given object on one node
    (ttl 1, so only that node answers) and return the raw CLI output.
    """
    search_cmd = (
        f'{NEOFS_CLI_EXEC} --rpc-endpoint {node} --wallet {wallet} --ttl 1 '
        f'object search --root --cid {cid} --oid {oid} --config {WALLET_PASS}'
    )
    return _cmd_run(search_cmd)
|