#!/usr/bin/python3.8

import base58
import base64
import binascii
import hashlib
import json
import os
import random
import re
import subprocess
from datetime import datetime

import docker
from neo3 import wallet
from robot.api import logger
from robot.api.deco import keyword

from cli_helpers import _cmd_run
from common import *

ROBOT_AUTO_KEYWORDS = False

# Path to the neofs-cli executable
NEOFS_CLI_EXEC = os.getenv('NEOFS_CLI_EXEC', 'neofs-cli')
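
# Example environment override (hypothetical path; NEOFS_ENDPOINT, NEOFS_NETMAP
# and ASSETS_DIR are expected to be provided by the `common` module):
#   NEOFS_CLI_EXEC=/usr/local/bin/neofs-cli robot --outputdir artifacts tests/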


@keyword('Get ScriptHash')
def get_scripthash(wif: str):
    acc = wallet.Account.from_wif(wif, '')
    return str(acc.script_hash)
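
# Usage sketch (placeholder WIF; `neo3.wallet` derives the account and its
# script hash from the private key):
#   >>> get_scripthash('<WIF-encoded private key>')
#   '<account script hash in its string form>'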


@keyword('Stop nodes')
def stop_nodes(down_num: int, *nodes_list):

    # Select random nodes to stop from the given list
    nodes_to_stop = random.sample(nodes_list, down_num)

    for node in nodes_to_stop:
        # Extract the container name, e.g. 's01' from 's01.neofs.devenv:8080'
        m = re.search(r'(s\d+).', node)
        node = m.group(1)

        client = docker.APIClient()
        client.stop(node)

    return nodes_to_stop


@keyword('Start nodes')
def start_nodes(*nodes_list):

    for node in nodes_list:
        # Extract the container name, e.g. 's01' from 's01.neofs.devenv:8080'
        m = re.search(r'(s\d+).', node)
        node = m.group(1)

        client = docker.APIClient()
        client.start(node)


@keyword('Get nodes with object')
def get_nodes_with_object(private_key: str, cid: str, oid: str):
    storage_nodes = _get_storage_nodes()
    nodes_list = []

    for node in storage_nodes:
        search_res = _search_object(node, private_key, cid, oid)
        if search_res:
            if re.search(fr'({oid})', search_res):
                nodes_list.append(node)

    logger.info(f"Nodes with object: {nodes_list}")
    return nodes_list


@keyword('Get nodes without object')
def get_nodes_without_object(private_key: str, cid: str, oid: str):
    storage_nodes = _get_storage_nodes()
    nodes_list = []

    for node in storage_nodes:
        search_res = _search_object(node, private_key, cid, oid)
        if search_res:
            if not re.search(fr'({oid})', search_res):
                nodes_list.append(node)
        else:
            nodes_list.append(node)

    logger.info(f"Nodes without object: {nodes_list}")
    return nodes_list


@keyword('Validate storage policy for object')
def validate_storage_policy_for_object(private_key: str, expected_copies: int, cid, oid,
                                       expected_node_list=None, storage_nodes=None):
    storage_nodes = storage_nodes if storage_nodes else _get_storage_nodes()
    copies = 0
    found_nodes = []

    for node in storage_nodes:
        search_res = _search_object(node, private_key, cid, oid)
        if search_res:
            if re.search(fr'({oid})', search_res):
                copies += 1
                found_nodes.append(node)

    if copies != expected_copies:
        raise Exception(f"Object copies do not match storage policy. "
                        f"Found: {copies}, expected: {expected_copies}.")
    else:
        logger.info(f"Found copies: {copies}, expected: {expected_copies}")

    logger.info(f"Found nodes: {found_nodes}")

    if expected_node_list:
        if sorted(found_nodes) == sorted(expected_node_list):
            logger.info(f"Found node list '{found_nodes}' is equal to expected list '{expected_node_list}'")
        else:
            raise Exception(f"Found node list '{found_nodes}' is not equal to expected list '{expected_node_list}'")


@keyword('Get Range')
def get_range(private_key: str, cid: str, oid: str, range_file: str, bearer: str,
              range_cut: str, options: str=""):
    bearer_token = ""
    if bearer:
        bearer_token = f"--bearer {bearer}"

    range_cmd = (
        f'{NEOFS_CLI_EXEC} --rpc-endpoint {NEOFS_ENDPOINT} --wif {private_key} '
        f'object range --cid {cid} --oid {oid} {bearer_token} --range {range_cut} '
        f'--file {ASSETS_DIR}/{range_file} {options}'
    )
    logger.info(f"Cmd: {range_cmd}")
    _cmd_run(range_cmd)


@keyword('Create container')
def create_container(private_key: str, basic_acl: str, rule: str, user_headers: str=''):
    if rule == "":
        logger.error("Cannot create container with empty placement rule")

    if basic_acl:
        basic_acl = f"--basic-acl {basic_acl}"
    if user_headers:
        user_headers = f"--attributes {user_headers}"

    create_container_cmd = (
        f'{NEOFS_CLI_EXEC} --rpc-endpoint {NEOFS_ENDPOINT} --wif {private_key} '
        f'container create --policy "{rule}" {basic_acl} {user_headers} --await'
    )
    logger.info(f"Cmd: {create_container_cmd}")
    output = _cmd_run(create_container_cmd)
    cid = _parse_cid(output)
    logger.info(f"Created container {cid} with rule {rule}")

    return cid
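
# Robot Framework usage sketch (hypothetical wallet key and hex basic ACL; the
# placement policy follows NeoFS syntax, here two replicas across the netmap):
#   ${CID} =    Create container    ${PRIV_KEY}    0x0FFFFFFF    REP 2 IN X CBF 1 SELECT 4 FROM * AS X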


@keyword('Container List')
def container_list(private_key: str):
    container_list_cmd = (
        f'{NEOFS_CLI_EXEC} --rpc-endpoint {NEOFS_ENDPOINT} --wif {private_key} '
        f'container list'
    )
    logger.info(f"Cmd: {container_list_cmd}")
    output = _cmd_run(container_list_cmd)

    containers = re.findall(r'(\w{43,44})', output)
    logger.info(f"Containers list: {containers}")
    return containers


@keyword('Container Existing')
def container_existing(private_key: str, cid: str):
    container_list_cmd = (
        f'{NEOFS_CLI_EXEC} --rpc-endpoint {NEOFS_ENDPOINT} --wif {private_key} '
        f'container list'
    )
    logger.info(f"Cmd: {container_list_cmd}")
    output = _cmd_run(container_list_cmd)

    _find_cid(output, cid)
    return


@keyword('Search object')
def search_object(private_key: str, cid: str, keys: str, bearer: str, filters: str,
                  expected_objects_list=None, options: str=""):
    bearer_token = ""
    filters_result = ""

    if bearer:
        bearer_token = f"--bearer {bearer}"
    if filters:
        for filter_item in filters.split(','):
            filter_item = re.sub(r'=', ' EQ ', filter_item)
            filters_result += f"--filters '{filter_item}' "

    object_cmd = (
        f'{NEOFS_CLI_EXEC} --rpc-endpoint {NEOFS_ENDPOINT} --wif {private_key} '
        f'object search {keys} --cid {cid} {bearer_token} {filters_result} {options}'
    )
    logger.info(f"Cmd: {object_cmd}")
    output = _cmd_run(object_cmd)

    found_objects = re.findall(r'(\w{43,44})', output)

    if expected_objects_list:
        if sorted(found_objects) == sorted(expected_objects_list):
            logger.info(f"Found objects list '{found_objects}' "
                        f"is equal to expected list '{expected_objects_list}'")
        else:
            raise Exception(f"Found object list '{found_objects}' "
                            f"is not equal to expected list '{expected_objects_list}'")

    return found_objects


@keyword('Get Split objects')
def get_component_objects(private_key: str, cid: str, oid: str):
    logger.info("Collect Split objects list from the Linking object.")
    split_id = ""
    nodes = _get_storage_nodes()
    for node in nodes:
        try:
            header_virtual = head_object(private_key, cid, oid, '', '', '--raw --ttl 1', node, True)
            if header_virtual:
                parsed_header_virtual = parse_object_virtual_raw_header(header_virtual)

                if 'Linking object' in parsed_header_virtual.keys():
                    return _collect_split_objects_from_header(private_key, cid, parsed_header_virtual)

                elif 'Split ID' in parsed_header_virtual.keys():
                    logger.info(f"parsed_header_virtual: {parsed_header_virtual}")
                    split_id = parsed_header_virtual['Split ID']

        except Exception:
            logger.warn("Linking object has not been found.")

    # Get all existing objects
    full_obj_list = search_object(private_key, cid, None, None, None, None, '--phy')

    # Search for the expected Linking object
    for target_oid in full_obj_list:
        header = head_object(private_key, cid, target_oid, '', '', '--raw')
        header_parsed = _get_raw_split_information(header)
        if header_parsed.get('Split ID') == split_id and 'Split ChildID' in header_parsed.keys():
            logger.info("Linking object has been found in additional check (head of all objects).")
            return _collect_split_objects_from_header(private_key, cid, header_parsed)

    raise Exception("Linking object was not found at all - all existing objects have been headed.")


def _collect_split_objects_from_header(private_key, cid, parsed_header):
    header_link = head_object(private_key, cid, parsed_header['Linking object'], '', '', '--raw')
    header_link_parsed = _get_raw_split_information(header_link)
    return header_link_parsed['Split ChildID']


@keyword('Verify Split Chain')
def verify_split_chain(private_key: str, cid: str, oid: str):

    header_virtual_parsed = dict()
    header_last_parsed = dict()

    marker_last_obj = 0
    marker_link_obj = 0

    final_verif_data = dict()

    # Get the Last object
    logger.info("Collect Split objects information and verify chain of the objects.")
    nodes = _get_storage_nodes()
    for node in nodes:
        try:
            header_virtual = head_object(private_key, cid, oid, '', '', '--raw --ttl 1', node, True)
            parsed_header_virtual = parse_object_virtual_raw_header(header_virtual)

            if 'Last object' in parsed_header_virtual.keys():
                header_last = head_object(private_key, cid,
                                          parsed_header_virtual['Last object'],
                                          '', '', '--raw')
                header_last_parsed = _get_raw_split_information(header_last)
                marker_last_obj = 1

                # Recursive chain validation up to the first object
                final_verif_data = _verify_child_link(private_key, cid, oid, header_last_parsed, final_verif_data)
                break
            logger.info(f"Found Split Object with header:\n\t{parsed_header_virtual}")
            logger.info("Continue to search the Last Split Object")

        except RuntimeError as e:
            logger.info(f"Failed while collecting Split Objects: {e}")
            continue

    if marker_last_obj == 0:
        raise Exception("Last object has not been found")

    # Get the Linking object
    logger.info("Compare Split objects result information with the Linking object.")
    for node in nodes:
        try:
            header_virtual = head_object(private_key, cid, oid, '', '', '--raw --ttl 1', node, True)
            parsed_header_virtual = parse_object_virtual_raw_header(header_virtual)
            if 'Linking object' in parsed_header_virtual.keys():

                header_link = head_object(private_key, cid,
                                          parsed_header_virtual['Linking object'],
                                          '', '', '--raw')
                header_link_parsed = _get_raw_split_information(header_link)
                marker_link_obj = 1

                reversed_list = final_verif_data['ID List'][::-1]

                if header_link_parsed['Split ChildID'] == reversed_list:
                    logger.info(f"Split objects list from Linking Object is equal to expected "
                                f"{', '.join(header_link_parsed['Split ChildID'])}")
                else:
                    raise Exception(f"Split objects list from Linking Object "
                                    f"({', '.join(header_link_parsed['Split ChildID'])}) "
                                    f"is not equal to expected ({', '.join(reversed_list)})")

                if int(header_link_parsed['PayloadLength']) == 0:
                    logger.info("Linking object Payload is equal to expected - zero size.")
                else:
                    raise Exception("Linking object Payload is not equal to expected. Should be zero.")

                if header_link_parsed['Type'] == 'regular':
                    logger.info("Linking Object Type is 'regular' as expected.")
                else:
                    raise Exception("Object Type is not 'regular'.")

                if header_link_parsed['Split ID'] == final_verif_data['Split ID']:
                    logger.info(f"Linking Object Split ID is equal to expected {final_verif_data['Split ID']}.")
                else:
                    raise Exception(f"Split ID from Linking Object ({header_link_parsed['Split ID']}) "
                                    f"is not equal to expected ({final_verif_data['Split ID']})")

                break
            logger.info(f"Found Linking Object with header:\n\t{parsed_header_virtual}")
            logger.info("Continue to search the Linking Object")
        except RuntimeError as e:
            logger.info(f"Failed while collecting Split Object: {e}")
            continue

    if marker_link_obj == 0:
        raise Exception("Linking object has not been found")

    logger.info("Compare Split objects result information with the Virtual object.")

    header_virtual = head_object(private_key, cid, oid, '', '', '')
    header_virtual_parsed = _get_raw_split_information(header_virtual)

    if int(header_virtual_parsed['PayloadLength']) == int(final_verif_data['PayloadLength']):
        logger.info(f"Split objects PayloadLength are equal to Virtual Object Payload "
                    f"{header_virtual_parsed['PayloadLength']}")
    else:
        raise Exception(f"Split objects PayloadLength from Virtual Object "
                        f"({header_virtual_parsed['PayloadLength']}) is not equal "
                        f"to expected ({final_verif_data['PayloadLength']})")

    if header_virtual_parsed['Type'] == 'regular':
        logger.info("Virtual Object Type is 'regular' as expected.")
    else:
        raise Exception("Object Type is not 'regular'.")

    return 1


def _verify_child_link(private_key: str, cid: str, oid: str, header_last_parsed: dict, final_verif_data: dict):

    if 'PayloadLength' in final_verif_data.keys():
        final_verif_data['PayloadLength'] = int(final_verif_data['PayloadLength']) + int(header_last_parsed['PayloadLength'])
    else:
        final_verif_data['PayloadLength'] = int(header_last_parsed['PayloadLength'])

    if header_last_parsed['Type'] != 'regular':
        raise Exception("Object Type is not 'regular'.")

    if 'Split ID' in final_verif_data.keys():
        if final_verif_data['Split ID'] != header_last_parsed['Split ID']:
            raise Exception(f"Object Split ID ({header_last_parsed['Split ID']}) is not expected ({final_verif_data['Split ID']}).")
    else:
        final_verif_data['Split ID'] = header_last_parsed['Split ID']

    if 'ID List' in final_verif_data.keys():
        final_verif_data['ID List'].append(header_last_parsed['ID'])
    else:
        final_verif_data['ID List'] = []
        final_verif_data['ID List'].append(header_last_parsed['ID'])

    if 'Split PreviousID' in header_last_parsed.keys():
        header_virtual = head_object(private_key, cid, header_last_parsed['Split PreviousID'], '', '', '--raw')
        parsed_header_virtual = _get_raw_split_information(header_virtual)

        final_verif_data = _verify_child_link(private_key, cid, oid, parsed_header_virtual, final_verif_data)
    else:
        logger.info("Chain of the objects has been parsed from the last object to the first.")

    return final_verif_data
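
# Shape of the accumulator built by _verify_child_link (derived from the code
# above; values are illustrative):
#   {
#       'PayloadLength': 2048,                  # running sum of child payload sizes
#       'Split ID': '<split id>',               # shared by every member of the chain
#       'ID List': ['<last oid>', ..., '<first oid>'],
#   }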


def _get_raw_split_information(header):
    result_header = dict()

    # Header - Constant attributes

    # ID
    m = re.search(r'^ID: (\w+)', header)
    if m is not None:
        result_header['ID'] = m.group(1)
    else:
        raise Exception(f"no ID was parsed from object header: \t{header}")

    # Type
    m = re.search(r'Type:\s+(\w+)', header)
    if m is not None:
        result_header['Type'] = m.group(1)
    else:
        raise Exception(f"no Type was parsed from object header: \t{header}")

    # PayloadLength
    m = re.search(r'Size: (\d+)', header)
    if m is not None:
        result_header['PayloadLength'] = m.group(1)
    else:
        raise Exception(f"no PayloadLength was parsed from object header: \t{header}")

    # Header - Optional attributes

    # Split ID
    m = re.search(r'Split ID:\s+([\w-]+)', header)
    if m is not None:
        result_header['Split ID'] = m.group(1)

    # Split PreviousID
    m = re.search(r'Split PreviousID:\s+(\w+)', header)
    if m is not None:
        result_header['Split PreviousID'] = m.group(1)

    # Split ParentID
    m = re.search(r'Split ParentID:\s+(\w+)', header)
    if m is not None:
        result_header['Split ParentID'] = m.group(1)

    # Split ChildID list
    found_objects = re.findall(r'Split ChildID:\s+(\w+)', header)
    if found_objects:
        result_header['Split ChildID'] = found_objects
    logger.info(f"Result: {result_header}")

    return result_header


@keyword('Verify Head Tombstone')
def verify_head_tombstone(private_key: str, cid: str, oid_ts: str, oid: str, addr: str):
    object_cmd = (
        f'{NEOFS_CLI_EXEC} --rpc-endpoint {NEOFS_ENDPOINT} --wif {private_key} '
        f'object head --cid {cid} --oid {oid_ts} --json'
    )
    logger.info(f"Cmd: {object_cmd}")
    output = _cmd_run(object_cmd)
    full_headers = json.loads(output)
    logger.info(f"Output: {full_headers}")

    # Header verification
    header_cid = full_headers["header"]["containerID"]["value"]
    if _json_cli_decode(header_cid) == cid:
        logger.info(f"Header CID is expected: {cid} ({header_cid} in the output)")
    else:
        raise Exception("Header CID is not expected.")

    header_owner = full_headers["header"]["ownerID"]["value"]
    if _json_cli_decode(header_owner) == addr:
        logger.info(f"Header ownerID is expected: {addr} ({header_owner} in the output)")
    else:
        raise Exception("Header ownerID is not expected.")

    header_type = full_headers["header"]["objectType"]
    if header_type == "TOMBSTONE":
        logger.info(f"Header Type is expected: {header_type}")
    else:
        raise Exception("Header Type is not expected.")

    header_session_type = full_headers["header"]["sessionToken"]["body"]["object"]["verb"]
    if header_session_type == "DELETE":
        logger.info(f"Header Session Type is expected: {header_session_type}")
    else:
        raise Exception("Header Session Type is not expected.")

    header_session_cid = full_headers["header"]["sessionToken"]["body"]["object"]["address"]["containerID"]["value"]
    if _json_cli_decode(header_session_cid) == cid:
        logger.info(f"Header Session CID is expected: {cid} ({header_session_cid} in the output)")
    else:
        raise Exception("Header Session CID is not expected.")

    header_session_oid = full_headers["header"]["sessionToken"]["body"]["object"]["address"]["objectID"]["value"]
    if _json_cli_decode(header_session_oid) == oid:
        logger.info(f"Header Session OID (deleted object) is expected: {oid} ({header_session_oid} in the output)")
    else:
        raise Exception("Header Session OID (deleted object) is not expected.")


def _json_cli_decode(data: str):
    return base58.b58encode(base64.b64decode(data)).decode("utf-8")
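
# The CLI JSON output carries binary identifiers base64-encoded, while NeoFS
# presents IDs in base58. A minimal round-trip sketch of the helper above:
#   >>> _json_cli_decode(base64.b64encode(base58.b58decode('11111111')).decode())
#   '11111111'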


@keyword('Head object')
def head_object(private_key: str, cid: str, oid: str, bearer_token: str="",
                user_headers: str="", options: str="", endpoint: str="", json_output: bool = False):

    if bearer_token:
        bearer_token = f"--bearer {bearer_token}"
    if endpoint == "":
        endpoint = NEOFS_ENDPOINT

    object_cmd = (
        f'{NEOFS_CLI_EXEC} --rpc-endpoint {endpoint} --wif {private_key} object '
        f'head --cid {cid} --oid {oid} {bearer_token} {options} {"--json" if json_output else ""}'
    )
    logger.info(f"Cmd: {object_cmd}")
    output = _cmd_run(object_cmd)

    if user_headers:
        for key in user_headers.split(","):
            if re.search(fr'({key})', output):
                logger.info(f"User header {key} was parsed from command output")
            else:
                raise Exception(f"User header {key} was not found in the command output: \t{output}")
    return output


@keyword('Get container attributes')
def get_container_attributes(private_key: str, cid: str, endpoint: str="", json_output: bool = False):

    if endpoint == "":
        endpoint = NEOFS_ENDPOINT

    container_cmd = (
        f'{NEOFS_CLI_EXEC} --rpc-endpoint {endpoint} --wif {private_key} '
        f'container get --cid {cid} {"--json" if json_output else ""}'
    )
    logger.info(f"Cmd: {container_cmd}")
    output = _cmd_run(container_cmd)
    return output


@keyword('Parse Object Virtual Raw Header')
def parse_object_virtual_raw_header(header: str):
    result_header = dict()

    m = re.search(r'Split ID:\s+([\w-]+)', header)
    if m is not None:
        result_header['Split ID'] = m.group(1)

    m = re.search(r'Linking object:\s+(\w+)', header)
    if m is not None:
        result_header['Linking object'] = m.group(1)

    m = re.search(r'Last object:\s+(\w+)', header)
    if m is not None:
        result_header['Last object'] = m.group(1)

    logger.info(f"Result: {result_header}")
    return result_header
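
# Example of the raw virtual-object header fragment this keyword parses
# (illustrative IDs only):
#   Split ID: 8b69e76d-5e95-4639-8213-46786c41ab73
#   Linking object: 6wQ7nLPtXWnJfsvBNRVwVCn8WPKyLeTxHBnQ1gTnT1qE
#   Last object: 4MhrLA7RXTBXCsaNnbahYVAPuoQdiUPuyNEWnywvoSEs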


@keyword('Decode Object System Header Json')
def decode_object_system_header_json(header):
    result_header = dict()
    json_header = json.loads(header)

    # Header - Constant attributes

    # ID
    ID = json_header["objectID"]["value"]
    if ID is not None:
        result_header["ID"] = _json_cli_decode(ID)
    else:
        raise Exception(f"no ID was parsed from header: \t{header}")

    # CID
    CID = json_header["header"]["containerID"]["value"]
    if CID is not None:
        result_header["CID"] = _json_cli_decode(CID)
    else:
        raise Exception(f"no CID was parsed from header: \t{header}")

    # OwnerID
    OwnerID = json_header["header"]["ownerID"]["value"]
    if OwnerID is not None:
        result_header["OwnerID"] = _json_cli_decode(OwnerID)
    else:
        raise Exception(f"no OwnerID was parsed from header: \t{header}")

    # CreatedAtEpoch
    CreatedAtEpoch = json_header["header"]["creationEpoch"]
    if CreatedAtEpoch is not None:
        result_header["CreatedAtEpoch"] = CreatedAtEpoch
    else:
        raise Exception(f"no CreatedAtEpoch was parsed from header: \t{header}")

    # PayloadLength
    PayloadLength = json_header["header"]["payloadLength"]
    if PayloadLength is not None:
        result_header["PayloadLength"] = PayloadLength
    else:
        raise Exception(f"no PayloadLength was parsed from header: \t{header}")

    # HomoHash
    HomoHash = json_header["header"]["homomorphicHash"]["sum"]
    if HomoHash is not None:
        result_header["HomoHash"] = _json_cli_decode(HomoHash)
    else:
        raise Exception(f"no HomoHash was parsed from header: \t{header}")

    # Checksum
    Checksum = json_header["header"]["payloadHash"]["sum"]
    if Checksum is not None:
        Checksum_64_d = base64.b64decode(Checksum)
        result_header["Checksum"] = binascii.hexlify(Checksum_64_d)
    else:
        raise Exception(f"no Checksum was parsed from header: \t{header}")

    # Type
    Type = json_header["header"]["objectType"]
    if Type is not None:
        result_header["Type"] = Type
    else:
        raise Exception(f"no Type was parsed from header: \t{header}")

    # Header - Optional attributes

    # Attributes
    attributes = []
    attribute_list = json_header["header"]["attributes"]
    if attribute_list is not None:
        for e in attribute_list:
            values_list = list(e.values())
            attribute = values_list[0] + '=' + values_list[1]
            attributes.append(attribute)
        result_header["Attributes"] = attributes
    else:
        raise Exception(f"no Attributes were parsed from header: \t{header}")

    return result_header


@keyword('Decode Container Attributes Json')
def decode_container_attributes_json(header):
    result_header = dict()
    json_header = json.loads(header)

    attributes = []
    attribute_list = json_header["attributes"]
    if attribute_list is not None:
        for e in attribute_list:
            values_list = list(e.values())
            attribute = values_list[0] + '=' + values_list[1]
            attributes.append(attribute)
        result_header["Attributes"] = attributes
    else:
        raise Exception(f"no Attributes were parsed from header: \t{header}")

    return result_header


@keyword('Verify Head Attribute')
def verify_head_attribute(header, attribute):
    attribute_list = header["Attributes"]
    if attribute in attribute_list:
        logger.info(f"Attribute {attribute} is found")
    else:
        raise Exception(f"Attribute {attribute} was not found")


@keyword('Delete object')
def delete_object(private_key: str, cid: str, oid: str, bearer: str, options: str=""):
    bearer_token = ""
    if bearer:
        bearer_token = f"--bearer {bearer}"

    object_cmd = (
        f'{NEOFS_CLI_EXEC} --rpc-endpoint {NEOFS_ENDPOINT} --wif {private_key} '
        f'object delete --cid {cid} --oid {oid} {bearer_token} {options}'
    )
    logger.info(f"Cmd: {object_cmd}")
    output = _cmd_run(object_cmd)
    tombstone = _parse_oid(output)

    return tombstone


@keyword('Delete Container')
# TODO: make the error message about a non-found container more user-friendly https://github.com/nspcc-dev/neofs-contract/issues/121
def delete_container(cid: str, private_key: str):

    delete_container_cmd = (
        f'{NEOFS_CLI_EXEC} --rpc-endpoint {NEOFS_ENDPOINT} --wif {private_key} '
        f'container delete --cid {cid} --await'
    )
    logger.info(f"Cmd: {delete_container_cmd}")
    _cmd_run(delete_container_cmd)


@keyword('Get file name')
def get_file_name(filepath):
    filename = os.path.basename(filepath)
    return filename


@keyword('Get file hash')
def get_file_hash(filename: str):
    file_hash = _get_file_hash(filename)
    return file_hash


@keyword('Verify file hash')
def verify_file_hash(filename, expected_hash):
    file_hash = _get_file_hash(filename)
    if file_hash == expected_hash:
        logger.info(f"Hash is equal to expected: {file_hash}")
    else:
        raise Exception(f"File hash '{file_hash}' is not equal to {expected_hash}")


@keyword('Put object')
def put_object(private_key: str, path: str, cid: str, bearer: str, user_headers: str,
               endpoint: str="", options: str=""):
    logger.info("Going to put the object")

    if not endpoint:
        endpoint = random.sample(_get_storage_nodes(), 1)[0]

    if user_headers:
        user_headers = f"--attributes {user_headers}"

    if bearer:
        bearer = f"--bearer {bearer}"

    putobject_cmd = (
        f'{NEOFS_CLI_EXEC} --rpc-endpoint {endpoint} --wif {private_key} object '
        f'put --file {path} --cid {cid} {bearer} {user_headers} {options}'
    )
    logger.info(f"Cmd: {putobject_cmd}")
    output = _cmd_run(putobject_cmd)
    oid = _parse_oid(output)
    return oid


@keyword('Get Nodes Log Latest Timestamp')
def get_logs_latest_timestamp():
    """
    Keyword return:
    nodes_logs_time -- structure (dict) of nodes container name (key) and latest logs timestamp (value)
    """
    nodes = _get_storage_nodes()
    client_api = docker.APIClient()

    nodes_logs_time = dict()

    for node in nodes:
        container = node.split('.')[0]
        log_line = client_api.logs(container, tail=1)

        m = re.search(r'(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z)', str(log_line))
        if m is not None:
            timestamp = m.group(1)
            # Drop the trailing 'Z' so datetime.fromisoformat() can parse the value
            timestamp_date = datetime.fromisoformat(timestamp[:-1])
            nodes_logs_time[container] = timestamp_date

    logger.info(f"Latest logs timestamp list: {nodes_logs_time}")

    return nodes_logs_time
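
# Example of the returned structure (hypothetical container names and times):
#   {'s01': datetime(2021, 9, 1, 14, 48, 9, 123456),
#    's02': datetime(2021, 9, 1, 14, 48, 10, 654321)}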


@keyword('Find in Nodes Log')
def find_in_nodes_Log(line: str, nodes_logs_time: dict):

    client_api = docker.APIClient()
    container_names = list()

    for docker_container in client_api.containers():
        container_names.append(docker_container['Names'][0][1:])

    global_count = 0

    for container in nodes_logs_time.keys():
        # Check that the container exists
        if container in container_names:
            # Get log since timestamp
            timestamp_date = nodes_logs_time[container]
            log_lines = client_api.logs(container, since=timestamp_date)
            logger.info(f"Timestamp since: {timestamp_date}")
            found_count = len(re.findall(line, log_lines.decode("utf-8")))
            logger.info(f"Node {container} log - found counter: {found_count}")
            global_count += found_count

        else:
            logger.info(f"Container {container} has not been found.")

    if global_count > 0:
        logger.info(f"Expected line '{line}' has been found in the logs.")
    else:
        raise Exception(f"Expected line '{line}' has not been found in the logs.")

    return 1


@keyword('Get Range Hash')
def get_range_hash(private_key: str, cid: str, oid: str, bearer_token: str,
                   range_cut: str, options: str=""):
    if bearer_token:
        bearer_token = f"--bearer {bearer_token}"

    object_cmd = (
        f'{NEOFS_CLI_EXEC} --rpc-endpoint {NEOFS_ENDPOINT} --wif {private_key} '
        f'object hash --cid {cid} --oid {oid} --range {range_cut} '
        f'{bearer_token} {options}'
    )
    logger.info(f"Cmd: {object_cmd}")
    _cmd_run(object_cmd)


@keyword('Get object')
def get_object(private_key: str, cid: str, oid: str, bearer_token: str,
               write_object: str, endpoint: str="", options: str=""):

    file_path = f"{ASSETS_DIR}/{write_object}"

    logger.info("Going to get the object")
    if not endpoint:
        endpoint = random.sample(_get_storage_nodes(), 1)[0]

    if bearer_token:
        bearer_token = f"--bearer {bearer_token}"

    object_cmd = (
        f'{NEOFS_CLI_EXEC} --rpc-endpoint {endpoint} --wif {private_key} '
        f'object get --cid {cid} --oid {oid} --file {file_path} {bearer_token} '
        f'{options}'
    )
    logger.info(f"Cmd: {object_cmd}")
    _cmd_run(object_cmd)
    return file_path


@keyword('Put Storagegroup')
def put_storagegroup(private_key: str, cid: str, bearer_token: str="", *oid_list):

    cmd_oid_line = ",".join(oid_list)

    if bearer_token:
        bearer_token = f"--bearer {bearer_token}"

    object_cmd = (
        f'{NEOFS_CLI_EXEC} --rpc-endpoint {NEOFS_ENDPOINT} --wif {private_key} storagegroup '
        f'put --cid {cid} --members {cmd_oid_line} {bearer_token}'
    )
    logger.info(f"Cmd: {object_cmd}")
    output = _cmd_run(object_cmd)
    oid = _parse_oid(output)

    return oid


@keyword('List Storagegroup')
def list_storagegroup(private_key: str, cid: str, bearer_token: str="", *expected_list):

    if bearer_token:
        bearer_token = f"--bearer {bearer_token}"

    object_cmd = (
        f'{NEOFS_CLI_EXEC} --rpc-endpoint {NEOFS_ENDPOINT} --wif {private_key} '
        f'storagegroup list --cid {cid} {bearer_token}'
    )

    logger.info(f"Cmd: {object_cmd}")
    output = _cmd_run(object_cmd)
    found_objects = re.findall(r'(\w{43,44})', output)

    if expected_list:
        if sorted(found_objects) == sorted(expected_list):
            logger.info(f"Found storage group list '{found_objects}' is equal to expected list '{expected_list}'")
        else:
            raise Exception(f"Found storage group '{found_objects}' is not equal to expected list '{expected_list}'")

    return found_objects


@keyword('Get Storagegroup')
def get_storagegroup(private_key: str, cid: str, oid: str, bearer_token: str, expected_size, *expected_objects_list):

    if bearer_token:
        bearer_token = f"--bearer {bearer_token}"

    object_cmd = f'{NEOFS_CLI_EXEC} --rpc-endpoint {NEOFS_ENDPOINT} --wif {private_key} storagegroup get --cid {cid} --id {oid} {bearer_token}'
    logger.info(f"Cmd: {object_cmd}")
    output = _cmd_run(object_cmd)

    if expected_size:
        if re.search(fr'Group size: {expected_size}', output):
            logger.info(f"Group size {expected_size} has been found in the output")
        else:
            raise Exception(f"Group size {expected_size} has not been found in the output")

    found_objects = re.findall(r'\s(\w{43,44})\s', output)

    if expected_objects_list:
        if sorted(found_objects) == sorted(expected_objects_list):
            logger.info(f"Found objects list '{found_objects}' is equal to expected list '{expected_objects_list}'")
        else:
            raise Exception(f"Found object list '{found_objects}' is not equal to expected list '{expected_objects_list}'")


@keyword('Delete Storagegroup')
def delete_storagegroup(private_key: str, cid: str, oid: str, bearer_token: str=""):

    if bearer_token:
        bearer_token = f"--bearer {bearer_token}"

    object_cmd = (
        f'{NEOFS_CLI_EXEC} --rpc-endpoint {NEOFS_ENDPOINT} --wif {private_key} storagegroup '
        f'delete --cid {cid} --id {oid} {bearer_token}'
    )
    logger.info(f"Cmd: {object_cmd}")
    output = _cmd_run(object_cmd)

    m = re.search(r'Tombstone: ([a-zA-Z0-9-]+)', output)
    if m is not None:
        oid = m.group(1)
    else:
        raise Exception(f"no Tombstone ID was parsed from command output: \t{output}")
    return oid


def _get_file_hash(filename):
    blocksize = 65536
    hash = hashlib.md5()
    with open(filename, "rb") as f:
        for block in iter(lambda: f.read(blocksize), b""):
            hash.update(block)
    logger.info(f"Hash: {hash.hexdigest()}")
    return hash.hexdigest()
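
# The helper streams the file in 64 KiB blocks, so arbitrarily large assets can
# be hashed without loading them into memory. For reference, an empty file
# yields the well-known MD5 digest:
#   >>> _get_file_hash('/path/to/empty/file')   # hypothetical path
#   'd41d8cd98f00b204e9800998ecf8427e'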


def _find_cid(output: str, cid: str):
    """
    This function parses CID from given CLI output.
    Parameters:
    - output: a string with command run output
    """
    if re.search(fr'({cid})', output):
        logger.info(f"CID {cid} was parsed from command output: \t{output}")
    else:
        raise Exception(f"no CID {cid} was parsed from command output: \t{output}")
    return cid


def _parse_oid(input_str: str):
    """
    This function parses OID from given CLI output. The input string we
    expect:
        Object successfully stored
        ID: 4MhrLA7RXTBXCsaNnbahYVAPuoQdiUPuyNEWnywvoSEs
        CID: HeZu2DXBuPve6HXbuHZx64knS7KcGtfSj2L59Li72kkg
    We want to take 'ID' value from the string.

    Parameters:
    - input_str: a string with command run output
    """
    try:
        # Take the second line of the command output
        snd_str = input_str.split('\n')[1]
    except IndexError:
        logger.error(f"Got empty input: {input_str}")
        raise
    splitted = snd_str.split(": ")
    if len(splitted) != 2:
        raise Exception(f"no OID was parsed from command output: \t{snd_str}")
    return splitted[1]


def _parse_cid(input_str: str):
    """
    This function parses CID from given CLI output. The input string we
    expect:
        container ID: 2tz86kVTDpJxWHrhw3h6PbKMwkLtBEwoqhHQCKTre1FN
        awaiting...
        container has been persisted on sidechain
    We want to take 'container ID' value from the string.

    Parameters:
    - input_str: a string with command run output
    """
    try:
        # Take the first line of the command output
        fst_str = input_str.split('\n')[0]
    except IndexError:
        logger.error(f"Got empty output: {input_str}")
        raise
    splitted = fst_str.split(": ")
    if len(splitted) != 2:
        raise Exception(f"no CID was parsed from command output: \t{fst_str}")
    return splitted[1]


def _get_storage_nodes():
    # TODO: fix to get netmap from neofs-cli
    logger.info(f"Storage nodes: {NEOFS_NETMAP}")
    return NEOFS_NETMAP


def _search_object(node: str, private_key: str, cid: str, oid: str):
    oid_cmd = ""
    if oid:
        oid_cmd = f"--oid {oid}"
    search_cmd = (
        f'{NEOFS_CLI_EXEC} --rpc-endpoint {node} --wif {private_key} --ttl 1 '
        f'object search --root --cid {cid} {oid_cmd}'
    )

    output = _cmd_run(search_cmd)
    if re.search(fr'{oid}', output):
        return oid
    else:
        logger.info("Object is not found.")

    if re.search(r'local node is outside of object placement', output):
        logger.info("Server is not presented in container.")
    elif (re.search(r'timed out after 30 seconds', output)
            or re.search(r'no route to host', output)
            or re.search(r'i/o timeout', output)):
        logger.warn("Node is unavailable")