2020-07-01 02:28:31 +00:00
|
|
|
#!/usr/bin/python3
|
|
|
|
|
|
|
|
import subprocess
|
|
|
|
import os
|
|
|
|
import re
|
|
|
|
import binascii
|
|
|
|
import uuid
|
|
|
|
import hashlib
|
|
|
|
from robot.api.deco import keyword
|
|
|
|
from robot.api import logger
|
2020-11-30 10:43:19 +00:00
|
|
|
import random
|
2020-12-16 11:19:24 +00:00
|
|
|
import base64
|
|
|
|
import base58
|
|
|
|
import docker
|
2020-07-01 02:28:31 +00:00
|
|
|
|
2020-11-30 10:33:05 +00:00
|
|
|
if os.getenv('ROBOT_PROFILE') == 'selectel_smoke':
|
|
|
|
from selectelcdn_smoke_vars import (NEOGO_CLI_PREFIX, NEO_MAINNET_ENDPOINT,
|
2020-12-16 11:19:24 +00:00
|
|
|
NEOFS_NEO_API_ENDPOINT, NEOFS_ENDPOINT, NEOFS_NETMAP)
|
2020-11-30 10:33:05 +00:00
|
|
|
else:
|
|
|
|
from neofs_int_vars import (NEOGO_CLI_PREFIX, NEO_MAINNET_ENDPOINT,
|
2020-12-16 11:19:24 +00:00
|
|
|
NEOFS_NEO_API_ENDPOINT, NEOFS_ENDPOINT, NEOFS_NETMAP)
|
2020-07-01 02:28:31 +00:00
|
|
|
|
|
|
|
ROBOT_AUTO_KEYWORDS = False
|
|
|
|
|
2020-11-18 15:15:57 +00:00
|
|
|
CLI_PREFIX = ""
|
|
|
|
|
|
|
|
@keyword('Form WIF from String')
def form_wif_from_string(private_key: str):
    """Derive a WIF from a hex private key via `neofs-cli util keyer`.

    :param private_key: private key in hex form
    :return: WIF string parsed from the CLI output
    :raises Exception: when no WIF line is present in the output
    """
    Cmd = f'neofs-cli util keyer {private_key}'
    logger.info("Cmd: %s" % Cmd)
    complProc = subprocess.run(Cmd, check=True, universal_newlines=True,
                               stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                               timeout=150, shell=True)
    output = complProc.stdout
    logger.info("Output: %s" % output)

    # re.search returns None on no match; the old `m.start() != m.end()`
    # check crashed with AttributeError instead of raising the intended error.
    m = re.search(r'WIF\s+(\w+)', output)
    if m:
        return m.group(1)
    raise Exception("Can not get WIF.")
|
|
|
|
|
|
|
|
|
|
|
|
@keyword('Get ScripHash')
def get_scripthash(privkey: str):
    """Derive the Neo N3 script hash from a private key via `neofs-cli util keyer -u`.

    :param privkey: private key in hex form
    :return: ScriptHash3.0 value parsed from the CLI output
    :raises Exception: when no ScriptHash3.0 line is present in the output
    """
    Cmd = f'neofs-cli util keyer -u {privkey}'
    logger.info("Cmd: %s" % Cmd)
    complProc = subprocess.run(Cmd, check=True, universal_newlines=True,
                               stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                               timeout=150, shell=True)
    output = complProc.stdout
    logger.info("Output: %s" % output)

    # re.search returns None on no match; the old `m.start() != m.end()`
    # check crashed with AttributeError instead of raising the intended error.
    m = re.search(r'ScriptHash3.0 (\w+)', output)
    if m:
        return m.group(1)
    raise Exception("Can not get ScriptHash.")
|
2020-07-01 02:28:31 +00:00
|
|
|
|
2020-07-14 00:05:22 +00:00
|
|
|
|
2020-11-29 03:44:38 +00:00
|
|
|
@keyword('Stop nodes')
def stop_nodes(down_num: int, *nodes_list):
    """Stop `down_num` randomly selected storage-node containers.

    :param down_num: number of nodes to bring down
    :param nodes_list: node endpoints, e.g. 's01.neofs.devenv:8080'
    :return: the list of selected node endpoints (original strings)
    """
    # Fix: the local previously shadowed the function's own name `stop_nodes`.
    nodes_to_stop = random.sample(nodes_list, down_num)

    # One docker client reused for every container instead of one per node.
    client = docker.APIClient()
    for node in nodes_to_stop:
        # Container name is the 'sNN' prefix of the endpoint.
        m = re.search(r'(s\d+).', node)
        client.stop(m.group(1))

    return nodes_to_stop
|
|
|
|
|
|
|
|
|
|
|
|
@keyword('Start nodes')
def start_nodes(*nodes_list):
    """Start the docker containers of the given storage nodes.

    :param nodes_list: node endpoints, e.g. 's01.neofs.devenv:8080'
    """
    # One docker client reused for every container instead of one per node.
    client = docker.APIClient()
    for node in nodes_list:
        # Container name is the 'sNN' prefix of the endpoint.
        m = re.search(r'(s\d+).', node)
        client.start(m.group(1))
|
|
|
|
|
2020-07-14 00:05:22 +00:00
|
|
|
@keyword('Get nodes with object')
def get_nodes_with_object(private_key: str, cid: str, oid: str):
    """Return the storage nodes on which the object is found.

    :param private_key: key used for the search requests
    :param cid: container ID
    :param oid: object ID
    :return: list of node endpoints whose search output contains `oid`
    """
    storage_nodes = _get_storage_nodes(private_key)
    nodes_list = []

    for node in storage_nodes:
        search_res = _search_object(node, private_key, cid, oid)
        # Node counts only when the search succeeded and the output mentions oid.
        if search_res:
            if re.search(r'(%s)' % (oid), search_res):
                nodes_list.append(node)

    logger.info("Nodes with object: %s" % nodes_list)
    return nodes_list
|
2020-07-14 00:05:22 +00:00
|
|
|
|
|
|
|
|
|
|
|
@keyword('Get nodes without object')
def get_nodes_without_object(private_key: str, cid: str, oid: str):
    """Return the storage nodes on which the object is NOT found.

    A node is included when its search output does not mention `oid`, or
    when the search itself produced no output at all.

    :param private_key: key used for the search requests
    :param cid: container ID
    :param oid: object ID
    :return: list of node endpoints without the object
    """
    storage_nodes = _get_storage_nodes(private_key)
    nodes_list = []

    for node in storage_nodes:
        search_res = _search_object(node, private_key, cid, oid)
        if search_res:
            if not re.search(r'(%s)' % (oid), search_res):
                nodes_list.append(node)
        else:
            nodes_list.append(node)

    logger.info("Nodes without object: %s" % nodes_list)
    return nodes_list
|
2020-07-14 00:05:22 +00:00
|
|
|
|
2020-07-01 02:28:31 +00:00
|
|
|
|
|
|
|
@keyword('Validate storage policy for object')
def validate_storage_policy_for_object(private_key: str, expected_copies: int, cid, oid, *expected_node_list):
    """Verify the number (and optionally the placement) of object replicas.

    Counts the storage nodes on which `oid` is found and compares the count
    against `expected_copies`; when `expected_node_list` is supplied, the
    exact set of nodes must match as well.

    :raises Exception: on a copies-count or node-list mismatch
    """
    storage_nodes = _get_storage_nodes(private_key)
    copies = 0
    found_nodes = []

    for node in storage_nodes:
        search_res = _search_object(node, private_key, cid, oid)
        if search_res:
            if re.search(r'(%s)' % (oid), search_res):
                copies += 1
                found_nodes.append(node)

    if copies != expected_copies:
        # Fix: corrected "expexted" typo in the error message.
        raise Exception("Object copies is not match storage policy. Found: %s, expected: %s." % (copies, expected_copies))
    else:
        logger.info("Found copies: %s, expected: %s" % (copies, expected_copies))

    logger.info("Found nodes: %s" % found_nodes)

    if expected_node_list:
        if sorted(found_nodes) == sorted(expected_node_list):
            logger.info("Found node list '{}' is equal for expected list '{}'".format(found_nodes, expected_node_list))
        else:
            raise Exception("Found node list '{}' is not equal to expected list '{}'".format(found_nodes, expected_node_list))
|
|
|
|
|
2020-07-14 00:05:22 +00:00
|
|
|
|
2020-08-13 22:09:00 +00:00
|
|
|
@keyword('Get eACL')
def get_eacl(private_key: str, cid: str):
    """Fetch the extended ACL table of container `cid` via neofs-cli.

    :param private_key: key used for the request
    :param cid: container ID
    :return: raw CLI output, or None (implicitly) when the container has no
             eACL set — callers such as the bearer-token builders rely on
             the falsy return in that case
    :raises Exception: on any other CLI failure
    """
    Cmd = f'neofs-cli --rpc-endpoint {NEOFS_ENDPOINT} --key {private_key} container get-eacl --cid {cid}'
    logger.info("Cmd: %s" % Cmd)
    try:
        complProc = subprocess.run(Cmd, check=True, universal_newlines=True,
                                   stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                                   timeout=150, shell=True)
        output = complProc.stdout
        logger.info("Output: %s" % output)
        return output
    except subprocess.CalledProcessError as e:
        # NOTE(review): e.output holds captured stdout only; if neofs-cli
        # prints the "not set" message to stderr this branch never matches
        # and the error is re-raised instead — confirm against CLI behavior.
        if re.search(r'extended ACL table is not set for this container', e.output):
            logger.info("Server is not presented in container.")
        else:
            raise Exception("command '{}' return with error (code {}): {}".format(e.cmd, e.returncode, e.output))
|
|
|
|
|
2020-11-30 10:43:19 +00:00
|
|
|
|
2020-08-13 22:09:00 +00:00
|
|
|
|
|
|
|
@keyword('Set eACL')
def set_eacl(private_key: str, cid: str, eacl: str, add_keys: str = ""):
    """Apply the eACL table file `eacl` to container `cid` through neofs-cli.

    :param add_keys: extra CLI arguments appended verbatim to the command
    """
    command = f'neofs-cli --rpc-endpoint {NEOFS_ENDPOINT} --key {private_key} container set-eacl --cid {cid} --table {eacl} {add_keys}'
    logger.info("Cmd: %s" % command)
    proc = subprocess.run(command, check=True, universal_newlines=True,
                          stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                          timeout=150, shell=True)
    logger.info("Output: %s" % proc.stdout)
|
|
|
|
|
|
|
|
|
2020-08-19 22:31:16 +00:00
|
|
|
|
2020-11-26 08:32:18 +00:00
|
|
|
@keyword('Form BearerToken file for all ops')
def form_bearertoken_file_for_all_ops(file_name: str, private_key: str, cid: str, action: str, target_role: str, lifetime_exp: str ):
    """Write and sign a bearer token JSON covering every object operation.

    Builds a token with one record per operation (GET, PUT, HEAD, DELETE,
    SEARCH, GETRANGE, GETRANGEHASH), all using the same `action` and
    `target_role`, splices in any records already present in the container's
    eACL, signs the file in place with neofs-cli and returns `file_name`.

    :raises Exception: when signing the token fails
    """
    eacl = get_eacl(private_key, cid)
    input_records = ""

    # The JSON "containerID.value" field is base64, while the CID itself
    # is base58 — decode then re-encode.
    cid_base58_b = base58.b58decode(cid)
    cid_base64 = base64.b64encode(cid_base58_b).decode("utf-8")

    if eacl:
        # Cut the CLI eACL dump before its trailing "] } Signature:" block,
        # then keep everything after the opening '"records": [' so the
        # existing records can be appended to the new token below.
        res_json = re.split(r'[\s\n]+\][\s\n]+\}[\s\n]+Signature:', eacl)
        records = re.split(r'"records": \[', res_json[0])
        input_records = ",\n" + records[1]

    # Hand-built JSON template; existing eACL records are concatenated in
    # after the last generated record.
    myjson = """
{
"body": {
"eaclTable": {
"containerID": {
"value": \"""" + str(cid_base64) + """"
},
"records": [
{
"operation": "GET",
"action": \"""" + action + """",
"targets": [
{
"role": \"""" + target_role + """"
}
]
},
{
"operation": "PUT",
"action": \"""" + action + """",
"targets": [
{
"role": \"""" + target_role + """"
}
]
},
{
"operation": "HEAD",
"action": \"""" + action + """",
"targets": [
{
"role": \"""" + target_role + """"
}
]
},
{
"operation": "DELETE",
"action": \"""" + action + """",
"targets": [
{
"role": \"""" + target_role + """"
}
]
},
{
"operation": "SEARCH",
"action": \"""" + action + """",
"targets": [
{
"role": \"""" + target_role + """"
}
]
},
{
"operation": "GETRANGE",
"action": \"""" + action + """",
"targets": [
{
"role": \"""" + target_role + """"
}
]
},
{
"operation": "GETRANGEHASH",
"action": \"""" + action + """",
"targets": [
{
"role": \"""" + target_role + """"
}
]
}""" + input_records + """
]
},
"lifetime": {
"exp": \"""" + lifetime_exp + """",
"nbf": "1",
"iat": "0"
}
}
}
"""
    with open(file_name,'w') as out:
        out.write(myjson)
    logger.info("Output: %s" % myjson)

    # Sign bearer token
    Cmd = f'neofs-cli util sign bearer-token --from {file_name} --to {file_name} --key {private_key} --json'
    logger.info("Cmd: %s" % Cmd)
    try:
        complProc = subprocess.run(Cmd, check=True, universal_newlines=True,
                                   stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                                   timeout=15, shell=True)
        output = complProc.stdout
        logger.info("Output: %s" % str(output))
    except subprocess.CalledProcessError as e:
        raise Exception("command '{}' return with error (code {}): {}".format(e.cmd, e.returncode, e.output))

    return file_name
|
|
|
|
|
2020-11-30 10:43:19 +00:00
|
|
|
|
2020-11-26 08:32:18 +00:00
|
|
|
|
|
|
|
@keyword('Form BearerToken file filter for all ops')
def form_bearertoken_file_filter_for_all_ops(file_name: str, private_key: str, cid: str, action: str, target_role: str, lifetime_exp: str, matchType: str, key: str, value: str):
    """Write and sign a bearer token JSON with an object-header filter.

    Like `form_bearertoken_file_for_all_ops`, but every record except SEARCH
    additionally carries one OBJECT-header filter built from `matchType`,
    `key` and `value`. Existing eACL records of the container are spliced in,
    the file is signed in place with neofs-cli and `file_name` is returned.

    :raises Exception: when signing the token fails
    """
    # SEARCH should be allowed without filters to use GET, HEAD, DELETE, and SEARCH? Need to clarify.

    eacl = get_eacl(private_key, cid)

    # The JSON "containerID.value" field is base64, while the CID itself
    # is base58 — decode then re-encode.
    cid_base58_b = base58.b58decode(cid)
    cid_base64 = base64.b64encode(cid_base58_b).decode("utf-8")

    input_records = ""
    if eacl:
        # Cut the CLI eACL dump before its trailing "] } Signature:" block,
        # then keep everything after the opening '"records": [' so the
        # existing records can be appended to the new token below.
        res_json = re.split(r'[\s\n]+\][\s\n]+\}[\s\n]+Signature:', eacl)
        records = re.split(r'"records": \[', res_json[0])
        input_records = ",\n" + records[1]

    # Hand-built JSON template; note SEARCH intentionally has no filter.
    myjson = """
{
"body": {
"eaclTable": {
"containerID": {
"value": \"""" + str(cid_base64) + """"
},
"records": [
{
"operation": "GET",
"action": \"""" + action + """",
"filters": [
{
"headerType": "OBJECT",
"matchType": \"""" + matchType + """",
"key": \"""" + key + """",
"value": \"""" + value + """"
}
],
"targets": [
{
"role": \"""" + target_role + """"
}
]
},
{
"operation": "PUT",
"action": \"""" + action + """",
"filters": [
{
"headerType": "OBJECT",
"matchType": \"""" + matchType + """",
"key": \"""" + key + """",
"value": \"""" + value + """"
}
],
"targets": [
{
"role": \"""" + target_role + """"
}
]
},
{
"operation": "HEAD",
"action": \"""" + action + """",
"filters": [
{
"headerType": "OBJECT",
"matchType": \"""" + matchType + """",
"key": \"""" + key + """",
"value": \"""" + value + """"
}
],
"targets": [
{
"role": \"""" + target_role + """"
}
]
},
{
"operation": "DELETE",
"action": \"""" + action + """",
"filters": [
{
"headerType": "OBJECT",
"matchType": \"""" + matchType + """",
"key": \"""" + key + """",
"value": \"""" + value + """"
}
],
"targets": [
{
"role": \"""" + target_role + """"
}
]
},
{
"operation": "SEARCH",
"action": \"""" + action + """",
"targets": [
{
"role": \"""" + target_role + """"
}
]
},
{
"operation": "GETRANGE",
"action": \"""" + action + """",
"filters": [
{
"headerType": "OBJECT",
"matchType": \"""" + matchType + """",
"key": \"""" + key + """",
"value": \"""" + value + """"
}
],
"targets": [
{
"role": \"""" + target_role + """"
}
]
},
{
"operation": "GETRANGEHASH",
"action": \"""" + action + """",
"filters": [
{
"headerType": "OBJECT",
"matchType": \"""" + matchType + """",
"key": \"""" + key + """",
"value": \"""" + value + """"
}
],
"targets": [
{
"role": \"""" + target_role + """"
}
]
}""" + input_records + """
]
},
"lifetime": {
"exp": \"""" + lifetime_exp + """",
"nbf": "1",
"iat": "0"
}
}
}
"""
    with open(file_name,'w') as out:
        out.write(myjson)
    logger.info("Output: %s" % myjson)

    # Sign bearer token
    Cmd = f'neofs-cli util sign bearer-token --from {file_name} --to {file_name} --key {private_key} --json'
    logger.info("Cmd: %s" % Cmd)
    try:
        complProc = subprocess.run(Cmd, check=True, universal_newlines=True,
                                   stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                                   timeout=15, shell=True)
        output = complProc.stdout
        logger.info("Output: %s" % str(output))
    except subprocess.CalledProcessError as e:
        raise Exception("command '{}' return with error (code {}): {}".format(e.cmd, e.returncode, e.output))

    return file_name
|
2020-08-19 22:31:16 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
2020-11-26 08:32:18 +00:00
|
|
|
@keyword('Form eACL json file')
def form_eacl_json_file(file_name: str, operation: str, action: str, matchType: str, key: str, value: str, target_role: str):
    """Write a single-record eACL JSON file and return its path.

    The record applies `action` to `operation` for `target_role`, guarded by
    one OBJECT-header filter built from `matchType`, `key` and `value`.
    """
    eacl_json = """
{
"records": [
{
"operation": \"""" + operation + """",
"action": \"""" + action + """",
"filters": [
{
"headerType": "OBJECT",
"matchType": \"""" + matchType + """",
"key": \"""" + key + """",
"value": \"""" + value + """"
}
],
"targets": [
{
"role": \"""" + target_role + """"
}
]
}
]
}
"""
    with open(file_name, 'w') as eacl_file:
        eacl_file.write(eacl_json)
    logger.info("Output: %s" % eacl_json)

    return file_name
|
2020-08-13 22:09:00 +00:00
|
|
|
|
|
|
|
|
2020-07-14 00:05:22 +00:00
|
|
|
|
|
|
|
|
|
|
|
@keyword('Get Range')
def get_range(private_key: str, cid: str, oid: str, range_file: str, bearer: str, range_cut: str):
    """Download a byte range of an object into `range_file` via neofs-cli.

    :param bearer: optional bearer token file; empty string means none
    :param range_cut: range spec passed to the CLI, e.g. '0:10'
    :raises Exception: when the CLI command fails
    """
    bearer_token = f"--bearer {bearer}" if bearer else ""

    range_cmd = f'neofs-cli --rpc-endpoint {NEOFS_ENDPOINT} --key {private_key} object range --cid {cid} --oid {oid} {bearer_token} --range {range_cut} --file {range_file} '
    logger.info("Cmd: %s" % range_cmd)
    try:
        proc = subprocess.run(range_cmd, check=True, universal_newlines=True,
                              stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                              timeout=150, shell=True)
        logger.info("Output: %s" % str(proc.stdout))
    except subprocess.CalledProcessError as e:
        raise Exception("command '{}' return with error (code {}): {}".format(e.cmd, e.returncode, e.output))
|
2020-07-14 00:05:22 +00:00
|
|
|
|
|
|
|
|
2020-07-01 02:28:31 +00:00
|
|
|
@keyword('Create container')
def create_container(private_key: str, basic_acl:str="", rule:str="REP 2 IN X CBF 1 SELECT 2 FROM * AS X"):
    """Create a container with the given placement policy and return its CID.

    :param basic_acl: optional basic ACL value; prefixed with --basic-acl when set
    :param rule: placement policy string
    :return: container ID parsed from the CLI output
    """
    if basic_acl != "":
        basic_acl = "--basic-acl " + basic_acl

    create_cmd = f'neofs-cli --rpc-endpoint {NEOFS_ENDPOINT} --key {private_key} container create --policy "{rule}" {basic_acl} --await'
    logger.info("Cmd: %s" % create_cmd)
    proc = subprocess.run(create_cmd, check=True, universal_newlines=True,
                          stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                          timeout=300, shell=True)
    cli_output = proc.stdout
    logger.info("Output: %s" % cli_output)

    cid = _parse_cid(cli_output)
    logger.info("Created container %s with rule '%s'" % (cid, rule))
    return cid
|
|
|
|
|
|
|
|
|
2020-12-11 11:35:02 +00:00
|
|
|
@keyword('Container List')
def container_list(private_key: str):
    """List container IDs owned by the key via `neofs-cli container list`.

    :param private_key: key whose containers are listed
    :return: list of base58 container IDs parsed from the CLI output
    """
    Cmd = f'neofs-cli --rpc-endpoint {NEOFS_ENDPOINT} --key {private_key} container list'
    logger.info("Cmd: %s" % Cmd)
    complProc = subprocess.run(Cmd, check=True, universal_newlines=True,
                               stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                               timeout=15, shell=True)
    logger.info("Output: %s" % complProc.stdout)

    # Fix: the local previously shadowed the function's own name `container_list`.
    # Container IDs are 43-44 base58 word characters.
    containers = re.findall(r'(\w{43,44})', complProc.stdout)

    logger.info("Containers list: %s" % containers)
    return containers
|
|
|
|
|
|
|
|
|
2020-07-01 02:28:31 +00:00
|
|
|
@keyword('Container Existing')
def container_existing(private_key: str, cid: str):
    """Assert that container `cid` appears in the owner's container list.

    Delegates the actual check to `_find_cid`, which raises on a miss.
    """
    listing_cmd = f'neofs-cli --rpc-endpoint {NEOFS_ENDPOINT} --key {private_key} container list'
    logger.info("Cmd: %s" % listing_cmd)
    proc = subprocess.run(listing_cmd, check=True, universal_newlines=True,
                          stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                          timeout=15, shell=True)
    logger.info("Output: %s" % proc.stdout)

    _find_cid(proc.stdout, cid)
    return
|
|
|
|
|
|
|
|
|
|
|
|
@keyword('Generate file of bytes')
def generate_file_of_bytes(size):
    """Generate a binary file filled with random bytes.

    :param size: the size in bytes, can be declared as 6e+6 for example
    :return: absolute path of the generated file
    """
    byte_count = int(float(size))

    # Random name keeps parallel test runs from colliding.
    filename = str(uuid.uuid4())
    with open('%s'%filename, 'wb') as fout:
        fout.write(os.urandom(byte_count))

    logger.info("Random binary file with size %s bytes has been generated." % str(byte_count))
    return os.path.abspath(os.getcwd()) + '/' + filename
|
2020-07-01 02:28:31 +00:00
|
|
|
|
|
|
|
|
|
|
|
@keyword('Search object')
def search_object(private_key: str, cid: str, keys: str, bearer: str, filters: str, *expected_objects_list ):
    """Search objects in a container, optionally verifying the result set.

    :param keys: extra CLI flags inserted verbatim into the search command
    :param bearer: optional bearer token file; empty string means none
    :param filters: optional search filters; empty string means none
    :param expected_objects_list: when given, the found IDs must equal this set
    :return: list of object IDs parsed from the CLI output
    :raises Exception: on CLI failure or an expected-list mismatch
    """
    bearer_token = f"--bearer {bearer}" if bearer else ""
    if filters:
        filters = f"--filters {filters}"

    ObjectCmd = f'neofs-cli --rpc-endpoint {NEOFS_ENDPOINT} --key {private_key} object search {keys} --cid {cid} {bearer_token} {filters}'
    logger.info("Cmd: %s" % ObjectCmd)
    try:
        proc = subprocess.run(ObjectCmd, check=True, universal_newlines=True,
                              stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                              timeout=15, shell=True)
        logger.info("Output: %s" % proc.stdout)

        # Object IDs are 43-44 base58 word characters.
        found_objects = re.findall(r'(\w{43,44})', proc.stdout)

        if expected_objects_list:
            if sorted(found_objects) == sorted(expected_objects_list):
                logger.info("Found objects list '{}' is equal for expected list '{}'".format(found_objects, expected_objects_list))
            else:
                raise Exception("Found object list '{}' is not equal to expected list '{}'".format(found_objects, expected_objects_list))

        return found_objects

    except subprocess.CalledProcessError as e:
        raise Exception("command '{}' return with error (code {}): {}".format(e.cmd, e.returncode, e.output))
|
2020-07-01 02:28:31 +00:00
|
|
|
|
2020-12-11 11:35:02 +00:00
|
|
|
|
|
|
|
|
2020-11-28 03:41:35 +00:00
|
|
|
'''
|
2020-07-01 02:28:31 +00:00
|
|
|
@keyword('Verify Head Tombstone')
|
2020-11-18 15:15:57 +00:00
|
|
|
def verify_head_tombstone(private_key: str, cid: str, oid: str):
|
2020-07-01 02:28:31 +00:00
|
|
|
|
2020-11-28 03:41:35 +00:00
|
|
|
ObjectCmd = f'neofs-cli --rpc-endpoint {NEOFS_ENDPOINT} --key {private_key} object head --cid {cid} --oid {oid} --full-headers'
|
2020-07-01 02:28:31 +00:00
|
|
|
logger.info("Cmd: %s" % ObjectCmd)
|
|
|
|
try:
|
|
|
|
complProc = subprocess.run(ObjectCmd, check=True, universal_newlines=True,
|
|
|
|
stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=15, shell=True)
|
|
|
|
logger.info("Output: %s" % complProc.stdout)
|
|
|
|
|
|
|
|
if re.search(r'Type=Tombstone\s+Value=MARKED', complProc.stdout):
|
|
|
|
logger.info("Tombstone header 'Type=Tombstone Value=MARKED' was parsed from command output")
|
|
|
|
else:
|
|
|
|
raise Exception("Tombstone header 'Type=Tombstone Value=MARKED' was not found in the command output: \t%s" % (complProc.stdout))
|
2020-11-30 10:33:05 +00:00
|
|
|
|
2020-07-01 02:28:31 +00:00
|
|
|
except subprocess.CalledProcessError as e:
|
|
|
|
raise Exception("command '{}' return with error (code {}): {}".format(e.cmd, e.returncode, e.output))
|
|
|
|
|
|
|
|
@keyword('Verify linked objects')
|
|
|
|
def verify_linked_objects(private_key: bytes, cid: str, oid: str, payload_size: float):
|
2020-11-30 10:33:05 +00:00
|
|
|
|
2020-07-01 02:28:31 +00:00
|
|
|
payload_size = int(float(payload_size))
|
|
|
|
|
|
|
|
# Get linked objects from first
|
|
|
|
postfix = f'object head --cid {cid} --oid {oid} --full-headers'
|
|
|
|
output = _exec_cli_cmd(private_key, postfix)
|
|
|
|
child_obj_list = []
|
2020-11-30 10:33:05 +00:00
|
|
|
|
2020-07-01 02:28:31 +00:00
|
|
|
for m in re.finditer(r'Type=Child ID=([\w-]+)', output):
|
|
|
|
child_obj_list.append(m.group(1))
|
|
|
|
|
|
|
|
if not re.search(r'PayloadLength=0', output):
|
|
|
|
raise Exception("Payload is not equal to zero in the parent object %s." % obj)
|
|
|
|
|
|
|
|
if not child_obj_list:
|
|
|
|
raise Exception("Child objects was not found.")
|
|
|
|
else:
|
|
|
|
logger.info("Child objects: %s" % child_obj_list)
|
2020-11-30 10:33:05 +00:00
|
|
|
|
2020-07-01 02:28:31 +00:00
|
|
|
# HEAD and validate each child object:
|
|
|
|
payload = 0
|
|
|
|
parent_id = "00000000-0000-0000-0000-000000000000"
|
|
|
|
first_obj = None
|
|
|
|
child_obj_list_headers = {}
|
|
|
|
|
|
|
|
for obj in child_obj_list:
|
|
|
|
postfix = f'object head --cid {cid} --oid {obj} --full-headers'
|
|
|
|
output = _exec_cli_cmd(private_key, postfix)
|
|
|
|
child_obj_list_headers[obj] = output
|
|
|
|
if re.search(r'Type=Previous ID=00000000-0000-0000-0000-000000000000', output):
|
|
|
|
first_obj = obj
|
|
|
|
logger.info("First child object %s has been found" % first_obj)
|
|
|
|
|
|
|
|
if not first_obj:
|
|
|
|
raise Exception("Can not find first object with zero Parent ID.")
|
|
|
|
else:
|
2020-11-30 10:33:05 +00:00
|
|
|
|
2020-07-01 02:28:31 +00:00
|
|
|
_check_linked_object(first_obj, child_obj_list_headers, payload_size, payload, parent_id)
|
|
|
|
|
|
|
|
return child_obj_list_headers.keys()
|
|
|
|
|
2020-11-28 03:41:35 +00:00
|
|
|
|
2020-07-01 02:28:31 +00:00
|
|
|
def _check_linked_object(obj:str, child_obj_list_headers:dict, payload_size:int, payload:int, parent_id:str):
|
|
|
|
|
|
|
|
output = child_obj_list_headers[obj]
|
|
|
|
logger.info("Verify headers of the child object %s" % obj)
|
|
|
|
|
|
|
|
if not re.search(r'Type=Previous ID=%s' % parent_id, output):
|
|
|
|
raise Exception("Incorrect previos ID %s in the child object %s." % parent_id, obj)
|
|
|
|
else:
|
|
|
|
logger.info("Previous ID is equal for expected: %s" % parent_id)
|
|
|
|
|
|
|
|
m = re.search(r'PayloadLength=(\d+)', output)
|
2020-11-30 10:33:05 +00:00
|
|
|
if m.start() != m.end():
|
2020-07-01 02:28:31 +00:00
|
|
|
payload += int(m.group(1))
|
|
|
|
else:
|
|
|
|
raise Exception("Can not get payload for the object %s." % obj)
|
2020-11-30 10:33:05 +00:00
|
|
|
|
2020-07-01 02:28:31 +00:00
|
|
|
if payload > payload_size:
|
|
|
|
raise Exception("Payload exceeds expected total payload %s." % payload_size)
|
|
|
|
|
|
|
|
elif payload == payload_size:
|
|
|
|
if not re.search(r'Type=Next ID=00000000-0000-0000-0000-000000000000', output):
|
|
|
|
raise Exception("Incorrect previos ID in the last child object %s." % obj)
|
|
|
|
else:
|
|
|
|
logger.info("Next ID is correct for the final child object: %s" % obj)
|
2020-11-30 10:33:05 +00:00
|
|
|
|
2020-07-01 02:28:31 +00:00
|
|
|
else:
|
|
|
|
m = re.search(r'Type=Next ID=([\w-]+)', output)
|
2020-11-30 10:33:05 +00:00
|
|
|
if m:
|
2020-07-01 02:28:31 +00:00
|
|
|
# next object should be in the expected list
|
|
|
|
logger.info(m.group(1))
|
|
|
|
if m.group(1) not in child_obj_list_headers.keys():
|
|
|
|
raise Exception(f'Next object {m.group(1)} is not in the expected list: {child_obj_list_headers.keys()}.')
|
|
|
|
else:
|
|
|
|
logger.info(f'Next object {m.group(1)} is in the expected list: {child_obj_list_headers.keys()}.')
|
|
|
|
|
|
|
|
_check_linked_object(m.group(1), child_obj_list_headers, payload_size, payload, obj)
|
|
|
|
|
|
|
|
else:
|
|
|
|
raise Exception("Can not get Next object ID for the object %s." % obj)
|
|
|
|
|
2020-11-28 03:41:35 +00:00
|
|
|
'''
|
|
|
|
|
2020-07-01 02:28:31 +00:00
|
|
|
|
|
|
|
@keyword('Head object')
def head_object(private_key: str, cid: str, oid: str, bearer: str, user_headers:str=""):
    """HEAD an object and verify the expected user headers are present.

    :param bearer: optional bearer token file; empty string means none
    :param user_headers: comma-separated header keys to look for in the output
    :return: raw CLI output of the head command
    :raises Exception: on CLI failure or when a requested header is missing
    """
    options = ""

    bearer_token = ""
    if bearer:
        bearer_token = f"--bearer {bearer}"

    ObjectCmd = f'neofs-cli --rpc-endpoint {NEOFS_ENDPOINT} --key {private_key} object head --cid {cid} --oid {oid} {bearer_token} {options}'
    logger.info("Cmd: %s" % ObjectCmd)
    try:
        complProc = subprocess.run(ObjectCmd, check=True, universal_newlines=True,
                                   stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                                   timeout=15, shell=True)
        logger.info("Output: %s" % complProc.stdout)

        # Fix: "".split(",") yields [''] and an empty key matches everything,
        # logging a bogus "parsed" line — only check when headers were given.
        if user_headers:
            for key in user_headers.split(","):
                if re.search(r'(%s)' % key, complProc.stdout):
                    logger.info("User header %s was parsed from command output" % key)
                else:
                    raise Exception("User header %s was not found in the command output: \t%s" % (key, complProc.stdout))

        return complProc.stdout

    except subprocess.CalledProcessError as e:
        raise Exception("command '{}' return with error (code {}): {}".format(e.cmd, e.returncode, e.output))
|
|
|
|
|
2020-11-30 10:33:05 +00:00
|
|
|
|
2020-08-13 22:09:00 +00:00
|
|
|
|
|
|
|
|
2020-08-19 22:31:16 +00:00
|
|
|
@keyword('Parse Object System Header')
def parse_object_system_header(header: str):
    """
    Parse the system-header fields from 'object head' CLI output.

    Parameters:
    - header: raw header text as printed by neofs-cli

    Returns a dict with keys: ID, CID, OwnerID, PayloadLength,
    CreatedAtUnixTime, CreatedAtEpoch.
    Raises Exception when any expected field cannot be found.
    """
    logger.info("Input: %s" % header)

    def _extract(field_name: str, pattern: str):
        # All six fields follow the same find-or-fail rule. The previous
        # per-field copies referenced an undefined name 'output' in their
        # error messages (NameError) and crashed with AttributeError when
        # re.search returned None instead of raising this Exception.
        m = re.search(pattern, header)
        if m is None or m.start() == m.end():
            raise Exception("no %s was parsed from object header: \t%s" % (field_name, header))
        return m.group(1)

    result_header = {
        'ID': _extract('ID', r'ID: (\w+)'),
        'CID': _extract('CID', r'CID: (\w+)'),
        'OwnerID': _extract('OwnerID', r'Owner: ([a-zA-Z0-9]+)'),
        'PayloadLength': _extract('PayloadLength', r'Size: (\d+)'),
        'CreatedAtUnixTime': _extract('CreatedAtUnixTime', r'Timestamp=(\d+)'),
        'CreatedAtEpoch': _extract('CreatedAtEpoch', r'CreatedAt: (\d+)'),
    }

    logger.info("Result: %s" % result_header)
    return result_header
|
2020-08-13 22:09:00 +00:00
|
|
|
|
2020-07-01 02:28:31 +00:00
|
|
|
@keyword('Delete object')
def delete_object(private_key: str, cid: str, oid: str, bearer: str):
    """
    Delete object <oid> from container <cid> via neofs-cli.

    Parameters:
    - private_key: hex-encoded key passed via --key
    - cid: container ID
    - oid: object ID
    - bearer: path to a bearer token file; empty string to skip

    Raises Exception when the CLI command fails.
    """
    bearer_token = f"--bearer {bearer}" if bearer else ""

    ObjectCmd = f'neofs-cli --rpc-endpoint {NEOFS_ENDPOINT} --key {private_key} object delete --cid {cid} --oid {oid} {bearer_token}'
    logger.info("Cmd: %s" % ObjectCmd)

    try:
        complProc = subprocess.run(ObjectCmd, check=True, universal_newlines=True,
            stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=30, shell=True)
    except subprocess.CalledProcessError as e:
        raise Exception("command '{}' return with error (code {}): {}".format(e.cmd, e.returncode, e.output))
    logger.info("Output: %s" % complProc.stdout)
|
2020-07-01 02:28:31 +00:00
|
|
|
|
|
|
|
|
2020-12-11 11:35:02 +00:00
|
|
|
@keyword('Get file name')
|
|
|
|
def get_file_name(filepath):
    """Return the trailing path component of *filepath* (may be '' for a dir path)."""
    return os.path.basename(filepath)
|
|
|
|
|
2020-07-01 02:28:31 +00:00
|
|
|
@keyword('Get file hash')
def get_file_hash(filename):
    """Return the MD5 hex digest of the file at *filename* (see _get_file_hash)."""
    return _get_file_hash(filename)
|
|
|
|
|
|
|
|
|
|
|
|
@keyword('Verify file hash')
def verify_file_hash(filename, expected_hash):
    """
    Compare the MD5 digest of *filename* against *expected_hash*.

    Raises Exception on mismatch; logs the digest when it matches.
    """
    actual_hash = _get_file_hash(filename)
    if actual_hash != expected_hash:
        raise Exception("File hash '{}' is not equal to {}".format(actual_hash, expected_hash))
    logger.info("Hash is equal to expected: %s" % actual_hash)
|
|
|
|
|
2020-11-30 10:33:05 +00:00
|
|
|
|
2020-12-02 23:08:05 +00:00
|
|
|
@keyword('Cleanup Files')
def cleanup_file(*filename_list):
    """
    Delete each file in *filename_list* from the local filesystem.

    Missing files are reported with a warning and skipped; OS-level
    removal failures abort the keyword with an Exception.
    """
    for filename in filename_list:
        if os.path.isfile(filename):
            try:
                os.remove(filename)
            except OSError as e:
                raise Exception("Error: '%s' - %s." % (e.filename, e.strerror))
            # Log success only for files actually removed; previously this
            # message was emitted at loop level, so missing files were
            # reported as deleted too.
            logger.info("File '%s' has been deleted." % filename)
        else:
            logger.warn("Error: '%s' file not found" % filename)
|
2020-07-01 02:28:31 +00:00
|
|
|
|
|
|
|
|
|
|
|
@keyword('Put object to NeoFS')
def put_object(private_key: str, path: str, cid: str, bearer: str, user_headers: str, endpoint: str="" ):
    """
    Upload the file at *path* into container <cid> and return the new OID.

    Parameters:
    - private_key: hex-encoded key passed via --key
    - path: local file to upload
    - cid: target container ID
    - bearer: path to a bearer token file; empty string to skip
    - user_headers: attributes string for --attributes; empty to skip
    - endpoint: storage node to talk to; a random node from the netmap
      is chosen when empty

    Raises Exception when the CLI command fails.
    """
    logger.info("Going to put the object")

    if not endpoint:
        # No node pinned by the caller: pick one from the configured netmap.
        endpoint = random.sample(_get_storage_nodes(private_key), 1)[0]

    if user_headers:
        user_headers = f"--attributes {user_headers}"
    if bearer:
        bearer = f"--bearer {bearer}"

    putObjectCmd = f'neofs-cli --rpc-endpoint {endpoint} --key {private_key} object put --file {path} --cid {cid} {bearer} {user_headers}'
    logger.info("Cmd: %s" % putObjectCmd)

    try:
        complProc = subprocess.run(putObjectCmd, check=True, universal_newlines=True,
            stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=60, shell=True)
    except subprocess.CalledProcessError as e:
        raise Exception("command '{}' return with error (code {}): {}".format(e.cmd, e.returncode, e.output))
    logger.info("Output: %s" % complProc.stdout)
    return _parse_oid(complProc.stdout)
|
|
|
|
|
2020-07-01 02:28:31 +00:00
|
|
|
|
|
|
|
|
2020-11-18 15:15:57 +00:00
|
|
|
@keyword('Get Range Hash')
def get_range_hash(private_key: str, cid: str, oid: str, bearer_token: str, range_cut: str):
    """
    Request the hash of a payload range of object <oid> via neofs-cli.

    Parameters:
    - private_key: hex-encoded key passed via --key
    - cid: container ID
    - oid: object ID
    - bearer_token: path to a bearer token file; empty string to skip
    - range_cut: range specification passed to --range (e.g. "0:10")

    Raises Exception when the CLI command fails.
    """
    if bearer_token:
        # Fixed: this previously interpolated an undefined name 'bearer',
        # raising NameError whenever a bearer token was supplied.
        bearer_token = f"--bearer {bearer_token}"

    ObjectCmd = f'neofs-cli --rpc-endpoint {NEOFS_ENDPOINT} --key {private_key} object hash --cid {cid} --oid {oid} --range {range_cut} {bearer_token}'
    logger.info("Cmd: %s" % ObjectCmd)
    try:
        complProc = subprocess.run(ObjectCmd, check=True, universal_newlines=True,
            stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=60, shell=True)
        logger.info("Output: %s" % complProc.stdout)
    except subprocess.CalledProcessError as e:
        raise Exception("command '{}' return with error (code {}): {}".format(e.cmd, e.returncode, e.output))
|
2020-09-01 03:23:17 +00:00
|
|
|
|
|
|
|
|
2020-12-04 12:28:59 +00:00
|
|
|
@keyword('Get object from NeoFS')
def get_object(private_key: str, cid: str, oid: str, bearer_token: str, read_object: str, endpoint: str="" ):
    """
    Download object <oid> from container <cid> into the file *read_object*.

    Parameters:
    - private_key: hex-encoded key passed via --key
    - cid: container ID
    - oid: object ID
    - bearer_token: path to a bearer token file; empty string to skip
    - read_object: local path the object payload is written to
    - endpoint: storage node to talk to; a random node from the netmap
      is chosen when empty

    Raises Exception when the CLI command fails.
    """
    # TODO: add object return instead of read_object (uuid)

    # Fixed: log message said "put" — copy-paste from put_object.
    logger.info("Going to get the object")

    if not endpoint:
        endpoint = random.sample(_get_storage_nodes(private_key), 1)[0]

    if bearer_token:
        bearer_token = f"--bearer {bearer_token}"

    ObjectCmd = f'neofs-cli --rpc-endpoint {endpoint} --key {private_key} object get --cid {cid} --oid {oid} --file {read_object} {bearer_token}'
    logger.info("Cmd: %s" % ObjectCmd)
    try:
        complProc = subprocess.run(ObjectCmd, check=True, universal_newlines=True,
            stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=60, shell=True)
        logger.info("Output: %s" % complProc.stdout)
    except subprocess.CalledProcessError as e:
        raise Exception("command '{}' return with error (code {}): {}".format(e.cmd, e.returncode, e.output))
|
|
|
|
|
|
|
|
|
2020-11-28 03:41:35 +00:00
|
|
|
|
|
|
|
def _exec_cli_cmd(private_key: bytes, postfix: str):
    """
    Run a raw neofs-cli command with the hex-encoded key and return stdout.

    Parameters:
    - private_key: raw key bytes, hexlified for --key
    - postfix: command tail appended after the common options

    Raises Exception when the CLI command fails.
    """
    ObjectCmd = f'{CLI_PREFIX}neofs-cli --raw --host {NEOFS_ENDPOINT} --key {binascii.hexlify(private_key).decode()} {postfix}'
    logger.info("Cmd: %s" % ObjectCmd)

    try:
        complProc = subprocess.run(ObjectCmd, check=True, universal_newlines=True,
            stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=15, shell=True)
    except subprocess.CalledProcessError as e:
        raise Exception("command '{}' return with error (code {}): {}".format(e.cmd, e.returncode, e.output))

    logger.info("Output: %s" % complProc.stdout)
    return complProc.stdout
|
|
|
|
|
|
|
|
|
2020-07-01 02:28:31 +00:00
|
|
|
def _get_file_hash(filename):
    """
    Return the MD5 hex digest of *filename*, read in 64 KiB blocks.

    Streams the file so arbitrarily large inputs do not load into memory.
    """
    blocksize = 65536
    digest = hashlib.md5()  # renamed from 'hash', which shadowed the builtin
    with open(filename, "rb") as f:
        for block in iter(lambda: f.read(blocksize), b""):
            digest.update(block)
    logger.info("Hash: %s" % digest.hexdigest())

    return digest.hexdigest()
|
|
|
|
|
|
|
|
def _find_cid(output: str, cid: str):
    """
    Verify that the given CID occurs in CLI output and return it.

    Parameters:
    - output: a string with command run output
    - cid: container ID expected to appear in the output

    Raises Exception when the CID is absent.
    """
    # re.escape: treat the CID as a literal, not a regex pattern, so
    # metacharacters in the input cannot corrupt (or crash) the search.
    if re.search(r'(%s)' % re.escape(cid), output):
        logger.info("CID %s was parsed from command output: \t%s" % (cid, output))
    else:
        raise Exception("no CID %s was parsed from command output: \t%s" % (cid, output))
    return cid
|
|
|
|
|
|
|
|
def _parse_oid(output: str):
|
|
|
|
"""
|
|
|
|
This function parses OID from given CLI output.
|
|
|
|
Parameters:
|
|
|
|
- output: a string with command run output
|
|
|
|
"""
|
|
|
|
m = re.search(r'ID: ([a-zA-Z0-9-]+)', output)
|
|
|
|
if m.start() != m.end(): # e.g., if match found something
|
|
|
|
oid = m.group(1)
|
|
|
|
else:
|
|
|
|
raise Exception("no OID was parsed from command output: \t%s" % output)
|
2020-11-30 10:33:05 +00:00
|
|
|
|
2020-07-01 02:28:31 +00:00
|
|
|
return oid
|
|
|
|
|
|
|
|
def _parse_cid(output: str):
|
|
|
|
"""
|
|
|
|
This function parses CID from given CLI output.
|
|
|
|
Parameters:
|
|
|
|
- output: a string with command run output
|
|
|
|
"""
|
2020-11-18 15:15:57 +00:00
|
|
|
m = re.search(r'container ID: (\w+)', output)
|
|
|
|
if not m.start() != m.end(): # e.g., if match found something
|
2020-07-01 02:28:31 +00:00
|
|
|
raise Exception("no CID was parsed from command output: \t%s" % (output))
|
2020-11-18 15:15:57 +00:00
|
|
|
cid = m.group(1)
|
2020-11-30 10:33:05 +00:00
|
|
|
|
2020-07-01 02:28:31 +00:00
|
|
|
return cid
|
|
|
|
|
|
|
|
def _get_storage_nodes(private_key: bytes):
    """
    Return the list of known storage node endpoints.

    Currently a static list from configuration (NEOFS_NETMAP); the
    *private_key* parameter is kept for interface compatibility with the
    planned CLI-based netmap query.
    """
    # TODO: query the live netmap once 'netmap' is available in neofs-cli
    # (the old commented-out CLI invocation was removed; see VCS history).
    logger.info("Storage nodes: %s" % NEOFS_NETMAP)
    return NEOFS_NETMAP
|
2020-07-01 02:28:31 +00:00
|
|
|
|
|
|
|
|
2020-11-18 15:15:57 +00:00
|
|
|
def _search_object(node:str, private_key: str, cid:str, oid: str):
    """
    Search for object <oid> in container <cid> on a specific node.

    Parameters:
    - node: storage node endpoint to query (--ttl 1 keeps it local)
    - private_key: hex-encoded key passed via --key
    - cid: container ID
    - oid: object ID; when empty, the search is not restricted by OID

    Returns the OID when found, None otherwise. A node that is outside
    the object placement or unreachable is logged and tolerated; any
    other CLI failure raises Exception.
    """
    # Fixed: oid_cmd was only assigned inside 'if oid', so an empty oid
    # raised NameError when building the command string below.
    oid_cmd = ""
    if oid:
        oid_cmd = "--oid %s" % oid
    Cmd = f'{CLI_PREFIX}neofs-cli --rpc-endpoint {node} --key {private_key} --ttl 1 object search --root --cid {cid} {oid_cmd}'

    try:
        logger.info(Cmd)
        complProc = subprocess.run(Cmd, check=True, universal_newlines=True,
            stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=30, shell=True)
        logger.info("Output: %s" % complProc.stdout)

        if re.search(r'%s' % oid, complProc.stdout):
            return oid
        else:
            logger.info("Object is not found.")

    except subprocess.CalledProcessError as e:
        if re.search(r'local node is outside of object placement', e.output):
            logger.info("Server is not presented in container.")
        elif ( re.search(r'timed out after 30 seconds', e.output) or re.search(r'no route to host', e.output) ):
            logger.warn("Node is unavailable")
        else:
            raise Exception("command '{}' return with error (code {}): {}".format(e.cmd, e.returncode, e.output))
|
|
|
|
|
|
|
|
|
2020-11-30 10:33:05 +00:00
|
|
|
|