#!/usr/bin/python3.7

import base64
import binascii
import hashlib
import json
import os
import random
import re
import shutil
import subprocess
import tarfile
import time
import uuid
from datetime import datetime

import base58
import docker
from robot.api import logger
from robot.api.deco import keyword

from common import *

ROBOT_AUTO_KEYWORDS = False

# path to neofs-cli executable
NEOFS_CLI_EXEC = os.getenv('NEOFS_CLI_EXEC', 'neofs-cli')

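# NEOFS_ENDPOINT, NEOFS_NETMAP and TEMP_DIR are referenced throughout the
# keywords below; they are expected to come from the star-import of `common`
# above rather than being defined in this file.
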
@keyword('Form WIF from String')
def form_wif_from_string(private_key: str):
    wif = ""
    Cmd = f'{NEOFS_CLI_EXEC} util keyer {private_key}'
    logger.info("Cmd: %s" % Cmd)
    complProc = subprocess.run(Cmd, check=True, universal_newlines=True,
            stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=150, shell=True)
    output = complProc.stdout
    logger.info("Output: %s" % output)

    m = re.search(r'WIF\s+(\w+)', output)
    if m is not None:
        wif = m.group(1)
    else:
        raise Exception("Can not get WIF.")

    return wif


@keyword('Get ScriptHash')
def get_scripthash(privkey: str):
    scripthash = ""
    Cmd = f'{NEOFS_CLI_EXEC} util keyer -u {privkey}'
    logger.info("Cmd: %s" % Cmd)
    complProc = subprocess.run(Cmd, check=True, universal_newlines=True,
            stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=150, shell=True)
    output = complProc.stdout
    logger.info("Output: %s" % output)

    m = re.search(r'ScriptHash3.0\s+(\w+)', output)
    if m is not None:
        scripthash = m.group(1)
    else:
        raise Exception("Can not get ScriptHash.")

    return scripthash
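

# Note (assumption about the CLI output): the two keyer-based keywords above
# expect `neofs-cli util keyer` to print lines labelled "WIF" and
# "ScriptHash3.0" followed by whitespace and the value; the regexes rely only
# on that label-plus-whitespace shape, not on exact column alignment.
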
@keyword('Stop nodes')
def stop_nodes(down_num: int, *nodes_list):
    # select nodes to stop from the list
    nodes_to_stop = random.sample(nodes_list, down_num)

    client = docker.APIClient()
    for node in nodes_to_stop:
        m = re.search(r'(s\d+).', node)
        node = m.group(1)
        client.stop(node)

    return nodes_to_stop


@keyword('Start nodes')
def start_nodes(*nodes_list):
    client = docker.APIClient()
    for node in nodes_list:
        m = re.search(r'(s\d+).', node)
        node = m.group(1)
        client.start(node)
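

# Note: the node management keywords above use docker.APIClient(), which by
# default talks to the local Docker daemon, and derive the container name
# (e.g. "s01") from the node address via the r'(s\d+).' pattern, so storage
# nodes are assumed to run as containers with such names on the test host.
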

@keyword('Get nodes with object')
def get_nodes_with_object(private_key: str, cid: str, oid: str):
    storage_nodes = _get_storage_nodes()
    nodes_list = []

    for node in storage_nodes:
        search_res = _search_object(node, private_key, cid, oid)
        if search_res:
            if re.search(r'(%s)' % oid, search_res):
                nodes_list.append(node)

    logger.info("Nodes with object: %s" % nodes_list)
    return nodes_list


@keyword('Get nodes without object')
def get_nodes_without_object(private_key: str, cid: str, oid: str):
    storage_nodes = _get_storage_nodes()
    nodes_list = []

    for node in storage_nodes:
        search_res = _search_object(node, private_key, cid, oid)
        if search_res:
            if not re.search(r'(%s)' % oid, search_res):
                nodes_list.append(node)
        else:
            nodes_list.append(node)

    logger.info("Nodes without object: %s" % nodes_list)
    return nodes_list


@keyword('Validate storage policy for object')
def validate_storage_policy_for_object(private_key: str, expected_copies: int, cid, oid, *expected_node_list):
    storage_nodes = _get_storage_nodes()
    copies = 0
    found_nodes = []

    for node in storage_nodes:
        search_res = _search_object(node, private_key, cid, oid)
        if search_res:
            if re.search(r'(%s)' % oid, search_res):
                copies += 1
                found_nodes.append(node)

    if copies != expected_copies:
        raise Exception("Object copies do not match the storage policy. Found: %s, expected: %s." % (copies, expected_copies))
    else:
        logger.info("Found copies: %s, expected: %s" % (copies, expected_copies))

    logger.info("Found nodes: %s" % found_nodes)

    if expected_node_list:
        if sorted(found_nodes) == sorted(expected_node_list):
            logger.info("Found node list '{}' is equal to the expected list '{}'".format(found_nodes, expected_node_list))
        else:
            raise Exception("Found node list '{}' is not equal to the expected list '{}'".format(found_nodes, expected_node_list))


@keyword('Get eACL')
def get_eacl(private_key: str, cid: str):
    Cmd = (
        f'{NEOFS_CLI_EXEC} --rpc-endpoint {NEOFS_ENDPOINT} --key {private_key} '
        f'container get-eacl --cid {cid}'
    )
    logger.info("Cmd: %s" % Cmd)
    try:
        complProc = subprocess.run(Cmd, check=True, universal_newlines=True,
                stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=150, shell=True)
        output = complProc.stdout
        logger.info("Output: %s" % output)
        return output
    except subprocess.CalledProcessError as e:
        if re.search(r'extended ACL table is not set for this container', e.output):
            logger.info("Extended ACL table is not set for this container.")
        else:
            raise Exception("command '{}' returned with error (code {}): {}".format(e.cmd, e.returncode, e.output))


@keyword('Get Epoch')
def get_epoch(private_key: str):
    cmd = (
        f'{NEOFS_CLI_EXEC} --rpc-endpoint {NEOFS_ENDPOINT} --key {private_key} '
        f'netmap epoch'
    )
    logger.info(f"Cmd: {cmd}")
    try:
        complProc = subprocess.run(cmd, check=True, universal_newlines=True,
                stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=150, shell=True)
        output = complProc.stdout
        logger.info(f"Output: {output}")
        return int(output)
    except subprocess.CalledProcessError as e:
        raise Exception(f"command '{e.cmd}' returned with error (code {e.returncode}): {e.output}")


@keyword('Set eACL')
def set_eacl(private_key: str, cid: str, eacl: str, add_keys: str = ""):
    file_path = TEMP_DIR + eacl
    cmd = (
        f'{NEOFS_CLI_EXEC} --rpc-endpoint {NEOFS_ENDPOINT} --key {private_key} '
        f'container set-eacl --cid {cid} --table {file_path} {add_keys}'
    )
    logger.info(f"Cmd: {cmd}")
    try:
        complProc = subprocess.run(cmd, check=True, universal_newlines=True,
                stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=150, shell=True)
        output = complProc.stdout
        logger.info(f"Output: {output}")
    except subprocess.CalledProcessError as e:
        raise Exception(f"command '{e.cmd}' returned with error (code {e.returncode}): {e.output}")


@keyword('Form BearerToken file')
def form_bearertoken_file(private_key: str, cid: str, file_name: str, eacl_oper_list,
        lifetime_exp: str):
    cid_base58_b = base58.b58decode(cid)
    cid_base64 = base64.b64encode(cid_base58_b).decode("utf-8")
    eacl = get_eacl(private_key, cid)
    json_eacl = {}
    file_path = TEMP_DIR + file_name

    if eacl:
        res_json = re.split(r'[\s\n]+Signature:', eacl)
        input_eacl = res_json[0].replace('eACL: ', '')
        json_eacl = json.loads(input_eacl)

    eacl_result = {
        "body": {
            "eaclTable": {
                "containerID": {"value": cid_base64},
                "records": []
            },
            "lifetime": {"exp": lifetime_exp, "nbf": "1", "iat": "0"}
        }
    }

    if eacl_oper_list:
        for record in eacl_oper_list:
            op_data = dict()

            if record['Role'] in ("USER", "SYSTEM", "OTHERS"):
                op_data = {
                    "operation": record['Operation'],
                    "action": record['Access'],
                    "filters": [],
                    "targets": [{"role": record['Role']}]
                }
            else:
                op_data = {
                    "operation": record['Operation'],
                    "action": record['Access'],
                    "filters": [],
                    "targets": [{"keys": [record['Role']]}]
                }

            if 'Filters' in record.keys():
                op_data["filters"].append(record['Filters'])

            eacl_result["body"]["eaclTable"]["records"].append(op_data)

    # Add records from the current eACL
    if "records" in json_eacl.keys():
        for record in json_eacl["records"]:
            eacl_result["body"]["eaclTable"]["records"].append(record)

    with open(file_path, 'w', encoding='utf-8') as f:
        json.dump(eacl_result, f, ensure_ascii=False, indent=4)

    logger.info(eacl_result)

    # Sign the bearer token
    Cmd = (
        f'{NEOFS_CLI_EXEC} util sign bearer-token --from {file_path} '
        f'--to {file_path} --key {private_key} --json'
    )
    logger.info("Cmd: %s" % Cmd)
    try:
        complProc = subprocess.run(Cmd, check=True, universal_newlines=True,
                stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=15, shell=True)
        output = complProc.stdout
        logger.info("Output: %s" % str(output))
    except subprocess.CalledProcessError as e:
        raise Exception("command '{}' returned with error (code {}): {}".format(e.cmd, e.returncode, e.output))

    return file_path
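

# Illustrative example of an `eacl_oper_list` record consumed above and by
# 'Form eACL json common file' below (the concrete values are test data, not
# something this module defines):
#     {'Operation': 'GET', 'Access': 'DENY', 'Role': 'OTHERS', 'Filters': {...}}
# 'Role' is either USER/SYSTEM/OTHERS or a public key; 'Filters' is optional.
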

@keyword('Form eACL json common file')
def form_eacl_json_common_file(file_name, eacl_oper_list):
    # Input role can be a Role (USER, SYSTEM, OTHERS) or a public key.
    file_path = TEMP_DIR + file_name
    eacl = {"records": []}

    logger.info(eacl_oper_list)

    if eacl_oper_list:
        for record in eacl_oper_list:
            op_data = dict()

            if record['Role'] in ("USER", "SYSTEM", "OTHERS"):
                op_data = {
                    "operation": record['Operation'],
                    "action": record['Access'],
                    "filters": [],
                    "targets": [{"role": record['Role']}]
                }
            else:
                op_data = {
                    "operation": record['Operation'],
                    "action": record['Access'],
                    "filters": [],
                    "targets": [{"keys": [record['Role']]}]
                }

            if 'Filters' in record.keys():
                op_data["filters"].append(record['Filters'])

            eacl["records"].append(op_data)

    logger.info(eacl)

    with open(file_path, 'w', encoding='utf-8') as f:
        json.dump(eacl, f, ensure_ascii=False, indent=4)

    # set_eacl() prepends TEMP_DIR itself, so only the file name is returned.
    return file_name


@keyword('Get Range')
def get_range(private_key: str, cid: str, oid: str, range_file: str, bearer: str,
        range_cut: str, options: str = ""):
    bearer_token = ""
    if bearer:
        bearer_token = f"--bearer {TEMP_DIR}{bearer}"

    Cmd = (
        f'{NEOFS_CLI_EXEC} --rpc-endpoint {NEOFS_ENDPOINT} --key {private_key} '
        f'object range --cid {cid} --oid {oid} {bearer_token} --range {range_cut} '
        f'--file {TEMP_DIR}{range_file} {options}'
    )
    logger.info("Cmd: %s" % Cmd)
    try:
        complProc = subprocess.run(Cmd, check=True, universal_newlines=True,
                stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=150, shell=True)
        output = complProc.stdout
        logger.info("Output: %s" % str(output))
    except subprocess.CalledProcessError as e:
        raise Exception("command '{}' returned with error (code {}): {}".format(e.cmd, e.returncode, e.output))


@keyword('Create container')
def create_container(private_key: str, basic_acl: str, rule: str):
    if rule == "":
        logger.error("Cannot create container with empty placement rule")
    if basic_acl:
        basic_acl = f"--basic-acl {basic_acl}"

    createContainerCmd = (
        f'{NEOFS_CLI_EXEC} --rpc-endpoint {NEOFS_ENDPOINT} --key {private_key} '
        f'container create --policy "{rule}" {basic_acl} --await'
    )
    logger.info("Cmd: %s" % createContainerCmd)
    complProc = subprocess.run(createContainerCmd, check=True, universal_newlines=True,
            stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=300, shell=True)
    output = complProc.stdout
    logger.info("Output: %s" % output)

    cid = _parse_cid(output)
    logger.info("Created container %s with rule '%s'" % (cid, rule))
    return cid
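

# Usage sketch (illustrative; the placement policy string is just an example of
# NeoFS policy syntax, not something defined in this module):
#     cid = create_container(private_key, '', 'REP 2 IN X CBF 1 SELECT 2 FROM * AS X')
#     container_existing(private_key, cid)
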

@keyword('Container List')
def container_list(private_key: str):
    Cmd = (
        f'{NEOFS_CLI_EXEC} --rpc-endpoint {NEOFS_ENDPOINT} --key {private_key} '
        f'container list'
    )
    logger.info("Cmd: %s" % Cmd)
    complProc = subprocess.run(Cmd, check=True, universal_newlines=True,
            stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=15, shell=True)
    logger.info("Output: %s" % complProc.stdout)

    container_list = re.findall(r'(\w{43,44})', complProc.stdout)
    logger.info("Containers list: %s" % container_list)
    return container_list


@keyword('Container Existing')
def container_existing(private_key: str, cid: str):
    Cmd = (
        f'{NEOFS_CLI_EXEC} --rpc-endpoint {NEOFS_ENDPOINT} --key {private_key} '
        f'container list'
    )
    logger.info("Cmd: %s" % Cmd)
    complProc = subprocess.run(Cmd, check=True, universal_newlines=True,
            stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=15, shell=True)
    logger.info("Output: %s" % complProc.stdout)

    _find_cid(complProc.stdout, cid)
    return


@keyword('Search object')
def search_object(private_key: str, cid: str, keys: str, bearer: str, filters: str,
        expected_objects_list=[], options: str = ""):
    bearer_token = ""
    filters_result = ""

    # keys may be passed as None by internal callers; avoid rendering "None" in the command
    if not keys:
        keys = ""
    if bearer:
        bearer_token = f"--bearer {TEMP_DIR}{bearer}"
    if filters:
        for filter_item in filters.split(','):
            filter_item = re.sub(r'=', ' EQ ', filter_item)
            filters_result += f"--filters '{filter_item}' "

    object_cmd = (
        f'{NEOFS_CLI_EXEC} --rpc-endpoint {NEOFS_ENDPOINT} --key {private_key} '
        f'object search {keys} --cid {cid} {bearer_token} {filters_result} {options}'
    )
    logger.info("Cmd: %s" % object_cmd)
    try:
        complProc = subprocess.run(object_cmd, check=True, universal_newlines=True,
                stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=15, shell=True)
        logger.info("Output: %s" % complProc.stdout)

        found_objects = re.findall(r'(\w{43,44})', complProc.stdout)

        if expected_objects_list:
            if sorted(found_objects) == sorted(expected_objects_list):
                logger.info("Found objects list '{}' is equal to the expected list '{}'".format(found_objects, expected_objects_list))
            else:
                raise Exception("Found object list '{}' is not equal to the expected list '{}'".format(found_objects, expected_objects_list))

        return found_objects
    except subprocess.CalledProcessError as e:
        raise Exception("command '{}' returned with error (code {}): {}".format(e.cmd, e.returncode, e.output))


@keyword('Get Split objects')
def get_component_objects(private_key: str, cid: str, oid: str):
    logger.info("Collect Split objects list from the Linking object.")
    split_id = ""
    nodes = _get_storage_nodes()
    for node in nodes:
        header_virtual = head_object(private_key, cid, oid, '', '', '--raw --ttl 1', node, True)
        if header_virtual:
            parsed_header_virtual = parse_object_virtual_raw_header(header_virtual)

            if 'Linking object' in parsed_header_virtual.keys():
                return _collect_split_objects_from_header(private_key, cid, parsed_header_virtual)

            elif 'Split ID' in parsed_header_virtual.keys():
                logger.info(f"parsed_header_virtual: {parsed_header_virtual}")
                split_id = parsed_header_virtual['Split ID']

    logger.warn("Linking object has not been found.")

    # Get all existing objects
    full_obj_list = search_object(private_key, cid, None, None, None, None, '--phy')

    # Search for the expected Linking object
    for target_oid in full_obj_list:
        header = head_object(private_key, cid, target_oid, '', '', '--raw')
        header_parsed = parse_object_system_header(header)
        if header_parsed.get('Split ID') == split_id and 'Split ChildID' in header_parsed.keys():
            logger.info("Linking object has been found in the additional check (head of all objects).")
            # this object is the Linking object itself, so its child list is the answer
            return header_parsed['Split ChildID']

    raise Exception("Linking object was not found at all - all existing objects have been headed.")


def _collect_split_objects_from_header(private_key, cid, parsed_header):
    header_link = head_object(private_key, cid, parsed_header['Linking object'], '', '', '--raw')
    header_link_parsed = parse_object_system_header(header_link)
    return header_link_parsed['Split ChildID']


@keyword('Verify Split Chain')
def verify_split_chain(private_key: str, cid: str, oid: str):
    header_virtual_parsed = dict()
    header_last_parsed = dict()

    marker_last_obj = 0
    marker_link_obj = 0

    final_verif_data = dict()

    # Get the Last object
    logger.info("Collect Split objects information and verify the chain of the objects.")
    nodes = _get_storage_nodes()
    for node in nodes:
        header_virtual = head_object(private_key, cid, oid, '', '', '--raw --ttl 1', node, True)
        parsed_header_virtual = parse_object_virtual_raw_header(header_virtual)

        if 'Last object' in parsed_header_virtual.keys():
            header_last = head_object(private_key, cid, parsed_header_virtual['Last object'], '', '', '--raw')
            header_last_parsed = parse_object_system_header(header_last)
            marker_last_obj = 1

            # Recursive chain validation up to the first object
            final_verif_data = _verify_child_link(private_key, cid, oid, header_last_parsed, final_verif_data)
            break

    if marker_last_obj == 0:
        raise Exception("Latest object has not been found.")

    # Get the Linking object
    logger.info("Compare Split objects result information with the Linking object.")
    for node in nodes:
        header_virtual = head_object(private_key, cid, oid, '', '', '--raw --ttl 1', node, True)
        parsed_header_virtual = parse_object_virtual_raw_header(header_virtual)

        if 'Linking object' in parsed_header_virtual.keys():
            header_link = head_object(private_key, cid, parsed_header_virtual['Linking object'], '', '', '--raw')
            header_link_parsed = parse_object_system_header(header_link)
            marker_link_obj = 1

            reversed_list = final_verif_data['ID List'][::-1]
            if header_link_parsed['Split ChildID'] == reversed_list:
                logger.info("Split objects list from the Linking object is equal to the expected %s" % ','.join(header_link_parsed['Split ChildID']))
            else:
                raise Exception("Split objects list from the Linking object (%s) is not equal to the expected (%s)" % (','.join(header_link_parsed['Split ChildID']), ','.join(reversed_list)))

            if int(header_link_parsed['PayloadLength']) == 0:
                logger.info("Linking object Payload is equal to the expected zero size.")
            else:
                raise Exception("Linking object Payload is not equal to the expected. It should be zero.")

            if header_link_parsed['Type'] == 'regular':
                logger.info("Linking object Type is 'regular' as expected.")
            else:
                raise Exception("Object Type is not 'regular'.")

            if header_link_parsed['Split ID'] == final_verif_data['Split ID']:
                logger.info("Linking object Split ID is equal to the expected %s." % final_verif_data['Split ID'])
            else:
                raise Exception("Split ID from the Linking object (%s) is not equal to the expected (%s)" % (header_link_parsed['Split ID'], final_verif_data['Split ID']))

            break

    if marker_link_obj == 0:
        raise Exception("Linked object has not been found.")

    logger.info("Compare Split objects result information with the Virtual object.")

    header_virtual = head_object(private_key, cid, oid, '', '', '')
    header_virtual_parsed = parse_object_system_header(header_virtual)

    if int(header_virtual_parsed['PayloadLength']) == int(final_verif_data['PayloadLength']):
        logger.info("Split objects PayloadLength is equal to the Virtual object Payload %s" % header_virtual_parsed['PayloadLength'])
    else:
        raise Exception("Split objects PayloadLength from the Virtual object (%s) is not equal to the expected (%s)" % (header_virtual_parsed['PayloadLength'], final_verif_data['PayloadLength']))

    if header_virtual_parsed['Type'] == 'regular':
        logger.info("Virtual object Type is 'regular' as expected.")
    else:
        raise Exception("Object Type is not 'regular'.")

    return 1
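

# In short, 'Verify Split Chain' asserts that: the recursively collected child
# chain matches the Linking object's 'Split ChildID' list (in reverse order),
# the Linking object has a zero-size payload and 'regular' type, all parts
# share one Split ID, and the summed payload length of the parts equals the
# payload length reported for the virtual (parent) object.
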

def _verify_child_link(private_key: str, cid: str, oid: str, header_last_parsed: dict, final_verif_data: dict):
    if 'PayloadLength' in final_verif_data.keys():
        final_verif_data['PayloadLength'] = int(final_verif_data['PayloadLength']) + int(header_last_parsed['PayloadLength'])
    else:
        final_verif_data['PayloadLength'] = int(header_last_parsed['PayloadLength'])

    if header_last_parsed['Type'] != 'regular':
        raise Exception("Object Type is not 'regular'.")

    if 'Split ID' in final_verif_data.keys():
        if final_verif_data['Split ID'] != header_last_parsed['Split ID']:
            raise Exception("Object Split ID (%s) is not the expected one (%s)." % (header_last_parsed['Split ID'], final_verif_data['Split ID']))
    else:
        final_verif_data['Split ID'] = header_last_parsed['Split ID']

    if 'ID List' in final_verif_data.keys():
        final_verif_data['ID List'].append(header_last_parsed['ID'])
    else:
        final_verif_data['ID List'] = []
        final_verif_data['ID List'].append(header_last_parsed['ID'])

    if 'Split PreviousID' in header_last_parsed.keys():
        header_virtual = head_object(private_key, cid, header_last_parsed['Split PreviousID'], '', '', '--raw')
        parsed_header_virtual = parse_object_system_header(header_virtual)

        final_verif_data = _verify_child_link(private_key, cid, oid, parsed_header_virtual, final_verif_data)
    else:
        logger.info("Chain of the objects has been parsed from the last object to the first.")

    return final_verif_data


@keyword('Verify Head Tombstone')
def verify_head_tombstone(private_key: str, cid: str, oid_ts: str, oid: str, addr: str):
    object_cmd = (
        f'{NEOFS_CLI_EXEC} --rpc-endpoint {NEOFS_ENDPOINT} --key {private_key} '
        f'object head --cid {cid} --oid {oid_ts} --json'
    )
    logger.info("Cmd: %s" % object_cmd)
    try:
        complProc = subprocess.run(object_cmd, check=True, universal_newlines=True,
                stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=15, shell=True)
        full_headers = json.loads(complProc.stdout)
        logger.info("Output: %s" % full_headers)

        # Header verification
        header_cid = full_headers["header"]["containerID"]["value"]
        if _json_cli_decode(header_cid) == cid:
            logger.info("Header CID is expected: %s (%s in the output)" % (cid, header_cid))
        else:
            raise Exception("Header CID is not expected.")

        header_owner = full_headers["header"]["ownerID"]["value"]
        if _json_cli_decode(header_owner) == addr:
            logger.info("Header ownerID is expected: %s (%s in the output)" % (addr, header_owner))
        else:
            raise Exception("Header ownerID is not expected.")

        header_type = full_headers["header"]["objectType"]
        if header_type == "TOMBSTONE":
            logger.info("Header Type is expected: %s" % header_type)
        else:
            raise Exception("Header Type is not expected.")

        header_session_type = full_headers["header"]["sessionToken"]["body"]["object"]["verb"]
        if header_session_type == "DELETE":
            logger.info("Header Session Type is expected: %s" % header_session_type)
        else:
            raise Exception("Header Session Type is not expected.")

        header_session_cid = full_headers["header"]["sessionToken"]["body"]["object"]["address"]["containerID"]["value"]
        if _json_cli_decode(header_session_cid) == cid:
            logger.info("Header Session CID is expected: %s (%s in the output)" % (cid, header_session_cid))
        else:
            raise Exception("Header Session CID is not expected.")

        header_session_oid = full_headers["header"]["sessionToken"]["body"]["object"]["address"]["objectID"]["value"]
        if _json_cli_decode(header_session_oid) == oid:
            logger.info("Header Session OID (deleted object) is expected: %s (%s in the output)" % (oid, header_session_oid))
        else:
            raise Exception("Header Session OID (deleted object) is not expected.")

    except subprocess.CalledProcessError as e:
        raise Exception("command '{}' returned with error (code {}): {}".format(e.cmd, e.returncode, e.output))


def _json_cli_decode(data: str):
    return base58.b58encode(base64.b64decode(data)).decode("utf-8")


@keyword('Head object')
def head_object(private_key: str, cid: str, oid: str, bearer_token: str = "",
        user_headers: str = "", options: str = "", endpoint: str = "", ignore_failure: bool = False):
    if bearer_token:
        bearer_token = f"--bearer {TEMP_DIR}{bearer_token}"
    if endpoint == "":
        endpoint = NEOFS_ENDPOINT

    object_cmd = (
        f'{NEOFS_CLI_EXEC} --rpc-endpoint {endpoint} --key {private_key} object '
        f'head --cid {cid} --oid {oid} {bearer_token} {options}'
    )
    logger.info("Cmd: %s" % object_cmd)
    try:
        complProc = subprocess.run(object_cmd, check=True, universal_newlines=True,
                stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=15, shell=True)
        logger.info("Output: %s" % complProc.stdout)

        if user_headers:
            for key in user_headers.split(","):
                if re.search(r'(%s)' % key, complProc.stdout):
                    logger.info("User header %s was parsed from the command output" % key)
                else:
                    raise Exception("User header %s was not found in the command output: \t%s" % (key, complProc.stdout))

        return complProc.stdout
    except subprocess.CalledProcessError as e:
        if ignore_failure:
            logger.info("command '{}' returned with error (code {}): {}".format(e.cmd, e.returncode, e.output))
            return e.output
        else:
            raise Exception("command '{}' returned with error (code {}): {}".format(e.cmd, e.returncode, e.output))


@keyword('Parse Object Virtual Raw Header')
def parse_object_virtual_raw_header(header: str):
    result_header = dict()

    m = re.search(r'Split ID:\s+([\w-]+)', header)
    if m is not None:
        result_header['Split ID'] = m.group(1)

    m = re.search(r'Linking object:\s+(\w+)', header)
    if m is not None:
        result_header['Linking object'] = m.group(1)

    m = re.search(r'Last object:\s+(\w+)', header)
    if m is not None:
        result_header['Last object'] = m.group(1)

    logger.info("Result: %s" % result_header)
    return result_header


@keyword('Parse Object System Header')
def parse_object_system_header(header: str):
    result_header = dict()

    # Header - constant attributes

    # ID
    m = re.search(r'^ID: (\w+)', header)
    if m is not None:
        result_header['ID'] = m.group(1)
    else:
        raise Exception("no ID was parsed from the object header: \t%s" % header)

    # CID
    m = re.search(r'CID: (\w+)', header)
    if m is not None:
        result_header['CID'] = m.group(1)
    else:
        raise Exception("no CID was parsed from the object header: \t%s" % header)

    # Owner
    m = re.search(r'Owner: ([a-zA-Z0-9]+)', header)
    if m is not None:
        result_header['OwnerID'] = m.group(1)
    else:
        raise Exception("no OwnerID was parsed from the object header: \t%s" % header)

    # CreatedAtEpoch
    m = re.search(r'CreatedAt: (\d+)', header)
    if m is not None:
        result_header['CreatedAtEpoch'] = m.group(1)
    else:
        raise Exception("no CreatedAtEpoch was parsed from the object header: \t%s" % header)

    # PayloadLength
    m = re.search(r'Size: (\d+)', header)
    if m is not None:
        result_header['PayloadLength'] = m.group(1)
    else:
        raise Exception("no PayloadLength was parsed from the object header: \t%s" % header)

    # HomoHash
    m = re.search(r'HomoHash:\s+(\w+)', header)
    if m is not None:
        result_header['HomoHash'] = m.group(1)
    else:
        raise Exception("no HomoHash was parsed from the object header: \t%s" % header)

    # Checksum
    m = re.search(r'Checksum:\s+(\w+)', header)
    if m is not None:
        result_header['Checksum'] = m.group(1)
    else:
        raise Exception("no Checksum was parsed from the object header: \t%s" % header)

    # Type
    m = re.search(r'Type:\s+(\w+)', header)
    if m is not None:
        result_header['Type'] = m.group(1)
    else:
        raise Exception("no Type was parsed from the object header: \t%s" % header)

    # Header - optional attributes

    m = re.search(r'Split ID:\s+([\w-]+)', header)
    if m is not None:
        result_header['Split ID'] = m.group(1)

    m = re.search(r'Split PreviousID:\s+(\w+)', header)
    if m is not None:
        result_header['Split PreviousID'] = m.group(1)

    m = re.search(r'Split ParentID:\s+(\w+)', header)
    if m is not None:
        result_header['Split ParentID'] = m.group(1)

    # Split ChildID list
    found_objects = re.findall(r'Split ChildID:\s+(\w+)', header)
    if found_objects:
        result_header['Split ChildID'] = found_objects

    logger.info("Result: %s" % result_header)
    return result_header
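

# Keys produced by the parser above: 'ID', 'CID', 'OwnerID', 'CreatedAtEpoch',
# 'PayloadLength', 'HomoHash', 'Checksum' and 'Type' are mandatory (an
# exception is raised if any of them is missing), while 'Split ID',
# 'Split PreviousID', 'Split ParentID' and 'Split ChildID' are only set when
# the corresponding lines are present, i.e. for objects that are part of a split.
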

@keyword('Delete object')
def delete_object(private_key: str, cid: str, oid: str, bearer: str, options: str = ""):
    bearer_token = ""
    if bearer:
        bearer_token = f"--bearer {TEMP_DIR}{bearer}"

    object_cmd = (
        f'{NEOFS_CLI_EXEC} --rpc-endpoint {NEOFS_ENDPOINT} --key {private_key} '
        f'object delete --cid {cid} --oid {oid} {bearer_token} {options}'
    )
    logger.info("Cmd: %s" % object_cmd)
    try:
        complProc = subprocess.run(object_cmd, check=True, universal_newlines=True,
                stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=30, shell=True)
        logger.info("Output: %s" % complProc.stdout)

        tombstone = _parse_oid(complProc.stdout)
        return tombstone
    except subprocess.CalledProcessError as e:
        raise Exception("command '{}' returned with error (code {}): {}".format(e.cmd, e.returncode, e.output))


@keyword('Get file name')
def get_file_name(filepath):
    filename = os.path.basename(filepath)
    return filename


@keyword('Get file hash')
def get_file_hash(filename: str):
    file_hash = _get_file_hash(filename)
    return file_hash


@keyword('Verify file hash')
def verify_file_hash(filename, expected_hash):
    file_hash = _get_file_hash(filename)
    if file_hash == expected_hash:
        logger.info("Hash is equal to expected: %s" % file_hash)
    else:
        raise Exception("File hash '{}' is not equal to {}".format(file_hash, expected_hash))


@keyword('Put object')
def put_object(private_key: str, path: str, cid: str, bearer: str, user_headers: str,
        endpoint: str = "", options: str = ""):
    logger.info("Going to put the object")

    if not endpoint:
        endpoint = random.sample(_get_storage_nodes(), 1)[0]
    if user_headers:
        user_headers = f"--attributes {user_headers}"
    if bearer:
        bearer = f"--bearer {TEMP_DIR}{bearer}"

    putobject_cmd = (
        f'{NEOFS_CLI_EXEC} --rpc-endpoint {endpoint} --key {private_key} object '
        f'put --file {path} --cid {cid} {bearer} {user_headers} {options}'
    )
    logger.info("Cmd: %s" % putobject_cmd)
    try:
        complProc = subprocess.run(putobject_cmd, check=True, universal_newlines=True,
                stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=120, shell=True)
        logger.info("Output: %s" % complProc.stdout)

        oid = _parse_oid(complProc.stdout)
        return oid
    except subprocess.CalledProcessError as e:
        raise Exception("command '{}' returned with error (code {}): {}".format(e.cmd, e.returncode, e.output))


@keyword('Get Nodes Log Latest Timestamp')
def get_logs_latest_timestamp():
    """
    The keyword returns:
        nodes_logs_time -- a dict of node container names (keys) and their latest log timestamps (values)
    """
    nodes = _get_storage_nodes()
    client_api = docker.APIClient()

    nodes_logs_time = dict()

    for node in nodes:
        container = node.split('.')[0]
        log_line = client_api.logs(container, tail=1)

        m = re.search(r'(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z)', str(log_line))
        if m is not None:
            timestamp = m.group(1)
            timestamp_date = datetime.fromisoformat(timestamp[:-1])
            nodes_logs_time[container] = timestamp_date

    logger.info("Latest logs timestamp list: %s" % nodes_logs_time)
    return nodes_logs_time


@keyword('Find in Nodes Log')
def find_in_nodes_Log(line: str, nodes_logs_time: dict):
    client_api = docker.APIClient()
    container_names = list()

    for docker_container in client_api.containers():
        container_names.append(docker_container['Names'][0][1:])

    global_count = 0

    for container in nodes_logs_time.keys():
        # check if the container exists
        if container in container_names:
            # get the log since the timestamp
            timestamp_date = nodes_logs_time[container]
            log_lines = client_api.logs(container, since=timestamp_date)
            logger.info("Timestamp since: %s" % timestamp_date)
            found_count = len(re.findall(line, log_lines.decode("utf-8")))
            logger.info("Node %s log - found counter: %s" % (container, found_count))
            global_count += found_count
        else:
            logger.info("Container %s has not been found." % container)

    if global_count > 0:
        logger.info("Expected line '%s' has been found in the logs." % line)
    else:
        raise Exception("Expected line '%s' has not been found in the logs." % line)

    return 1
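

# Typical flow (illustrative): snapshot the log positions with
# 'Get Nodes Log Latest Timestamp' before the operation under test, run the
# operation, then pass the snapshot to 'Find in Nodes Log' to assert that a
# given line appeared in at least one storage node log since that moment.
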

@keyword('Get Range Hash')
def get_range_hash(private_key: str, cid: str, oid: str, bearer_token: str,
        range_cut: str, options: str = ""):
    if bearer_token:
        bearer_token = f"--bearer {TEMP_DIR}{bearer_token}"

    object_cmd = (
        f'{NEOFS_CLI_EXEC} --rpc-endpoint {NEOFS_ENDPOINT} --key {private_key} '
        f'object hash --cid {cid} --oid {oid} --range {range_cut} '
        f'{bearer_token} {options}'
    )
    logger.info("Cmd: %s" % object_cmd)
    try:
        complProc = subprocess.run(object_cmd, check=True, universal_newlines=True,
                stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=60, shell=True)
        logger.info("Output: %s" % complProc.stdout)
    except subprocess.CalledProcessError as e:
        raise Exception("command '{}' returned with error (code {}): {}".format(e.cmd, e.returncode, e.output))


@keyword('Get object')
def get_object(private_key: str, cid: str, oid: str, bearer_token: str,
        write_object: str, endpoint: str = "", options: str = ""):
    file_path = TEMP_DIR + write_object

    logger.info("Going to get the object")

    if not endpoint:
        endpoint = random.sample(_get_storage_nodes(), 1)[0]

    if bearer_token:
        bearer_token = f"--bearer {TEMP_DIR}{bearer_token}"

    object_cmd = (
        f'{NEOFS_CLI_EXEC} --rpc-endpoint {endpoint} --key {private_key} '
        f'object get --cid {cid} --oid {oid} --file {file_path} {bearer_token} '
        f'{options}'
    )
    logger.info("Cmd: %s" % object_cmd)
    try:
        complProc = subprocess.run(object_cmd, check=True, universal_newlines=True,
                stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=120, shell=True)
        logger.info("Output: %s" % complProc.stdout)
    except subprocess.CalledProcessError as e:
        raise Exception("command '{}' returned with error (code {}): {}".format(e.cmd, e.returncode, e.output))

    return file_path


@keyword('Put Storagegroup')
def put_storagegroup(private_key: str, cid: str, bearer_token: str = "", *oid_list):
    cmd_oid_line = ','.join(oid_list)

    if bearer_token:
        bearer_token = f"--bearer {TEMP_DIR}{bearer_token}"

    object_cmd = (
        f'{NEOFS_CLI_EXEC} --rpc-endpoint {NEOFS_ENDPOINT} --key {private_key} storagegroup '
        f'put --cid {cid} --members {cmd_oid_line} {bearer_token}'
    )
    logger.info(f"Cmd: {object_cmd}")
    try:
        complProc = subprocess.run(object_cmd, check=True, universal_newlines=True,
                stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=60, shell=True)
        logger.info(f"Output: {complProc.stdout}")

        oid = _parse_oid(complProc.stdout)
        return oid
    except subprocess.CalledProcessError as e:
        raise Exception("command '{}' returned with error (code {}): {}".format(e.cmd, e.returncode, e.output))


@keyword('List Storagegroup')
def list_storagegroup(private_key: str, cid: str, bearer_token: str = "", *expected_list):
    if bearer_token:
        bearer_token = f"--bearer {TEMP_DIR}{bearer_token}"

    object_cmd = (
        f'{NEOFS_CLI_EXEC} --rpc-endpoint {NEOFS_ENDPOINT} --key {private_key} '
        f'storagegroup list --cid {cid} {bearer_token}'
    )
    logger.info(f"Cmd: {object_cmd}")
    try:
        complProc = subprocess.run(object_cmd, check=True, universal_newlines=True,
                stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=15, shell=True)
        logger.info(f"Output: {complProc.stdout}")

        found_objects = re.findall(r'(\w{43,44})', complProc.stdout)
        if expected_list:
            if sorted(found_objects) == sorted(expected_list):
                logger.info("Found storage group list '{}' is equal to the expected list '{}'".format(found_objects, expected_list))
            else:
                raise Exception("Found storage group list '{}' is not equal to the expected list '{}'".format(found_objects, expected_list))

        return found_objects
    except subprocess.CalledProcessError as e:
        raise Exception("command '{}' returned with error (code {}): {}".format(e.cmd, e.returncode, e.output))


@keyword('Get Storagegroup')
def get_storagegroup(private_key: str, cid: str, oid: str, bearer_token: str, expected_size, *expected_objects_list):
    if bearer_token:
        bearer_token = f"--bearer {TEMP_DIR}{bearer_token}"

    object_cmd = (
        f'{NEOFS_CLI_EXEC} --rpc-endpoint {NEOFS_ENDPOINT} --key {private_key} '
        f'storagegroup get --cid {cid} --id {oid} {bearer_token}'
    )
    logger.info(f"Cmd: {object_cmd}")
    try:
        complProc = subprocess.run(object_cmd, check=True, universal_newlines=True,
                stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=60, shell=True)
        logger.info(f"Output: {complProc.stdout}")

        if expected_size:
            if re.search(r'Group size: %s' % expected_size, complProc.stdout):
                logger.info("Group size %s has been found in the output" % expected_size)
            else:
                raise Exception("Group size %s has not been found in the output" % expected_size)

        found_objects = re.findall(r'\s(\w{43,44})\s', complProc.stdout)
        if expected_objects_list:
            if sorted(found_objects) == sorted(expected_objects_list):
                logger.info("Found objects list '{}' is equal to the expected list '{}'".format(found_objects, expected_objects_list))
            else:
                raise Exception("Found object list '{}' is not equal to the expected list '{}'".format(found_objects, expected_objects_list))
    except subprocess.CalledProcessError as e:
        raise Exception("command '{}' returned with error (code {}): {}".format(e.cmd, e.returncode, e.output))


@keyword('Delete Storagegroup')
def delete_storagegroup(private_key: str, cid: str, oid: str, bearer_token: str = ""):
    if bearer_token:
        bearer_token = f"--bearer {TEMP_DIR}{bearer_token}"

    object_cmd = (
        f'{NEOFS_CLI_EXEC} --rpc-endpoint {NEOFS_ENDPOINT} --key {private_key} storagegroup '
        f'delete --cid {cid} --id {oid} {bearer_token}'
    )
    logger.info(f"Cmd: {object_cmd}")
    try:
        complProc = subprocess.run(object_cmd, check=True, universal_newlines=True,
                stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=60, shell=True)
        logger.info(f"Output: {complProc.stdout}")

        m = re.search(r'Tombstone: ([a-zA-Z0-9-]+)', complProc.stdout)
        if m is not None:
            oid = m.group(1)
        else:
            raise Exception("no Tombstone ID was parsed from the command output: \t%s" % complProc.stdout)
        return oid
    except subprocess.CalledProcessError as e:
        raise Exception("command '{}' returned with error (code {}): {}".format(e.cmd, e.returncode, e.output))


def _get_file_hash(filename):
    blocksize = 65536
    file_hash = hashlib.md5()
    with open(filename, "rb") as f:
        for block in iter(lambda: f.read(blocksize), b""):
            file_hash.update(block)
    logger.info("Hash: %s" % file_hash.hexdigest())
    return file_hash.hexdigest()


def _find_cid(output: str, cid: str):
    """
    This function parses CID from the given CLI output.

    Parameters:
    - output: a string with the command run output
    """
    if re.search(r'(%s)' % cid, output):
        logger.info("CID %s was parsed from the command output: \t%s" % (cid, output))
    else:
        raise Exception("no CID %s was parsed from the command output: \t%s" % (cid, output))
    return cid


def _parse_oid(input_str: str):
    """
    This function parses OID from the given CLI output. The input string we expect:
        Object successfully stored
          ID: 4MhrLA7RXTBXCsaNnbahYVAPuoQdiUPuyNEWnywvoSEs
          CID: HeZu2DXBuPve6HXbuHZx64knS7KcGtfSj2L59Li72kkg
    We want to take the 'ID' value from the string.

    Parameters:
    - input_str: a string with the command run output
    """
    try:
        # take the second line of the command output
        snd_str = input_str.split('\n')[1]
    except Exception:
        logger.error(f"Got empty input: {input_str}")
        raise
    splitted = snd_str.split(": ")
    if len(splitted) != 2:
        raise Exception(f"no OID was parsed from the command output: \t{snd_str}")
    return splitted[1]


def _parse_cid(input_str: str):
    """
    This function parses CID from the given CLI output. The input string we expect:
        container ID: 2tz86kVTDpJxWHrhw3h6PbKMwkLtBEwoqhHQCKTre1FN
        awaiting...
        container has been persisted on sidechain
    We want to take the 'container ID' value from the string.

    Parameters:
    - input_str: a string with the command run output
    """
    try:
        # take the first line of the command output
        fst_str = input_str.split('\n')[0]
    except Exception:
        logger.error(f"Got empty output: {input_str}")
        raise
    splitted = fst_str.split(": ")
    if len(splitted) != 2:
        raise Exception(f"no CID was parsed from the command output: \t{fst_str}")
    return splitted[1]


def _get_storage_nodes():
    # TODO: fix to get the netmap from neofs-cli
    logger.info("Storage nodes: %s" % NEOFS_NETMAP)
    return NEOFS_NETMAP


def _search_object(node: str, private_key: str, cid: str, oid: str):
    oid_cmd = ''
    if oid:
        oid_cmd = "--oid %s" % oid

    Cmd = (
        f'{NEOFS_CLI_EXEC} --rpc-endpoint {node} --key {private_key} --ttl 1 '
        f'object search --root --cid {cid} {oid_cmd}'
    )
    try:
        logger.info(Cmd)
        complProc = subprocess.run(Cmd, check=True, universal_newlines=True,
                stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=30, shell=True)
        logger.info("Output: %s" % complProc.stdout)

        if re.search(r'%s' % oid, complProc.stdout):
            return oid
        else:
            logger.info("Object is not found.")
    except subprocess.CalledProcessError as e:
        if re.search(r'local node is outside of object placement', e.output):
            logger.info("Server is not presented in the container.")
        elif (re.search(r'timed out after 30 seconds', e.output) or
                re.search(r'no route to host', e.output) or
                re.search(r'i/o timeout', e.output)):
            logger.warn("Node is unavailable")
        else:
            raise Exception("command '{}' returned with error (code {}): {}".format(e.cmd, e.returncode, e.output))