forked from TrueCloudLab/frostfs-testcases
(#70) Decode json Head Object output
Signed-off-by: EliChin <elizaveta@nspcc.ru>
parent 5f1fc77cb2
commit 8e48234e1a
5 changed files with 158 additions and 119 deletions
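Note on the decoding step: the `--json` output of `neofs-cli` carries object and container identifiers as base64-encoded byte strings, while the tests elsewhere compare base58 text, so the new keyword below passes those values through a `_json_cli_decode()` helper. That helper is not part of this diff, so the sketch here is only an assumption about what it has to do (base64 in, base58 text out), with a made-up sample value:

    import base64
    import binascii

    import base58  # third-party package already imported by this library


    def _json_cli_decode(data):
        # Assumed behaviour of the helper referenced by the new keyword:
        # the CLI JSON gives base64-encoded bytes, the tests use base58 strings.
        return base58.b58encode(base64.b64decode(data)).decode('utf-8')


    sample_id_b64 = base64.b64encode(b'\x11' * 32).decode()  # stand-in for objectID.value
    print(_json_cli_decode(sample_id_b64))
    # Checksums, by contrast, are hex-encoded in the new keyword:
    print(binascii.hexlify(base64.b64decode(sample_id_b64)))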
@@ -12,6 +12,7 @@ import base58
 import docker
 import json
 import tarfile
+import binascii
 
 from datetime import datetime
 from common import *
@@ -398,7 +399,7 @@ def get_component_objects(private_key: str, cid: str, oid: str):
     # Search expected Linking object
     for targer_oid in full_obj_list:
         header = head_object(private_key, cid, targer_oid, '', '', '--raw')
-        header_parsed = parse_object_system_header(header)
+        header_parsed = _get_raw_split_information(header)
         if header_parsed['Split ID'] == split_id and 'Split ChildID' in header_parsed.keys():
             logger.info("Linking object has been found in additional check (head of all objects).")
             return _collect_split_objects_from_header(private_key, cid, parsed_header_virtual)
@@ -407,7 +408,7 @@ def get_component_objects(private_key: str, cid: str, oid: str):
 
 def _collect_split_objects_from_header(private_key, cid, parsed_header):
     header_link = head_object(private_key, cid, parsed_header['Linking object'], '', '', '--raw')
-    header_link_parsed = parse_object_system_header(header_link)
+    header_link_parsed = _get_raw_split_information(header_link)
     return header_link_parsed['Split ChildID']
 
 
@@ -431,7 +432,7 @@ def verify_split_chain(private_key: str, cid: str, oid: str):
 
     if 'Last object' in parsed_header_virtual.keys():
         header_last = head_object(private_key, cid, parsed_header_virtual['Last object'], '', '', '--raw')
-        header_last_parsed = parse_object_system_header(header_last)
+        header_last_parsed = _get_raw_split_information(header_last)
         marker_last_obj = 1
 
         # Recursive chain validation up to the first object
@@ -450,7 +451,7 @@ def verify_split_chain(private_key: str, cid: str, oid: str):
         if 'Linking object' in parsed_header_virtual.keys():
 
             header_link = head_object(private_key, cid, parsed_header_virtual['Linking object'], '', '', '--raw')
-            header_link_parsed = parse_object_system_header(header_link)
+            header_link_parsed = _get_raw_split_information(header_link)
             marker_link_obj = 1
 
             reversed_list = final_verif_data['ID List'][::-1]
@@ -473,7 +474,7 @@ def verify_split_chain(private_key: str, cid: str, oid: str):
             if header_link_parsed['Split ID'] == final_verif_data['Split ID']:
                 logger.info("Linking Object Split ID is equal to expected %s." % final_verif_data['Split ID'] )
             else:
-                raise Exception("Split ID from Linking Object (%s) is not equal to expected (%s)" % header_link_parsed['Split ID'], ffinal_verif_data['Split ID'] )
+                raise Exception("Split ID from Linking Object (%s) is not equal to expected (%s)" % header_link_parsed['Split ID'], final_verif_data['Split ID'] )
 
             break
 
@@ -484,7 +485,7 @@ def verify_split_chain(private_key: str, cid: str, oid: str):
     logger.info("Compare Split objects result information with Virtual object.")
 
     header_virtual = head_object(private_key, cid, oid, '', '', '')
-    header_virtual_parsed = parse_object_system_header(header_virtual)
+    header_virtual_parsed = _get_raw_split_information(header_virtual)
 
     if int(header_virtual_parsed['PayloadLength']) == int(final_verif_data['PayloadLength']):
         logger.info("Split objects PayloadLength are equal to Virtual Object Payload %s" % header_virtual_parsed['PayloadLength'])
@@ -523,7 +524,7 @@ def _verify_child_link(private_key: str, cid: str, oid: str, header_last_parsed:
 
     if 'Split PreviousID' in header_last_parsed.keys():
         header_virtual = head_object(private_key, cid, header_last_parsed['Split PreviousID'], '', '', '--raw')
-        parsed_header_virtual = parse_object_system_header(header_virtual)
+        parsed_header_virtual = _get_raw_split_information(header_virtual)
 
         final_verif_data = _verify_child_link(private_key, cid, oid, parsed_header_virtual, final_verif_data)
     else:
@@ -531,6 +532,57 @@ def _verify_child_link(private_key: str, cid: str, oid: str, header_last_parsed:
 
     return final_verif_data
 
+
+def _get_raw_split_information(header):
+    result_header = dict()
+
+    # Header - Constant attributes
+
+    # ID
+    m = re.search(r'^ID: (\w+)', header)
+    if m is not None:
+        result_header['ID'] = m.group(1)
+    else:
+        raise Exception("no ID was parsed from object header: \t%s" % header)
+
+    # Type
+    m = re.search(r'Type:\s+(\w+)', header)
+    if m is not None:
+        result_header['Type'] = m.group(1)
+    else:
+        raise Exception("no Type was parsed from object header: \t%s" % header)
+
+    # PayloadLength
+    m = re.search(r'Size: (\d+)', header)
+    if m is not None:
+        result_header['PayloadLength'] = m.group(1)
+    else:
+        raise Exception("no PayloadLength was parsed from object header: \t%s" % header)
+
+    # Header - Optional attributes
+
+    # SplitID
+    m = re.search(r'Split ID:\s+([\w-]+)', header)
+    if m is not None:
+        result_header['Split ID'] = m.group(1)
+
+    # Split PreviousID
+    m = re.search(r'Split PreviousID:\s+(\w+)', header)
+    if m is not None:
+        result_header['Split PreviousID'] = m.group(1)
+
+    # Split ParentID
+    m = re.search(r'Split ParentID:\s+(\w+)', header)
+    if m is not None:
+        result_header['Split ParentID'] = m.group(1)
+
+    # Split ChildID list
+    found_objects = re.findall(r'Split ChildID:\s+(\w+)', header)
+    if found_objects:
+        result_header['Split ChildID'] = found_objects
+    logger.info("Result: %s" % result_header)
+
+    return result_header
+
+
 @keyword('Verify Head Tombstone')
 def verify_head_tombstone(private_key: str, cid: str, oid_ts: str, oid: str, addr: str):
     object_cmd = (
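For the split-chain helpers above, head output is still requested in plain text (`--raw`) and parsed with regexes rather than JSON. A small self-contained illustration of that extraction, run on a made-up header text shaped to match the patterns (the field values are not real CLI output), could look like this:

    import re

    sample_raw_header = (
        'ID: 8N2XBW8Zc9xxDnLpqXyuSCrCtDAVf3yTJ4CPnUKRvKmQ\n'   # made-up identifiers
        'Type: REGULAR\n'
        'Size: 1024\n'
        'Split ID: 20a66e4c-5cb8-44b4-b116-a54f8e5f9d6c\n'
        'Split ChildID: 5SPqAJPX1d1CyDLGuXpLrekuvUDbcAM\n'
        'Split ChildID: 6TQrBKQY2e2DzEMHvYqMsflvwVEcdBN\n'
    )

    # Same patterns as _get_raw_split_information():
    split_id = re.search(r'Split ID:\s+([\w-]+)', sample_raw_header).group(1)
    children = re.findall(r'Split ChildID:\s+(\w+)', sample_raw_header)
    print(split_id, children)  # one Split ID plus the list of child object IDs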
@@ -622,8 +674,8 @@ def head_object(private_key: str, cid: str, oid: str, bearer_token: str="",
         else:
             raise Exception("command '{}' return with error (code {}): {}".format(e.cmd, e.returncode, e.output))
 
-@keyword('Head container')
-def head_container(private_key: str, cid: str, endpoint: str="", user_headers:str="", ignore_failure: bool = False, json_output: bool = False):
+@keyword('Get container attributes')
+def get_container_attributes(private_key: str, cid: str, endpoint: str="", json_output: bool = False):
 
     if endpoint == "":
         endpoint = NEOFS_ENDPOINT
@@ -636,21 +688,10 @@ def head_container(private_key: str, cid: str, endpoint: str="", user_headers:st
         complProc = subprocess.run(container_cmd, check=True, universal_newlines=True,
             stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=15, shell=True)
         logger.info("Output: %s" % complProc.stdout)
-
-        if user_headers:
-            for key in user_headers.split(","):
-                if re.search(r'(%s)' % key, complProc.stdout):
-                    logger.info("User header %s was parsed from command output" % key)
-                else:
-                    raise Exception("User header %s was not found in the command output: \t%s" % (key, complProc.stdout))
         return complProc.stdout
 
     except subprocess.CalledProcessError as e:
-        if ignore_failure:
-            logger.info("command '{}' return with error (code {}): {}".format(e.cmd, e.returncode, e.output))
-            return e.output
-        else:
-            raise Exception("command '{}' return with error (code {}): {}".format(e.cmd, e.returncode, e.output))
+        raise Exception("command '{}' return with error (code {}): {}".format(e.cmd, e.returncode, e.output))
 
 @keyword('Parse Object Virtual Raw Header')
 def parse_object_virtual_raw_header(header: str):
@@ -673,88 +714,112 @@ def parse_object_virtual_raw_header(header: str):
     logger.info("Result: %s" % result_header)
     return result_header
 
-@keyword('Parse Object System Header')
-def parse_object_system_header(header: str):
+@keyword('Decode Object System Header Json')
+def decode_object_system_header_json(header):
     result_header = dict()
+    json_header = json.loads(header)
 
     # Header - Constant attributes
 
     # ID
-    m = re.search(r'^ID: (\w+)', header)
-    if m is not None:
-        result_header['ID'] = m.group(1)
+    ID = json_header["objectID"]["value"]
+    if ID is not None:
+        result_header["ID"] = _json_cli_decode(ID)
     else:
-        raise Exception("no ID was parsed from object header: \t%s" % header)
+        raise Exception("no ID was parsed from header: \t%s" % header)
 
     # CID
-    m = re.search(r'CID: (\w+)', header)
-    if m is not None:
-        result_header['CID'] = m.group(1)
+    CID = json_header["header"]["containerID"]["value"]
+    if CID is not None:
+        result_header["CID"] = _json_cli_decode(CID)
     else:
-        raise Exception("no CID was parsed from object header: \t%s" % header)
+        raise Exception("no CID was parsed from header: \t%s" % header)
 
-    # Owner
-    m = re.search(r'Owner: ([a-zA-Z0-9]+)', header)
-    if m is not None:
-        result_header['OwnerID'] = m.group(1)
+    # OwnerID
+    OwnerID = json_header["header"]["ownerID"]["value"]
+    if OwnerID is not None:
+        result_header["OwnerID"] = _json_cli_decode(OwnerID)
     else:
-        raise Exception("no OwnerID was parsed from object header: \t%s" % header)
+        raise Exception("no OwnerID was parsed from header: \t%s" % header)
 
     # CreatedAtEpoch
-    m = re.search(r'CreatedAt: (\d+)', header)
-    if m is not None:
-        result_header['CreatedAtEpoch'] = m.group(1)
+    CreatedAtEpoch = json_header["header"]["creationEpoch"]
+    if CreatedAtEpoch is not None:
+        result_header["CreatedAtEpoch"] = CreatedAtEpoch
    else:
-        raise Exception("no CreatedAtEpoch was parsed from object header: \t%s" % header)
+        raise Exception("no CreatedAtEpoch was parsed from header: \t%s" % header)
 
     # PayloadLength
-    m = re.search(r'Size: (\d+)', header)
-    if m is not None:
-        result_header['PayloadLength'] = m.group(1)
+    PayloadLength = json_header["header"]["payloadLength"]
+    if PayloadLength is not None:
+        result_header["PayloadLength"] = PayloadLength
     else:
-        raise Exception("no PayloadLength was parsed from object header: \t%s" % header)
+        raise Exception("no PayloadLength was parsed from header: \t%s" % header)
 
 
     # HomoHash
-    m = re.search(r'HomoHash:\s+(\w+)', header)
-    if m is not None:
-        result_header['HomoHash'] = m.group(1)
+    HomoHash = json_header["header"]["homomorphicHash"]["sum"]
+    if HomoHash is not None:
+        result_header["HomoHash"] = _json_cli_decode(HomoHash)
     else:
-        raise Exception("no HomoHash was parsed from object header: \t%s" % header)
+        raise Exception("no HomoHash was parsed from header: \t%s" % header)
 
     # Checksum
-    m = re.search(r'Checksum:\s+(\w+)', header)
-    if m is not None:
-        result_header['Checksum'] = m.group(1)
+    Checksum = json_header["header"]["payloadHash"]["sum"]
+    if Checksum is not None:
+        Checksum_64_d = base64.b64decode(Checksum)
+        result_header["Checksum"] = binascii.hexlify(Checksum_64_d)
     else:
-        raise Exception("no Checksum was parsed from object header: \t%s" % header)
+        raise Exception("no Checksum was parsed from header: \t%s" % header)
 
     # Type
-    m = re.search(r'Type:\s+(\w+)', header)
-    if m is not None:
-        result_header['Type'] = m.group(1)
+    Type = json_header["header"]["objectType"]
+    if Type is not None:
+        result_header["Type"] = Type
     else:
-        raise Exception("no Type was parsed from object header: \t%s" % header)
+        raise Exception("no Type was parsed from header: \t%s" % header)
 
     # Header - Optional attributes
-    m = re.search(r'Split ID:\s+([\w-]+)', header)
-    if m is not None:
-        result_header['Split ID'] = m.group(1)
-
-    m = re.search(r'Split PreviousID:\s+(\w+)', header)
-    if m is not None:
-        result_header['Split PreviousID'] = m.group(1)
-
-    m = re.search(r'Split ParentID:\s+(\w+)', header)
-    if m is not None:
-        result_header['Split ParentID'] = m.group(1)
-
-    # Split ChildID list
-    found_objects = re.findall(r'Split ChildID:\s+(\w+)', header)
-    if found_objects:
-        result_header['Split ChildID'] = found_objects
-    logger.info("Result: %s" % result_header)
-    return result_header
+
+    # Attributes
+    attributes = []
+    attribute_list = json_header["header"]["attributes"]
+    if attribute_list is not None:
+        for e in attribute_list:
+            values_list = list(e.values())
+            attribute = values_list[0] + '=' + values_list[1]
+            attributes.append(attribute)
+        result_header["Attributes"] = attributes
+    else:
+        raise Exception("no Attributes were parsed from header: \t%s" % header)
+
+    return result_header
+
+
+@keyword('Decode Container Attributes Json')
+def decode_container_attributes_json(header):
+    result_header = dict()
+    json_header = json.loads(header)
+
+    attributes = []
+    attribute_list = json_header["attributes"]
+    if attribute_list is not None:
+        for e in attribute_list:
+            values_list = list(e.values())
+            attribute = values_list[0] + '=' + values_list[1]
+            attributes.append(attribute)
+        result_header["Attributes"] = attributes
+    else:
+        raise Exception("no Attributes were parsed from header: \t%s" % header)
+
+    return result_header
+
+
+@keyword('Verify Head Attribute')
+def verify_head_attribute(header, attribute):
+    attribute_list = header["Attributes"]
+    if (attribute in attribute_list):
+        logger.info("Attribute %s is found" % attribute)
+    else:
+        raise Exception("Attribute %s was not found" % attribute)
+
 
 @keyword('Delete object')
 def delete_object(private_key: str, cid: str, oid: str, bearer: str, options: str=""):
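The new keywords above are consumed by the Robot tests below, which now fetch headers with json_output=True, decode them with Decode Object System Header Json / Decode Container Attributes Json, and assert with Verify Head Attribute. A self-contained sketch of that data flow, using a hand-built sample instead of real CLI output (the base58 re-encoding mirrors the assumed `_json_cli_decode()` helper, which is not shown in this diff):

    import base64
    import binascii
    import json

    import base58


    def _decode(value):
        # Assumed _json_cli_decode() equivalent: base64 bytes -> base58 text.
        return base58.b58encode(base64.b64decode(value)).decode('utf-8')


    # Hand-built sample shaped like the fields the new keyword reads; not real CLI output.
    head_json = json.dumps({
        'objectID': {'value': base64.b64encode(b'\x01' * 32).decode()},
        'header': {
            'containerID': {'value': base64.b64encode(b'\x02' * 32).decode()},
            'ownerID': {'value': base64.b64encode(b'\x03' * 25).decode()},
            'creationEpoch': '7',
            'payloadLength': '1024',
            'payloadHash': {'sum': base64.b64encode(b'\x04' * 32).decode()},
            'homomorphicHash': {'sum': base64.b64encode(b'\x05' * 64).decode()},
            'objectType': 'REGULAR',
            'attributes': [{'key': 'AttrNum', 'value': 'one'}],
        },
    })

    parsed = json.loads(head_json)
    result = {
        'ID': _decode(parsed['objectID']['value']),
        'CID': _decode(parsed['header']['containerID']['value']),
        'PayloadLength': parsed['header']['payloadLength'],
        'Checksum': binascii.hexlify(base64.b64decode(parsed['header']['payloadHash']['sum'])),
        'Attributes': ['%s=%s' % (a['key'], a['value']) for a in parsed['header']['attributes']],
    }
    print(result)
    assert 'AttrNum=one' in result['Attributes']  # what Verify Head Attribute checks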
@@ -43,8 +43,8 @@ Check eACL MatchType String Equal Request Deny
     ${CID} =    Create Container Public
     ${S_OID_USER} =    Put object    ${USER_KEY}    ${FILE_S}    ${CID}    ${EMPTY}    ${FILE_USR_HEADER}
 
-    ${HEADER} =    Head object    ${USER_KEY}    ${CID}    ${S_OID_USER}    ${EMPTY}
-    &{HEADER_DICT} =    Parse Object System Header    ${HEADER}
+    ${HEADER} =    Head object    ${USER_KEY}    ${CID}    ${S_OID_USER}    ${EMPTY}    json_output=True
+    &{HEADER_DICT} =    Decode Object System Header Json    ${HEADER}
     Get object    ${OTHER_KEY}    ${CID}    ${S_OID_USER}    ${EMPTY}    local_file_eacl
 
 
@@ -87,8 +87,8 @@ Check eACL MatchType String Equal Request Allow
     ${CID} =    Create Container Public
     ${S_OID_USER} =    Put object    ${USER_KEY}    ${FILE_S}    ${CID}    ${EMPTY}    ${FILE_USR_HEADER}
 
-    ${HEADER} =    Head object    ${USER_KEY}    ${CID}    ${S_OID_USER}    ${EMPTY}
-    &{HEADER_DICT} =    Parse Object System Header    ${HEADER}
+    ${HEADER} =    Head object    ${USER_KEY}    ${CID}    ${S_OID_USER}    ${EMPTY}    json_output=True
+    &{HEADER_DICT} =    Decode Object System Header Json    ${HEADER}
     Get object    ${OTHER_KEY}    ${CID}    ${S_OID_USER}    ${EMPTY}    local_file_eacl
 
 
@@ -131,13 +131,13 @@ Check eACL MatchType String Equal Object
     ${CID} =    Create Container Public
     ${S_OID_USER} =    Put object    ${USER_KEY}    ${FILE_S}    ${CID}    ${EMPTY}    ${FILE_USR_HEADER}
 
-    ${HEADER} =    Head object    ${USER_KEY}    ${CID}    ${S_OID_USER}    ${EMPTY}
-    &{HEADER_DICT} =    Parse Object System Header    ${HEADER}
+    ${HEADER} =    Head object    ${USER_KEY}    ${CID}    ${S_OID_USER}    ${EMPTY}    json_output=True
+    &{HEADER_DICT} =    Decode Object System Header Json    ${HEADER}
     Get object    ${OTHER_KEY}    ${CID}    ${S_OID_USER}    ${EMPTY}    local_file_eacl
 
 
     Log    Set eACL for Deny GET operation with StringEqual Object ID
     ${ID_value} =    Get From Dictionary    ${HEADER_DICT}    ID
 
     ${filters} =    Create Dictionary    headerType=OBJECT    matchType=STRING_EQUAL    key=$Object:objectID    value=${ID_value}
     ${rule1} =    Create Dictionary    Operation=GET    Access=DENY    Role=OTHERS    Filters=${filters}
@@ -171,10 +171,10 @@ Check eACL MatchType String Not Equal Object
     ${S_OID_USER} =    Put object    ${USER_KEY}    ${FILE_S}    ${CID}    ${EMPTY}    ${FILE_USR_HEADER}
     ${S_OID_OTHER} =    Put object    ${OTHER_KEY}    ${FILE_S_2}    ${CID}    ${EMPTY}    ${FILE_OTH_HEADER}
 
-    ${HEADER} =    Head object    ${USER_KEY}    ${CID}    ${S_OID_USER}    ${EMPTY}
-    Head object    ${USER_KEY}    ${CID}    ${S_OID_OTHER}    ${EMPTY}
+    ${HEADER} =    Head object    ${USER_KEY}    ${CID}    ${S_OID_USER}    ${EMPTY}    json_output=True
+    Head object    ${USER_KEY}    ${CID}    ${S_OID_OTHER}    ${EMPTY}    json_output=True
 
-    &{HEADER_DICT} =    Parse Object System Header    ${HEADER}
+    &{HEADER_DICT} =    Decode Object System Header Json    ${HEADER}
 
     Get object    ${OTHER_KEY}    ${CID}    ${S_OID_USER}    ${EMPTY}    local_file_eacl
     Get object    ${OTHER_KEY}    ${CID}    ${S_OID_OTHER}    ${EMPTY}    local_file_eacl
@@ -18,7 +18,7 @@ ${ATTR_NONE} = NoAttribute=''
 ${ATTR_SINGLE} =    AttrNum=one
 
 *** Test Cases ***
-Duplicated Object Attributes
+Duplicated Container Attributes
     [Documentation]    Testcase to check duplicated container attributes.
     [Tags]    Container    NeoFS    NeoCLI
     [Timeout]    10 min
@@ -37,9 +37,9 @@ Duplicated Object Attributes
     Run Keyword And Expect Error    *
     ...    Create container    ${USER_KEY}    ${EMPTY}    ${POLICY}    ${ATTR_DUPLICATE}
 
-    #####################################################
-    # Checking that container cannot have empty attibute
-    #####################################################
+    ######################################################
+    # Checking that container cannot have empty attribute
+    ######################################################
 
     Run Keyword And Expect Error    *
     ...    Create container    ${USER_KEY}    ${EMPTY}    ${POLICY}    ${ATTR_NONE}
@@ -49,21 +49,8 @@ Duplicated Object Attributes
     #####################################################
 
     ${CID} =    Create container    ${USER_KEY}    ${EMPTY}    ${POLICY}    ${ATTR_SINGLE}
-    ${HEAD} =    Head container    ${USER_KEY}    ${CID}    ${EMPTY}    json_output=True
-    ${ATTR} =    Parse Header Attributes    ${HEAD}
-    Should Be Equal    ${ATTR}    ${ATTR_SINGLE}
+    ${ATTRIBUTES} =    Get container attributes    ${USER_KEY}    ${CID}    ${EMPTY}    json_output=True
+    ${ATTRIBUTES_DICT} =    Decode Container Attributes Json    ${ATTRIBUTES}
+    Verify Head Attribute    ${ATTRIBUTES_DICT}    ${ATTR_SINGLE}
 
     [Teardown]    Teardown    container_attributes
-
-*** Keywords ***
-
-Parse Header Attributes
-
-    [Arguments]    ${HEADER}
-    &{HEADER_DIC} =    Evaluate    json.loads('''${HEADER}''')    json
-    @{ATTR_DIC} =    Get From Dictionary    ${HEADER_DIC}    attributes
-    &{ATTR_NUM_DIC} =    Get From List    ${ATTR_DIC}    0
-    ${ATTR_KEY} =    Get From Dictionary    ${ATTR_NUM_DIC}    key
-    ${ATTR_VALUE} =    Get From Dictionary    ${ATTR_NUM_DIC}    value
-    ${ATTRIBUTE} =    Catenate    SEPARATOR=\=    ${ATTR_KEY}    ${ATTR_VALUE}
-    [Return]    ${ATTRIBUTE}
@@ -54,21 +54,7 @@ Duplicated Object Attributes
 
     ${OID} =    Put object    ${USER_KEY}    ${FILE_S}    ${PUBLIC_CID}    ${EMPTY}    ${ATTR_SINGLE}
     ${HEAD} =    Head object    ${USER_KEY}    ${PUBLIC_CID}    ${OID}    json_output=True
-    ${ATTR} =    Parse Header Attributes    ${HEAD}
-    Should Be Equal    ${ATTR}    ${ATTR_SINGLE}
+    ${HEADER_58} =    Decode Object System Header Json    ${HEAD}
+    Verify Head Attribute    ${HEADER_58}    ${ATTR_SINGLE}
 
     [Teardown]    Teardown    object_attributes
-
-*** Keywords ***
-
-Parse Header Attributes
-
-    [Arguments]    ${HEADER}
-    &{HEADER_DIC} =    Evaluate    json.loads('''${HEADER}''')    json
-    &{HEADER_ELEMENT} =    Get From Dictionary    ${HEADER_DIC}    header
-    @{ATTR_DIC} =    Get From Dictionary    ${HEADER_ELEMENT}    attributes
-    &{ATTR_NUM_DIC} =    Get From List    ${ATTR_DIC}    0
-    ${ATTR_KEY} =    Get From Dictionary    ${ATTR_NUM_DIC}    key
-    ${ATTR_VALUE} =    Get From Dictionary    ${ATTR_NUM_DIC}    value
-    ${ATTRIBUTE} =    Catenate    SEPARATOR=\=    ${ATTR_KEY}    ${ATTR_VALUE}
-    [Return]    ${ATTRIBUTE}
@@ -3,6 +3,7 @@ Variables    ../../../variables/common.py
 
 Library    ../${RESOURCES}/neofs.py
 Library    ../${RESOURCES}/payment_neogo.py
+Library    ${KEYWORDS}/contract_keywords.py
 
 Resource    common_steps_object.robot
 Resource    ../${RESOURCES}/setup_teardown.robot