fixes and refactoring in HTTP and S3 tests

Signed-off-by: anastasia prasolova <anastasia@nspcc.ru>
anastasia prasolova 2022-03-11 19:08:14 +03:00 committed by Anastasia Prasolova
parent 40db81216a
commit 2b51542ca2
15 changed files with 164 additions and 219 deletions

View file

@@ -0,0 +1,35 @@
#!/usr/bin/python3
import shutil
import requests
from common import HTTP_GATE
from robot.api.deco import keyword
from robot.api import logger
from robot.libraries.BuiltIn import BuiltIn
ROBOT_AUTO_KEYWORDS = False
ASSETS_DIR = BuiltIn().get_variable_value("${ASSETS_DIR}")
@keyword('Get via HTTP Gate')
def get_via_http_gate(cid: str, oid: str):
"""
This function gets the given object from the HTTP gate
:param cid: CID to get object from
:param oid: object OID
"""
request = f'{HTTP_GATE}/get/{cid}/{oid}'
resp = requests.get(request, stream=True)
if not resp.ok:
raise Exception(f"""Failed to get object via HTTP gate:
request: {resp.request.path_url},
response: {resp.text},
status code: {resp.status_code} {resp.reason}""")
logger.info(f'Request: {request}')
filename = f"{ASSETS_DIR}/{cid}_{oid}"
with open(filename, "wb") as get_file:
shutil.copyfileobj(resp.raw, get_file)
return filename
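
For reference, a minimal usage sketch of the new keyword when invoked directly from Python (the identifiers below are placeholders; in the test suites they come from other keywords, and HTTP_GATE and ASSETS_DIR are taken from the dev-env configuration):

    # Hypothetical values for illustration only; real tests obtain cid and oid
    # from container/object creation keywords.
    cid = '<container-id>'
    oid = '<object-id>'
    saved_path = get_via_http_gate(cid, oid)  # downloads to {ASSETS_DIR}/{cid}_{oid}
    logger.info(f'Object saved to {saved_path}')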

View file

@@ -54,7 +54,7 @@ def start_nodes(*nodes_list):
node = m.group(1)
client = docker.APIClient()
client.start(node)
@keyword('Get nodes with object')
def get_nodes_with_object(private_key: str, cid: str, oid: str):
@@ -712,12 +712,6 @@ def delete_container(cid: str, private_key: str):
_cmd_run(deleteContainerCmd)
@keyword('Get file name')
def get_file_name(filepath):
filename = os.path.basename(filepath)
return filename
@keyword('Get file hash')
def get_file_hash(filename : str):
file_hash = _get_file_hash(filename)
@@ -771,7 +765,7 @@ def get_control_endpoint_with_wif(endpoint_number: str = ''):
endpoint_values = NEOFS_NETMAP_DICT[f'{endpoint_num}']
endpoint_control = endpoint_values['control']
wif = endpoint_values['wif']
return endpoint_num, endpoint_control, wif
@keyword('Get Locode')
@@ -1012,7 +1006,7 @@ def generate_session_token(owner: str, pub_key: str, cid: str = "", wildcard: bo
@keyword ('Sign Session Token')
def sign_session_token(session_token: str, wallet: str, to_file: str=''):
if to_file:
to_file = f'--to {to_file}'
cmd = (
f'{NEOFS_CLI_EXEC} util sign session-token --from {session_token} '
f'-w {wallet} {to_file}'

View file

@@ -1,19 +1,23 @@
#!/usr/bin/python3.8
#!/usr/bin/python3
import json
import os
import re
import shutil
import subprocess
import uuid
import requests
import botocore
import boto3
from common import *
from robot.api.deco import keyword
from robot.api import logger
import boto3
import botocore
from cli_helpers import _run_with_passwd
from common import GATE_PUB_KEY, NEOFS_ENDPOINT, S3_GATE
import urllib3
from robot.api.deco import keyword
from robot.api import logger
##########################################################
# Disable warnings about the self-signed certificate that the
# boto library produces on requests to the S3 gate in dev-env.
urllib3.disable_warnings()
##########################################################
ROBOT_AUTO_KEYWORDS = False
@@ -22,44 +26,37 @@ NEOFS_EXEC = os.getenv('NEOFS_EXEC', 'neofs-authmate')
@keyword('Init S3 Credentials')
def init_s3_credentials(wallet):
bucket = str(uuid.uuid4())
records = ' \' {"records":[{"operation":"PUT","action":"ALLOW","filters":[],"targets":[{"role":"OTHERS","keys":[]}]}, {"operation":"SEARCH","action":"ALLOW","filters":[],"targets":[{"role":"OTHERS","keys":[]}]}, {"operation":"GET","action":"ALLOW","filters":[],"targets":[{"role":"OTHERS","keys":[]}]}]} \' '
Cmd = (
s3_bearer_rules = "robot/resources/files/s3_bearer_rules.json"
cmd = (
f'{NEOFS_EXEC} --debug --with-log issue-secret --wallet {wallet} '
f'--gate-public-key={GATE_PUB_KEY} --peer {NEOFS_ENDPOINT} '
f'--container-friendly-name {bucket} --create-session-token '
f'--bearer-rules {records}'
f'--container-friendly-name {bucket} '
f'--bearer-rules {s3_bearer_rules}'
)
logger.info(f"Executing command: {Cmd}")
logger.info(f"Executing command: {cmd}")
try:
output = _run_with_passwd(Cmd)
output = _run_with_passwd(cmd)
logger.info(f"Command completed with output: {output}")
# the first five lines are log output; cut them off and parse
# the rest of the output as JSON
output = '\n'.join(output.split('\n')[5:])
output_dict = json.loads(output)
m = re.search(r'"container_id":\s+"(\w+)"', output)
cid = m.group(1)
logger.info("cid: %s" % cid)
return (output_dict['container_id'],
bucket,
output_dict['access_key_id'],
output_dict['secret_access_key'],
output_dict['owner_private_key'])
m = re.search(r'"access_key_id":\s+"([\w\/]+)"', output)
access_key_id = m.group(1)
logger.info("access_key_id: %s" % access_key_id)
m = re.search(r'"secret_access_key":\s+"(\w+)"', output)
secret_access_key = m.group(1)
logger.info("secret_access_key: %s" % secret_access_key)
m = re.search(r'"owner_private_key":\s+"(\w+)"', output)
owner_private_key = m.group(1)
logger.info("owner_private_key: %s" % owner_private_key)
return cid, bucket, access_key_id, secret_access_key, owner_private_key
except subprocess.CalledProcessError as e:
raise Exception(f"Error: \nreturn code: {e.returncode}. \nOutput: {e.stderr}")
except Exception as exc:
raise RuntimeError("failed to init s3 credentials") from exc
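
The refactored parsing assumes that neofs-authmate issue-secret prints five log lines followed by a single JSON document; everything after line five is loaded with json.loads and the credentials are read from the resulting dict. A minimal sketch of just that step, using illustrative (not real) tool output:

    import json

    # Illustrative output only: five log lines, then the JSON payload that
    # init_s3_credentials slices off and loads.
    sample_output = '\n'.join(
        [f'log line {i}' for i in range(5)] +
        ['{"container_id": "<cid>", "access_key_id": "<key-id>", '
         '"secret_access_key": "<secret>", "owner_private_key": "<wif>"}']
    )
    payload = json.loads('\n'.join(sample_output.split('\n')[5:]))
    assert payload['access_key_id'] == '<key-id>'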
@keyword('Config S3 client')
def config_s3_client(access_key_id, secret_access_key):
try:
session = boto3.session.Session()
s3_client = session.client(
@@ -80,11 +77,11 @@ def config_s3_client(access_key_id, secret_access_key):
def list_objects_s3_v2(s3_client, bucket):
try:
response = s3_client.list_objects_v2(Bucket=bucket)
logger.info("S3 v2 List objects result: %s" % response['Contents'])
logger.info(f"S3 v2 List objects result: {response['Contents']}")
obj_list = []
for obj in response['Contents']:
obj_list.append(obj['Key'])
logger.info("Found s3 objects: %s" % obj_list)
logger.info(f"Found s3 objects: {obj_list}")
return obj_list
except botocore.exceptions.ClientError as err:
@@ -96,11 +93,11 @@ def list_objects_s3_v2(s3_client, bucket):
def list_objects_s3(s3_client, bucket):
try:
response = s3_client.list_objects(Bucket=bucket)
logger.info("S3 List objects result: %s" % response['Contents'])
logger.info(f"S3 List objects result: {response['Contents']}")
obj_list = []
for obj in response['Contents']:
obj_list.append(obj['Key'])
logger.info("Found s3 objects: %s" % obj_list)
logger.info(f"Found s3 objects: {obj_list}")
return obj_list
except botocore.exceptions.ClientError as err:
@@ -114,12 +111,12 @@ def create_bucket_s3(s3_client):
try:
s3_bucket = s3_client.create_bucket(Bucket=bucket_name)
logger.info("Created S3 bucket: %s" % s3_bucket)
logger.info(f"Created S3 bucket: {s3_bucket}")
return bucket_name
except botocore.exceptions.ClientError as err:
raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
@keyword('List buckets S3')
@@ -127,7 +124,7 @@ def list_buckets_s3(s3_client):
found_buckets = []
try:
response = s3_client.list_buckets()
logger.info("S3 List buckets result: %s" % response)
logger.info(f"S3 List buckets result: {response}")
for bucket in response['Buckets']:
found_buckets.append(bucket['Name'])
@@ -136,7 +133,7 @@ def list_buckets_s3(s3_client):
except botocore.exceptions.ClientError as err:
raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
@keyword('Delete bucket S3')
@@ -144,7 +141,7 @@ def delete_bucket_s3(s3_client, bucket):
try:
response = s3_client.delete_bucket(Bucket=bucket)
logger.info(f"S3 Delete bucket result: {response}")
return response
except botocore.exceptions.ClientError as err:
@@ -152,11 +149,11 @@ def delete_bucket_s3(s3_client, bucket):
f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
@keyword('HeadBucket S3')
def headbucket(bucket, s3_client):
@keyword('Head bucket S3')
def head_bucket(s3_client, bucket):
try:
response = s3_client.head_bucket(Bucket=bucket)
logger.info(f"S3 HeadBucket result: {response}")
logger.info(f"S3 Head bucket result: {response}")
return response
except botocore.exceptions.ClientError as err:
@@ -168,12 +165,12 @@ def headbucket(bucket, s3_client):
def put_object_s3(s3_client, bucket, filepath):
filename = os.path.basename(filepath)
with open(filepath, "rb") as f:
fileContent = f.read()
with open(filepath, "rb") as put_file:
file_content = put_file.read()
try:
response = s3_client.put_object(Body=fileContent, Bucket=bucket, Key=filename)
logger.info("S3 Put object result: %s" % response)
response = s3_client.put_object(Body=file_content, Bucket=bucket, Key=filename)
logger.info(f"S3 Put object result: {response}")
except botocore.exceptions.ClientError as err:
raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
@@ -184,19 +181,19 @@ def head_object_s3(s3_client, bucket, object_key):
try:
response = s3_client.head_object(Bucket=bucket, Key=object_key)
logger.info("S3 Head object result: %s" % response)
logger.info(f"S3 Head object result: {response}")
return response
except botocore.exceptions.ClientError as err:
raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
@keyword('Delete object S3')
def delete_object_s3(s3_client, bucket, object_key):
try:
response = s3_client.delete_object(Bucket=bucket, Key=object_key)
logger.info("S3 Put object result: %s" % response)
logger.info(f"S3 Put object result: {response}")
return response
except botocore.exceptions.ClientError as err:
@@ -205,11 +202,14 @@ def delete_object_s3(s3_client, bucket, object_key):
@keyword('Copy object S3')
def copy_object_s3(s3_client, bucket, object_key, new_object):
def copy_object_s3(s3_client, bucket, object_key):
filename = f"{os.getcwd()}/{uuid.uuid4()}"
try:
response = s3_client.copy_object(Bucket=bucket, CopySource=bucket+"/"+object_key, Key=new_object)
logger.info("S3 Copy object result: %s" % response)
return response
response = s3_client.copy_object(Bucket=bucket,
CopySource=f"{bucket}/{object_key}",
Key=filename)
logger.info(f"S3 Copy object result: {response}")
return filename
except botocore.exceptions.ClientError as err:
raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
@@ -217,44 +217,19 @@ def copy_object_s3(s3_client, bucket, object_key, new_object):
@keyword('Get object S3')
def get_object_s3(s3_client, bucket, object_key, target_file):
def get_object_s3(s3_client, bucket, object_key):
filename = f"{os.getcwd()}/{uuid.uuid4()}"
try:
response = s3_client.get_object(Bucket=bucket, Key=object_key)
with open(f"{target_file}", 'wb') as f:
with open(f"{filename}", 'wb') as get_file:
chunk = response['Body'].read(1024)
while chunk:
f.write(chunk)
get_file.write(chunk)
chunk = response['Body'].read(1024)
return target_file
return filename
except botocore.exceptions.ClientError as err:
raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
@keyword('Get via HTTP Gate')
def get_via_http_gate(cid: str, oid: str):
"""
This function gets given object from HTTP gate
:param cid: CID to get object from
:param oid: object OID
"""
request = f'{HTTP_GATE}/get/{cid}/{oid}'
resp = requests.get(request, stream=True)
if not resp.ok:
raise Exception(f"""Failed to get object via HTTP gate:
request: {resp.request.path_url},
response: {resp.text},
status code: {resp.status_code} {resp.reason}""")
return
logger.info(f'Request: {request}')
filename = os.path.curdir + f"/{cid}_{oid}"
with open(filename, "wb") as f:
shutil.copyfileobj(resp.raw, f)
del resp
return filename

View file

@@ -54,12 +54,12 @@ def wif_to_binary(wif: str) -> str:
return path
@keyword('Make Up')
def make_up(services=['']):
def make_up(services: list=[]):
test_path = os.getcwd()
dev_path = os.getenv('DEVENV_PATH', '../neofs-dev-env')
os.chdir(dev_path)
if services != ['']:
if len(services) > 0:
for service in services:
cmd = f'make up/{service}'
logger.info(f"Cmd: {cmd}")