forked from TrueCloudLab/frostfs-testcases
Add pytest tests.
Change AWS cli v1 to cli v2. Improve allure attachments. Add tests for S3 API. Signed-off-by: a.y.volkov <a.y.volkov@yadro.com>
This commit is contained in:
parent
7abb2761c9
commit
6b1e1ab28d
24 changed files with 2210 additions and 247 deletions
219
robot/resources/lib/python_keywords/aws_cli_client.py
Normal file
219
robot/resources/lib/python_keywords/aws_cli_client.py
Normal file
|
@ -0,0 +1,219 @@
|
|||
import json
|
||||
import logging
|
||||
import os
|
||||
|
||||
import allure
|
||||
from cli_helpers import _cmd_run, _configure_aws_cli
|
||||
from common import ASSETS_DIR, S3_GATE
|
||||
|
||||
logger = logging.getLogger('NeoLogger')
|
||||
|
||||
|
||||
class AwsCliClient:
    """boto3-compatible facade over the `aws` command-line tool.

    Method and parameter names intentionally mirror the boto3 S3 client
    (hence the CamelCase keyword arguments) so tests can swap the two
    implementations. Every call shells out through `_cmd_run` against the
    S3 gate and the JSON printed by the CLI is parsed into a dict.

    NOTE(review): commands mix `--endpoint` and `--endpoint-url`; the AWS
    CLI accepts both, `--endpoint-url` being the documented spelling —
    left as-is to avoid touching command strings.
    """

    def __init__(self, access_key_id: str, secret_access_key: str):
        self.access_key_id = access_key_id
        self.secret_access_key = secret_access_key
        self.config_aws_client()

    def config_aws_client(self):
        """Feed the stored keys to interactive `aws configure`."""
        cmd = 'aws configure'
        logger.info(f'Executing command: {cmd}')
        _configure_aws_cli(cmd, self.access_key_id, self.secret_access_key)

    def create_bucket(self, Bucket: str):
        """Create a bucket; the CLI output is discarded."""
        cmd = f'aws --no-verify-ssl s3api create-bucket --bucket {Bucket} --endpoint-url {S3_GATE}'
        _cmd_run(cmd)

    def list_buckets(self) -> dict:
        """Return the ListBuckets response parsed from CLI JSON."""
        cmd = f'aws --no-verify-ssl s3api list-buckets --endpoint {S3_GATE}'
        output = _cmd_run(cmd)
        return self._to_json(output)

    def get_bucket_versioning(self, Bucket: str) -> dict:
        """Return the bucket's versioning configuration."""
        cmd = f'aws --no-verify-ssl s3api get-bucket-versioning --bucket {Bucket}' \
              f' --endpoint {S3_GATE}'
        output = _cmd_run(cmd)
        return self._to_json(output)

    def put_bucket_versioning(self, Bucket: str, VersioningConfiguration: dict) -> dict:
        """Set bucket versioning; only the 'Status' key of the dict is used."""
        cmd = f'aws --no-verify-ssl s3api put-bucket-versioning --bucket {Bucket} ' \
              f'--versioning-configuration Status={VersioningConfiguration.get("Status")}' \
              f' --endpoint {S3_GATE}'
        output = _cmd_run(cmd)
        return self._to_json(output)

    def list_objects(self, Bucket: str) -> dict:
        """Return the (v1) ListObjects response."""
        cmd = f'aws --no-verify-ssl s3api list-objects --bucket {Bucket}' \
              f' --endpoint {S3_GATE}'
        output = _cmd_run(cmd)
        return self._to_json(output)

    def list_objects_v2(self, Bucket: str) -> dict:
        """Return the ListObjectsV2 response."""
        cmd = f'aws --no-verify-ssl s3api list-objects-v2 --bucket {Bucket}' \
              f' --endpoint {S3_GATE}'
        output = _cmd_run(cmd)
        return self._to_json(output)

    def list_object_versions(self, Bucket: str) -> dict:
        """Return all object versions in the bucket."""
        cmd = f'aws --no-verify-ssl s3api list-object-versions --bucket {Bucket}' \
              f' --endpoint {S3_GATE}'
        output = _cmd_run(cmd)
        return self._to_json(output)

    def copy_object(self, Bucket: str, CopySource: str, Key: str) -> dict:
        """Server-side copy of CopySource ('bucket/key') into Bucket/Key."""
        cmd = f'aws --no-verify-ssl s3api copy-object --copy-source {CopySource} --bucket {Bucket} --key {Key}' \
              f' --endpoint {S3_GATE}'
        output = _cmd_run(cmd)
        return self._to_json(output)

    def head_bucket(self, Bucket: str) -> dict:
        """HEAD the bucket (existence/permission probe)."""
        cmd = f'aws --no-verify-ssl s3api head-bucket --bucket {Bucket} --endpoint {S3_GATE}'
        output = _cmd_run(cmd)
        return self._to_json(output)

    def put_object(self, Body: str, Bucket: str, Key: str) -> dict:
        """Upload the local file at *Body* as Bucket/Key."""
        cmd = f' aws --no-verify-ssl s3api put-object --bucket {Bucket} --key {Key} --body {Body}' \
              f' --endpoint {S3_GATE}'
        output = _cmd_run(cmd)
        return self._to_json(output)

    def head_object(self, Bucket: str, Key: str, VersionId: str = None) -> dict:
        """HEAD an object, optionally a specific version."""
        version = f' --version-id {VersionId}' if VersionId else ''
        cmd = f' aws --no-verify-ssl s3api head-object --bucket {Bucket} --key {Key} {version}' \
              f' --endpoint {S3_GATE}'
        output = _cmd_run(cmd)
        return self._to_json(output)

    def get_object(self, Bucket: str, Key: str, file_path: str, VersionId: str = None) -> dict:
        """Download Bucket/Key into *file_path* (the CLI's positional outfile)."""
        version = f' --version-id {VersionId}' if VersionId else ''
        cmd = f' aws --no-verify-ssl s3api get-object --bucket {Bucket} ' \
              f'--key {Key} {version} {file_path} --endpoint {S3_GATE}'
        output = _cmd_run(cmd)
        return self._to_json(output)

    def delete_objects(self, Bucket: str, Delete: dict) -> dict:
        """Batch delete; the Delete spec is handed to the CLI via a temp JSON file."""
        file_path = f"{os.getcwd()}/{ASSETS_DIR}/delete.json"
        with open(file_path, 'w') as out_file:
            out_file.write(json.dumps(Delete))

        cmd = f'aws --no-verify-ssl s3api delete-objects --bucket {Bucket} --delete file://{file_path} ' \
              f'--endpoint {S3_GATE}'
        output = _cmd_run(cmd)
        return self._to_json(output)

    def delete_object(self, Bucket: str, Key: str, VersionId: str = None) -> dict:
        """Delete one object, optionally a specific version."""
        version = f' --version-id {VersionId}' if VersionId else ''
        cmd = f'aws --no-verify-ssl s3api delete-object --bucket {Bucket} --key {Key} {version}' \
              f' --endpoint {S3_GATE}'
        output = _cmd_run(cmd)
        return self._to_json(output)

    def get_object_attributes(self, bucket: str, key: str, *attributes: str, version_id: str = None,
                              max_parts: int = None, part_number: int = None) -> dict:
        """GetObjectAttributes for the requested attribute names.

        Optional paging controls map to --max-parts / --part-number-marker.
        """
        attrs = ','.join(attributes)
        version = f' --version-id {version_id}' if version_id else ''
        parts = f'--max-parts {max_parts}' if max_parts else ''
        # Rebinds the parameter with its CLI flag form; kept for parity with siblings.
        part_number = f'--part-number-marker {part_number}' if part_number else ''
        cmd = f'aws --no-verify-ssl s3api get-object-attributes --bucket {bucket} --key {key} {version}' \
              f' {parts} {part_number} --object-attributes {attrs} --endpoint {S3_GATE}'
        output = _cmd_run(cmd)
        return self._to_json(output)

    def delete_bucket(self, Bucket: str) -> dict:
        """Delete the bucket (must be empty on the service side)."""
        cmd = f'aws --no-verify-ssl s3api delete-bucket --bucket {Bucket} --endpoint {S3_GATE}'
        output = _cmd_run(cmd)
        return self._to_json(output)

    def get_bucket_tagging(self, Bucket: str) -> dict:
        """Return the bucket's tag set."""
        cmd = f'aws --no-verify-ssl s3api get-bucket-tagging --bucket {Bucket} --endpoint {S3_GATE}'
        output = _cmd_run(cmd)
        return self._to_json(output)

    def put_bucket_tagging(self, Bucket: str, Tagging: dict) -> dict:
        """Replace the bucket's tag set; Tagging is serialized inline as JSON."""
        cmd = f'aws --no-verify-ssl s3api put-bucket-tagging --bucket {Bucket} --tagging \'{json.dumps(Tagging)}\'' \
              f' --endpoint {S3_GATE}'
        output = _cmd_run(cmd)
        return self._to_json(output)

    def delete_bucket_tagging(self, Bucket: str) -> dict:
        """Remove all tags from the bucket."""
        cmd = f'aws --no-verify-ssl s3api delete-bucket-tagging --bucket {Bucket} --endpoint {S3_GATE}'
        output = _cmd_run(cmd)
        return self._to_json(output)

    def put_object_tagging(self, Bucket: str, Key: str, Tagging: dict) -> dict:
        """Replace the object's tag set."""
        cmd = f'aws --no-verify-ssl s3api put-object-tagging --bucket {Bucket} --key {Key}' \
              f' --tagging \'{json.dumps(Tagging)}\'' \
              f' --endpoint {S3_GATE}'
        output = _cmd_run(cmd)
        return self._to_json(output)

    def get_object_tagging(self, Bucket: str, Key: str) -> dict:
        """Return the object's tag set."""
        cmd = f'aws --no-verify-ssl s3api get-object-tagging --bucket {Bucket} --key {Key} --endpoint {S3_GATE}'
        output = _cmd_run(cmd)
        return self._to_json(output)

    def delete_object_tagging(self, Bucket: str, Key: str) -> dict:
        """Remove all tags from the object."""
        cmd = f'aws --no-verify-ssl s3api delete-object-tagging --bucket {Bucket} --key {Key} --endpoint {S3_GATE}'
        output = _cmd_run(cmd)
        return self._to_json(output)

    @allure.step('Sync directory S3')
    def sync(self, bucket_name: str, dir_path: str) -> dict:
        """`aws s3 sync` a local directory into the bucket."""
        cmd = f'aws --no-verify-ssl s3 sync {dir_path} s3://{bucket_name} --endpoint-url {S3_GATE}'
        output = _cmd_run(cmd)
        return self._to_json(output)

    def create_multipart_upload(self, Bucket: str, Key: str) -> dict:
        """Start a multipart upload; response carries the UploadId."""
        cmd = f'aws --no-verify-ssl s3api create-multipart-upload --bucket {Bucket} --key {Key}' \
              f' --endpoint-url {S3_GATE}'
        output = _cmd_run(cmd)
        return self._to_json(output)

    def list_multipart_uploads(self, Bucket: str) -> dict:
        """List in-progress multipart uploads in the bucket."""
        cmd = f'aws --no-verify-ssl s3api list-multipart-uploads --bucket {Bucket}' \
              f' --endpoint-url {S3_GATE}'
        output = _cmd_run(cmd)
        return self._to_json(output)

    def abort_multipart_upload(self, Bucket: str, Key: str, UploadId: str) -> dict:
        """Abort the given multipart upload."""
        cmd = f'aws --no-verify-ssl s3api abort-multipart-upload --bucket {Bucket} --key {Key}' \
              f' --upload-id {UploadId} --endpoint-url {S3_GATE}'
        output = _cmd_run(cmd)
        return self._to_json(output)

    def upload_part(self, UploadId: str, Bucket: str, Key: str, PartNumber: int, Body: str) -> dict:
        """Upload one part (local file *Body*) of a multipart upload."""
        cmd = f'aws --no-verify-ssl s3api upload-part --bucket {Bucket} --key {Key} --upload-id {UploadId} ' \
              f'--part-number {PartNumber} --body {Body} --endpoint-url {S3_GATE}'
        output = _cmd_run(cmd)
        return self._to_json(output)

    def list_parts(self, UploadId: str, Bucket: str, Key: str) -> dict:
        """List the parts uploaded so far for the given UploadId."""
        cmd = f'aws --no-verify-ssl s3api list-parts --bucket {Bucket} --key {Key} --upload-id {UploadId} ' \
              f' --endpoint-url {S3_GATE}'
        output = _cmd_run(cmd)
        return self._to_json(output)

    def complete_multipart_upload(self, Bucket: str, Key: str, UploadId: str, MultipartUpload: dict) -> dict:
        """Complete a multipart upload; the part manifest goes via a temp file."""
        file_path = f"{os.getcwd()}/{ASSETS_DIR}/parts.json"
        with open(file_path, 'w') as out_file:
            out_file.write(json.dumps(MultipartUpload))

        cmd = f'aws --no-verify-ssl s3api complete-multipart-upload --bucket {Bucket} --key {Key}' \
              f' --upload-id {UploadId} --multipart-upload file://{file_path}' \
              f' --endpoint-url {S3_GATE}'
        output = _cmd_run(cmd)
        return self._to_json(output)

    @staticmethod
    def _to_json(output: str) -> dict:
        """Parse CLI output as JSON, skipping any non-JSON log prefix.

        Returns {} when the output contains no JSON object at all.
        """
        json_output = {}
        try:
            json_output = json.loads(output)
        except Exception:
            # The CLI may prepend log lines; retry from the first '{'.
            # Bug fix: the old guard required BOTH braces to be absent, so
            # output containing '}' but no '{' crashed on .index('{')
            # with an unhandled ValueError instead of being reported.
            if '{' not in output:
                logger.warning(f'Could not parse json from output {output}')
                return json_output
            json_output = json.loads(output[output.index('{'):])

        return json_output
|
|
@ -4,9 +4,15 @@
|
|||
Helper functions to use with `neofs-cli`, `neo-go`
|
||||
and other CLIs.
|
||||
"""
|
||||
|
||||
from typing import Union
|
||||
import subprocess
|
||||
import sys
|
||||
from contextlib import suppress
|
||||
from datetime import datetime
|
||||
from json import dumps
|
||||
from textwrap import shorten
|
||||
|
||||
import allure
|
||||
import pexpect
|
||||
from robot.api import logger
|
||||
|
||||
|
@ -20,11 +26,16 @@ def _cmd_run(cmd, timeout=30):
|
|||
"""
|
||||
try:
|
||||
logger.info(f"Executing command: {cmd}")
|
||||
start_time = datetime.now()
|
||||
compl_proc = subprocess.run(cmd, check=True, universal_newlines=True,
|
||||
stdout=subprocess.PIPE, stderr=subprocess.STDOUT, timeout=timeout,
|
||||
shell=True)
|
||||
output = compl_proc.stdout
|
||||
return_code = compl_proc.returncode
|
||||
end_time = datetime.now()
|
||||
logger.info(f"Output: {output}")
|
||||
_attach_allure_log(cmd, output, return_code, start_time, end_time)
|
||||
|
||||
return output
|
||||
except subprocess.CalledProcessError as exc:
|
||||
raise RuntimeError(f"Error:\nreturn code: {exc.returncode} "
|
||||
|
@ -38,8 +49,62 @@ def _cmd_run(cmd, timeout=30):
|
|||
|
||||
def _run_with_passwd(cmd):
    """Run *cmd* under pexpect, answer its first prompt with an empty line,
    and return the full captured output as a decoded string.

    Used for CLIs (e.g. neofs-authmate) that interactively ask for a wallet
    password; sending '\r' accepts an empty password.

    :param cmd: shell command line to spawn
    :return: everything the child printed, decoded from bytes
    """
    child = pexpect.spawn(cmd)
    # Give the child a moment to print its prompt before we answer.
    child.delaybeforesend = 1
    child.expect(".*")
    child.sendline('\r')
    if sys.platform == "darwin":
        # On macOS pexpect's wait/read combination is unreliable here;
        # read up to EOF and take the buffered output instead.
        child.expect(pexpect.EOF)
        cmd = child.before
    else:
        child.wait()
        cmd = child.read()
    # NOTE: `cmd` is rebound from the command string to the output bytes above.
    return cmd.decode()
|
||||
|
||||
|
||||
def _configure_aws_cli(cmd, key_id, access_key, out_format='json'):
    """Answer the interactive prompts of `aws configure`.

    :param cmd: the configure command line to spawn (normally 'aws configure')
    :param key_id: value for the "AWS Access Key ID" prompt
    :param access_key: value for the "AWS Secret Access Key" prompt
    :param out_format: value for the "Default output format" prompt
    :return: the child's remaining output, decoded

    The "Default region name" prompt is answered with an empty line, keeping
    whatever default the CLI applies.
    """
    child = pexpect.spawn(cmd)
    # Give the CLI time to print each prompt before we answer it.
    child.delaybeforesend = 1

    child.expect("AWS Access Key ID.*")
    child.sendline(key_id)

    child.expect("AWS Secret Access Key.*")
    child.sendline(access_key)

    child.expect("Default region name.*")
    child.sendline('')

    child.expect("Default output format.*")
    child.sendline(out_format)

    child.wait()
    # Rebind to the captured output (removed the stale commented-out
    # expect(EOF) variant that used to live here).
    cmd = child.read()
    return cmd.decode()
|
||||
|
||||
|
||||
def _attach_allure_log(cmd: str, output: str, return_code: int, start_time: datetime, end_time: datetime):
|
||||
if 'allure' in sys.modules:
|
||||
command_attachment = (
|
||||
f"COMMAND: '{cmd}'\n"
|
||||
f'OUTPUT:\n {output}\n'
|
||||
f'RC: {return_code}\n'
|
||||
f'Start / End / Elapsed\t {start_time.time()} / {end_time.time()} / {end_time - start_time}'
|
||||
)
|
||||
with allure.step(f'COMMAND: {shorten(cmd, width=60, placeholder="...")}'):
|
||||
allure.attach(command_attachment, 'Command execution', allure.attachment_type.TEXT)
|
||||
|
||||
|
||||
def log_command_execution(cmd: str, output: Union[str, dict]):
    """Log a command and its output, and mirror them into Allure if loaded.

    Dict outputs are pretty-printed as JSON for the attachment when possible;
    any serialization failure leaves the raw output untouched.
    """
    logger.info(f'{cmd}: {output}')
    if 'allure' not in sys.modules:
        return
    with suppress(Exception):
        output = dumps(output, indent=4, sort_keys=True)
    report = (
        f"COMMAND: '{cmd}'\n"
        f'OUTPUT:\n {output}\n'
    )
    step_title = f'COMMAND: {shorten(cmd, width=60, placeholder="...")}'
    with allure.step(step_title):
        allure.attach(report, 'Command execution', allure.attachment_type.TEXT)
|
||||
|
|
|
@ -1,16 +1,28 @@
|
|||
#!/usr/bin/python3
|
||||
|
||||
import re
|
||||
import shutil
|
||||
import sys
|
||||
import uuid
|
||||
import zipfile
|
||||
from urllib.parse import quote_plus
|
||||
|
||||
import allure
|
||||
import requests
|
||||
from cli_helpers import _cmd_run
|
||||
from common import HTTP_GATE
|
||||
from robot.api import logger
|
||||
from robot.api.deco import keyword
|
||||
from robot.libraries.BuiltIn import BuiltIn
|
||||
|
||||
from common import HTTP_GATE
|
||||
|
||||
ROBOT_AUTO_KEYWORDS = False

# ASSETS_DIR must come from exactly one source: under pytest there is no
# Robot Framework variable store, so use the environment; under Robot use
# the suite variable. The previous unconditional BuiltIn() lookup before
# this conditional was redundant and wrong on the pytest path.
if "pytest" in sys.modules:
    import os

    ASSETS_DIR = os.getenv("ASSETS_DIR", "TemporaryDir/")
else:
    ASSETS_DIR = BuiltIn().get_variable_value("${ASSETS_DIR}")
|
||||
|
||||
|
||||
@keyword('Get via HTTP Gate')
|
||||
|
@ -30,7 +42,141 @@ def get_via_http_gate(cid: str, oid: str):
|
|||
status code: {resp.status_code} {resp.reason}""")
|
||||
|
||||
logger.info(f'Request: {request}')
|
||||
_attach_allure_step(request, resp.status_code)
|
||||
|
||||
filename = f"{ASSETS_DIR}/{cid}_{oid}"
|
||||
with open(filename, "wb") as get_file:
|
||||
shutil.copyfileobj(resp.raw, get_file)
|
||||
return filename
|
||||
|
||||
|
||||
@keyword('Get via Zip HTTP Gate')
def get_via_zip_http_gate(cid: str, prefix: str):
    """
    This function gets given object from HTTP gate
    :param cid: CID to get object from
    :param prefix: common prefix
    """
    request = f'{HTTP_GATE}/zip/{cid}/{prefix}'
    resp = requests.get(request, stream=True)
    if not resp.ok:
        raise Exception(f"""Failed to get object via HTTP gate:
                request: {resp.request.path_url},
                response: {resp.text},
                status code: {resp.status_code} {resp.reason}""")

    logger.info(f'Request: {request}')
    _attach_allure_step(request, resp.status_code)

    # Stream the archive to disk, then unpack it next to the other assets.
    archive_path = f'{ASSETS_DIR}/{cid}_archive.zip'
    with open(archive_path, 'wb') as archive_file:
        shutil.copyfileobj(resp.raw, archive_file)
    with zipfile.ZipFile(archive_path, 'r') as archive:
        archive.extractall(ASSETS_DIR)

    return f'{ASSETS_DIR}/{prefix}'
|
||||
|
||||
|
||||
@keyword('Get via HTTP Gate by attribute')
def get_via_http_gate_by_attribute(cid: str, attribute: dict):
    """
    This function gets given object from HTTP gate
    :param cid: CID to get object from
    :param attribute: attribute name: attribute value pair
    """
    # Only the first key/value pair of the dict is used for the lookup.
    attr_name = list(attribute.keys())[0]
    attr_value = quote_plus(str(attribute.get(attr_name)))
    request = f'{HTTP_GATE}/get_by_attribute/{cid}/{quote_plus(str(attr_name))}/{attr_value}'
    resp = requests.get(request, stream=True)
    if not resp.ok:
        raise Exception(f"""Failed to get object via HTTP gate:
                request: {resp.request.path_url},
                response: {resp.text},
                status code: {resp.status_code} {resp.reason}""")

    logger.info(f'Request: {request}')
    _attach_allure_step(request, resp.status_code)

    # Random suffix keeps repeated downloads from clobbering each other.
    filename = f"{ASSETS_DIR}/{cid}_{str(uuid.uuid4())}"
    with open(filename, "wb") as get_file:
        shutil.copyfileobj(resp.raw, get_file)
    return filename
|
||||
|
||||
|
||||
@keyword('Upload via HTTP Gate')
def upload_via_http_gate(cid: str, path: str, headers: dict = None) -> str:
    """
    This function upload given object through HTTP gate
    :param cid: CID to get object from
    :param path: File path to upload
    :param headers: Object header
    :return: the object_id reported by the gate
    """
    request = f'{HTTP_GATE}/upload/{cid}'
    body = {
        'filename': path
    }
    # Close the uploaded file deterministically instead of leaking the
    # handle that the old bare open() left behind.
    with open(path, 'rb') as upload_file:
        files = {'upload_file': upload_file}
        resp = requests.post(request, files=files, data=body, headers=headers)

    if not resp.ok:
        raise Exception(f"""Failed to get object via HTTP gate:
                request: {resp.request.path_url},
                response: {resp.text},
                status code: {resp.status_code} {resp.reason}""")

    logger.info(f'Request: {request}')
    _attach_allure_step(request, resp.json(), req_type='POST')

    # Fixed assertion message: it fires when the OID is MISSING.
    assert resp.json().get('object_id'), f'OID not found in response {resp}'

    return resp.json().get('object_id')
|
||||
|
||||
|
||||
@keyword('Upload via HTTP Gate using Curl')
def upload_via_http_gate_curl(cid: str, filepath: str, large_object=False, headers: dict = None) -> str:
    """
    This function upload given object through HTTP gate using curl utility.
    :param cid: CID to get object from
    :param filepath: File path to upload
    :param large_object: stream the file through a FIFO to avoid curl buffering
    :param headers: Object header (currently unused by the curl invocation)
    :return: the object_id parsed from curl's output
    """
    # Module-level `os` is only imported on the pytest branch of this file;
    # import locally so the Robot path does not hit a NameError.
    import os

    request = f'{HTTP_GATE}/upload/{cid}'
    files = f'file=@{filepath};filename={os.path.basename(filepath)}'
    cmd = f'curl -F \'{files}\' {request}'
    if large_object:
        # Feed the file through a named pipe so curl streams it unbuffered.
        files = f'file=@pipe;filename={os.path.basename(filepath)}'
        cmd = f'mkfifo pipe;cat {filepath} > pipe & curl --no-buffer -F \'{files}\' {request}'
    output = _cmd_run(cmd)
    oid_re = re.search(r'"object_id": "(.*)"', output)
    if not oid_re:
        raise AssertionError(f'Could not find "object_id" in {output}')
    return oid_re.group(1)
|
||||
|
||||
|
||||
@keyword('Get via HTTP Gate using Curl')
def get_via_http_curl(cid: str, oid: str) -> str:
    """
    This function gets given object from HTTP gate using curl utility.
    :param cid: CID to get object from
    :param oid: object OID
    :return: path of the downloaded file
    """
    request = f'{HTTP_GATE}/get/{cid}/{oid}'
    filename = f"{ASSETS_DIR}/{cid}_{oid}_{str(uuid.uuid4())}"
    # Redirect curl's output into the generated file — without this the
    # function returned a path that was never written to.
    cmd = f'curl {request} > {filename}'

    _cmd_run(cmd)

    return filename
|
||||
|
||||
|
||||
def _attach_allure_step(request: str, status_code: int, req_type='GET'):
|
||||
if 'allure' in sys.modules:
|
||||
command_attachment = (
|
||||
f"REQUEST: '{request}'\n"
|
||||
f'RESPONSE:\n {status_code}\n'
|
||||
)
|
||||
with allure.step(f'{req_type} Request'):
|
||||
allure.attach(command_attachment, f'{req_type} Request', allure.attachment_type.TEXT)
|
||||
|
|
|
@ -1,236 +0,0 @@
|
|||
#!/usr/bin/python3
|
||||
|
||||
import json
|
||||
import os
|
||||
import uuid
|
||||
|
||||
import boto3
|
||||
import botocore
|
||||
import urllib3
|
||||
from robot.api import logger
|
||||
from robot.api.deco import keyword
|
||||
|
||||
from cli_helpers import _run_with_passwd
|
||||
from common import GATE_PUB_KEY, NEOFS_ENDPOINT, S3_GATE
|
||||
|
||||
##########################################################
|
||||
# Disabling warnings on self-signed certificate which the
|
||||
# boto library produces on requests to S3-gate in dev-env.
|
||||
urllib3.disable_warnings()
|
||||
##########################################################
|
||||
|
||||
ROBOT_AUTO_KEYWORDS = False
|
||||
CREDENTIALS_CREATE_TIMEOUT = '30s'
|
||||
|
||||
NEOFS_EXEC = os.getenv('NEOFS_EXEC', 'neofs-authmate')
|
||||
|
||||
|
||||
@keyword('Init S3 Credentials')
def init_s3_credentials(wallet):
    """Issue S3 gate credentials for *wallet* via neofs-authmate.

    Creates a randomly named container-friendly bucket and returns a tuple:
    (container_id, bucket, access_key_id, secret_access_key, owner_private_key).

    :param wallet: path to the wallet file passed to `issue-secret`
    :raises RuntimeError: when the command fails or its output can't be parsed
    """
    bucket = str(uuid.uuid4())
    s3_bearer_rules = "robot/resources/files/s3_bearer_rules.json"
    cmd = (
        f'{NEOFS_EXEC} --debug --with-log --timeout {CREDENTIALS_CREATE_TIMEOUT} '
        f'issue-secret --wallet {wallet} --gate-public-key={GATE_PUB_KEY} '
        f'--peer {NEOFS_ENDPOINT} --container-friendly-name {bucket} '
        f'--bearer-rules {s3_bearer_rules}'
    )
    logger.info(f"Executing command: {cmd}")

    try:
        # _run_with_passwd answers the interactive wallet-password prompt.
        output = _run_with_passwd(cmd)
        logger.info(f"Command completed with output: {output}")
        # first five string are log output, cutting them off and parse
        # the rest of the output as JSON
        output = '\n'.join(output.split('\n')[5:])
        output_dict = json.loads(output)

        return (output_dict['container_id'],
                bucket,
                output_dict['access_key_id'],
                output_dict['secret_access_key'],
                output_dict['owner_private_key'])

    except Exception as exc:
        raise RuntimeError("failed to init s3 credentials") from exc
|
||||
|
||||
|
||||
@keyword('Config S3 client')
def config_s3_client(access_key_id, secret_access_key):
    """Build a boto3 S3 client pointed at the S3 gate.

    TLS verification is disabled (verify=False) because the dev-env gate
    presents a self-signed certificate.

    :param access_key_id: AWS-style access key id issued by the gate
    :param secret_access_key: matching secret key
    :return: a configured boto3 S3 client
    """
    try:

        session = boto3.session.Session()

        s3_client = session.client(
            service_name='s3',
            aws_access_key_id=access_key_id,
            aws_secret_access_key=secret_access_key,
            endpoint_url=S3_GATE, verify=False
        )

        return s3_client

    except botocore.exceptions.ClientError as err:
        raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
                        f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
|
||||
|
||||
|
||||
@keyword('List objects S3 v2')
def list_objects_s3_v2(s3_client, bucket):
    """Return the keys of all objects in *bucket* via ListObjectsV2."""
    try:
        response = s3_client.list_objects_v2(Bucket=bucket)
        logger.info(f"S3 v2 List objects result: {response['Contents']}")
        obj_list = [obj['Key'] for obj in response['Contents']]
        logger.info(f"Found s3 objects: {obj_list}")
        return obj_list

    except botocore.exceptions.ClientError as err:
        raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
                        f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
|
||||
|
||||
|
||||
@keyword('List objects S3')
def list_objects_s3(s3_client, bucket):
    """Return the keys of all objects in *bucket* via (v1) ListObjects."""
    try:
        response = s3_client.list_objects(Bucket=bucket)
        logger.info(f"S3 List objects result: {response['Contents']}")
        obj_list = [obj['Key'] for obj in response['Contents']]
        logger.info(f"Found s3 objects: {obj_list}")
        return obj_list

    except botocore.exceptions.ClientError as err:
        raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
                        f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
|
||||
|
||||
|
||||
@keyword('Create bucket S3')
def create_bucket_s3(s3_client):
    """Create a bucket with a random UUID name and return that name."""
    bucket_name = str(uuid.uuid4())

    try:
        s3_bucket = s3_client.create_bucket(Bucket=bucket_name)
        logger.info(f"Created S3 bucket: {s3_bucket}")
        return bucket_name

    except botocore.exceptions.ClientError as err:
        raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
                        f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
|
||||
|
||||
|
||||
@keyword('List buckets S3')
def list_buckets_s3(s3_client):
    """Return the names of all buckets visible to *s3_client*."""
    found_buckets = []
    try:
        response = s3_client.list_buckets()
        logger.info(f"S3 List buckets result: {response}")

        found_buckets = [bucket['Name'] for bucket in response['Buckets']]

        return found_buckets

    except botocore.exceptions.ClientError as err:
        raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
                        f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
|
||||
|
||||
|
||||
@keyword('Delete bucket S3')
def delete_bucket_s3(s3_client, bucket):
    """Delete *bucket* and return the service response dict."""
    try:
        response = s3_client.delete_bucket(Bucket=bucket)
        logger.info(f"S3 Delete bucket result: {response}")

        return response

    except botocore.exceptions.ClientError as err:
        raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
                        f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
|
||||
|
||||
|
||||
@keyword('Head bucket S3')
def head_bucket(s3_client, bucket):
    """HEAD *bucket* (existence/permission probe); return the response dict."""
    try:
        response = s3_client.head_bucket(Bucket=bucket)
        logger.info(f"S3 Head bucket result: {response}")
        return response

    except botocore.exceptions.ClientError as err:
        raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
                        f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
|
||||
|
||||
|
||||
@keyword('Put object S3')
def put_object_s3(s3_client, bucket, filepath):
    """Upload the file at *filepath* into *bucket* under its basename.

    :param s3_client: boto3-style S3 client
    :param bucket: target bucket name
    :param filepath: local file to upload
    :return: the service response dict (previously the result was silently
             dropped, unlike every sibling keyword here)
    """
    filename = os.path.basename(filepath)

    with open(filepath, "rb") as put_file:
        file_content = put_file.read()

    try:
        response = s3_client.put_object(Body=file_content, Bucket=bucket, Key=filename)
        logger.info(f"S3 Put object result: {response}")
        return response
    except botocore.exceptions.ClientError as err:
        raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
                        f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
|
||||
|
||||
|
||||
@keyword('Head object S3')
def head_object_s3(s3_client, bucket, object_key):
    """HEAD one object and return its metadata response dict."""
    try:
        response = s3_client.head_object(Bucket=bucket, Key=object_key)
        logger.info(f"S3 Head object result: {response}")
        return response

    except botocore.exceptions.ClientError as err:
        raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
                        f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
|
||||
|
||||
|
||||
@keyword('Delete object S3')
def delete_object_s3(s3_client, bucket, object_key):
    """Delete one object and return the service response dict."""
    try:
        response = s3_client.delete_object(Bucket=bucket, Key=object_key)
        # Fixed copy-paste in the log message: this is a delete, not a put.
        logger.info(f"S3 Delete object result: {response}")
        return response

    except botocore.exceptions.ClientError as err:
        raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
                        f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
|
||||
|
||||
|
||||
@keyword('Copy object S3')
def copy_object_s3(s3_client, bucket, object_key):
    """Server-side copy of *object_key* within *bucket* to a random new key.

    :return: the new key name
    NOTE(review): the new Key is built from os.getcwd(), so the object key
    looks like an absolute local path — presumably intentional for test
    uniqueness, but worth confirming.
    """
    filename = f"{os.getcwd()}/{uuid.uuid4()}"
    try:
        response = s3_client.copy_object(Bucket=bucket,
                                         CopySource=f"{bucket}/{object_key}",
                                         Key=filename)
        logger.info(f"S3 Copy object result: {response}")
        return filename

    except botocore.exceptions.ClientError as err:
        raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
                        f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
|
||||
|
||||
|
||||
@keyword('Get object S3')
def get_object_s3(s3_client, bucket, object_key):
    """Download *object_key* from *bucket* into a uniquely named local file.

    :return: path of the downloaded file
    """
    filename = f"{os.getcwd()}/{uuid.uuid4()}"
    try:
        response = s3_client.get_object(Bucket=bucket, Key=object_key)

        # Stream the body to disk in 1 KiB chunks. The open() target now
        # uses the generated `filename` — previously the path placeholder
        # was broken, so the returned file was never actually written.
        with open(filename, 'wb') as get_file:
            chunk = response['Body'].read(1024)
            while chunk:
                get_file.write(chunk)
                chunk = response['Body'].read(1024)

        return filename

    except botocore.exceptions.ClientError as err:
        raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
                        f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
|
194
robot/resources/lib/python_keywords/s3_gate_bucket.py
Normal file
194
robot/resources/lib/python_keywords/s3_gate_bucket.py
Normal file
|
@ -0,0 +1,194 @@
|
|||
#!/usr/bin/python3
|
||||
|
||||
import json
|
||||
import os
|
||||
import uuid
|
||||
from enum import Enum
|
||||
|
||||
import boto3
|
||||
from botocore.exceptions import ClientError
|
||||
import urllib3
|
||||
from robot.api import logger
|
||||
from robot.api.deco import keyword
|
||||
|
||||
from cli_helpers import _run_with_passwd, log_command_execution
|
||||
from common import GATE_PUB_KEY, NEOFS_ENDPOINT, S3_GATE
|
||||
|
||||
##########################################################
|
||||
# Disabling warnings on self-signed certificate which the
|
||||
# boto library produces on requests to S3-gate in dev-env.
|
||||
urllib3.disable_warnings()
|
||||
##########################################################
|
||||
|
||||
ROBOT_AUTO_KEYWORDS = False
|
||||
CREDENTIALS_CREATE_TIMEOUT = '30s'
|
||||
|
||||
NEOFS_EXEC = os.getenv('NEOFS_EXEC', 'neofs-authmate')
|
||||
ASSETS_DIR = os.getenv('ASSETS_DIR', 'TemporaryDir/')
|
||||
|
||||
|
||||
class VersioningStatus(Enum):
    """Valid values of the S3 bucket-versioning "Status" field."""
    ENABLED = 'Enabled'
    SUSPENDED = 'Suspended'
|
||||
|
||||
|
||||
@keyword('Init S3 Credentials')
def init_s3_credentials(wallet, s3_bearer_rules_file: str = None):
    """Issue S3 gate credentials for *wallet* via neofs-authmate.

    :param wallet: path to the wallet file passed to `issue-secret`
    :param s3_bearer_rules_file: optional bearer-rules JSON; falls back to
        the repository default
    :return: (container_id, bucket, access_key_id, secret_access_key,
        owner_private_key)
    :raises RuntimeError: when the command fails or its output can't be parsed
    """
    bucket = str(uuid.uuid4())
    s3_bearer_rules = s3_bearer_rules_file or 'robot/resources/files/s3_bearer_rules.json'
    cmd = (
        f'{NEOFS_EXEC} --debug --with-log --timeout {CREDENTIALS_CREATE_TIMEOUT} '
        f'issue-secret --wallet {wallet} --gate-public-key={GATE_PUB_KEY} '
        f'--peer {NEOFS_ENDPOINT} --container-friendly-name {bucket} '
        f'--bearer-rules {s3_bearer_rules}'
    )
    logger.info(f'Executing command: {cmd}')

    try:
        # _run_with_passwd answers the interactive wallet-password prompt.
        output = _run_with_passwd(cmd)
        logger.info(f'Command completed with output: {output}')
        # first five string are log output, cutting them off and parse
        # the rest of the output as JSON
        output = '\n'.join(output.split('\n')[5:])
        output_dict = json.loads(output)

        return (output_dict['container_id'],
                bucket,
                output_dict['access_key_id'],
                output_dict['secret_access_key'],
                output_dict['owner_private_key'])

    except Exception as exc:
        raise RuntimeError(f'Failed to init s3 credentials because of error\n{exc}') from exc
|
||||
|
||||
|
||||
@keyword('Config S3 client')
def config_s3_client(access_key_id: str, secret_access_key: str):
    """Build a boto3 S3 client pointed at the S3 gate.

    TLS verification is disabled (verify=False) because the dev-env gate
    presents a self-signed certificate.

    :param access_key_id: AWS-style access key id issued by the gate
    :param secret_access_key: matching secret key
    :return: a configured boto3 S3 client
    """
    try:

        session = boto3.session.Session()

        s3_client = session.client(
            service_name='s3',
            aws_access_key_id=access_key_id,
            aws_secret_access_key=secret_access_key,
            endpoint_url=S3_GATE, verify=False
        )

        return s3_client

    except ClientError as err:
        raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n'
                        f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err
|
||||
|
||||
|
||||
@keyword('Create bucket S3')
def create_bucket_s3(s3_client):
    """Create a bucket with a random UUID name and return that name."""
    bucket_name = str(uuid.uuid4())

    try:
        s3_bucket = s3_client.create_bucket(Bucket=bucket_name)
        log_command_execution(f'Created S3 bucket {bucket_name}', s3_bucket)
        return bucket_name

    except ClientError as err:
        raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n'
                        f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err
|
||||
|
||||
|
||||
@keyword('List buckets S3')
def list_buckets_s3(s3_client):
    """Return the names of all buckets visible to the client."""
    try:
        response = s3_client.list_buckets()
        log_command_execution('S3 List buckets result', response)
        return [bucket['Name'] for bucket in response['Buckets']]
    except ClientError as err:
        error = err.response['Error']['Message']
        status = err.response['ResponseMetadata']['HTTPStatusCode']
        raise Exception(f'Error Message: {error}\nHttp status code: {status}') from err
|
||||
|
||||
|
||||
@keyword('Delete bucket S3')
def delete_bucket_s3(s3_client, bucket: str):
    """Delete the given bucket and return the raw S3 response."""
    try:
        response = s3_client.delete_bucket(Bucket=bucket)
        log_command_execution('S3 Delete bucket result', response)
        return response
    except ClientError as err:
        # Record the failure in the report before re-raising.
        log_command_execution('S3 Delete bucket error', str(err))
        error = err.response['Error']['Message']
        status = err.response['ResponseMetadata']['HTTPStatusCode']
        raise Exception(f'Error Message: {error}\nHttp status code: {status}') from err
|
||||
|
||||
|
||||
@keyword('Head bucket S3')
def head_bucket(s3_client, bucket: str):
    """Issue a HEAD request on the bucket and return the raw response."""
    try:
        response = s3_client.head_bucket(Bucket=bucket)
        log_command_execution('S3 Head bucket result', response)
        return response
    except ClientError as err:
        # Record the failure in the report before re-raising.
        log_command_execution('S3 Head bucket error', str(err))
        error = err.response['Error']['Message']
        status = err.response['ResponseMetadata']['HTTPStatusCode']
        raise Exception(f'Error Message: {error}\nHttp status code: {status}') from err
|
||||
|
||||
|
||||
@keyword('Set bucket versioning status')
def set_bucket_versioning(s3_client, bucket_name: str, status: VersioningStatus):
    """Switch the bucket's versioning configuration to the given status."""
    configuration = {'Status': status.value}
    try:
        response = s3_client.put_bucket_versioning(Bucket=bucket_name,
                                                   VersioningConfiguration=configuration)
        log_command_execution('S3 Set bucket versioning to', response)
    except ClientError as err:
        raise Exception(f'Got error during set bucket versioning: {err}') from err
|
||||
|
||||
|
||||
@keyword('Get bucket versioning status')
def get_bucket_versioning_status(s3_client, bucket_name: str) -> str:
    """Return the bucket's versioning status (None when the key is absent)."""
    try:
        response = s3_client.get_bucket_versioning(Bucket=bucket_name)
        log_command_execution('S3 Got bucket versioning status', response)
        return response.get('Status')
    except ClientError as err:
        raise Exception(f'Got error during get bucket versioning status: {err}') from err
|
||||
|
||||
|
||||
@keyword('Put bucket tagging')
def put_bucket_tagging(s3_client, bucket_name: str, tags: list):
    """Apply tags, given as (key, value) pairs, to the bucket."""
    try:
        tag_set = [{'Key': key, 'Value': value} for key, value in tags]
        response = s3_client.put_bucket_tagging(Bucket=bucket_name,
                                                Tagging={'TagSet': tag_set})
        log_command_execution('S3 Put bucket tagging', response)
    except ClientError as err:
        raise Exception(f'Got error during put bucket tagging: {err}') from err
|
||||
|
||||
|
||||
@keyword('Get bucket tagging')
def get_bucket_tagging(s3_client, bucket_name: str) -> list:
    """Return the bucket's tag set as reported by the gate."""
    try:
        response = s3_client.get_bucket_tagging(Bucket=bucket_name)
        log_command_execution('S3 Get bucket tagging', response)
        return response.get('TagSet')
    except ClientError as err:
        raise Exception(f'Got error during get bucket tagging: {err}') from err
|
||||
|
||||
|
||||
@keyword('Delete bucket tagging')
def delete_bucket_tagging(s3_client, bucket_name: str):
    """Remove every tag from the bucket."""
    try:
        response = s3_client.delete_bucket_tagging(Bucket=bucket_name)
        log_command_execution('S3 Delete bucket tagging', response)
    except ClientError as err:
        raise Exception(f'Got error during delete bucket tagging: {err}') from err
|
330
robot/resources/lib/python_keywords/s3_gate_object.py
Normal file
330
robot/resources/lib/python_keywords/s3_gate_object.py
Normal file
|
@ -0,0 +1,330 @@
|
|||
#!/usr/bin/python3
|
||||
|
||||
import os
|
||||
import uuid
|
||||
from enum import Enum
|
||||
from typing import Optional, List
|
||||
|
||||
import urllib3
|
||||
from botocore.exceptions import ClientError
|
||||
from robot.api import logger
|
||||
from robot.api.deco import keyword
|
||||
|
||||
from cli_helpers import log_command_execution
|
||||
from python_keywords.aws_cli_client import AwsCliClient
|
||||
|
||||
##########################################################
|
||||
# Disabling warnings on self-signed certificate which the
|
||||
# boto library produces on requests to S3-gate in dev-env.
|
||||
urllib3.disable_warnings()
|
||||
##########################################################
|
||||
|
||||
ROBOT_AUTO_KEYWORDS = False
|
||||
CREDENTIALS_CREATE_TIMEOUT = '30s'
|
||||
|
||||
ASSETS_DIR = os.getenv('ASSETS_DIR', 'TemporaryDir/')
|
||||
|
||||
|
||||
class VersioningStatus(Enum):
    """Valid states for S3 bucket versioning configuration."""
    ENABLED = 'Enabled'
    SUSPENDED = 'Suspended'
|
||||
|
||||
|
||||
@keyword('List objects S3 v2')
def list_objects_s3_v2(s3_client, bucket: str) -> list:
    """Return object keys in the bucket via the ListObjectsV2 API."""
    try:
        response = s3_client.list_objects_v2(Bucket=bucket)
        log_command_execution('S3 v2 List objects result', response)
        obj_list = [entry['Key'] for entry in response.get('Contents', [])]
        logger.info(f'Found s3 objects: {obj_list}')
        return obj_list
    except ClientError as err:
        error = err.response['Error']['Message']
        status = err.response['ResponseMetadata']['HTTPStatusCode']
        raise Exception(f'Error Message: {error}\nHttp status code: {status}') from err
|
||||
|
||||
|
||||
@keyword('List objects S3')
def list_objects_s3(s3_client, bucket: str) -> list:
    """Return object keys in the bucket via the legacy ListObjects API."""
    try:
        response = s3_client.list_objects(Bucket=bucket)
        log_command_execution('S3 List objects result', response)
        obj_list = [entry['Key'] for entry in response.get('Contents', [])]
        logger.info(f'Found s3 objects: {obj_list}')
        return obj_list
    except ClientError as err:
        error = err.response['Error']['Message']
        status = err.response['ResponseMetadata']['HTTPStatusCode']
        raise Exception(f'Error Message: {error}\nHttp status code: {status}') from err
|
||||
|
||||
|
||||
@keyword('List objects versions S3')
def list_objects_versions_s3(s3_client, bucket: str) -> list:
    """Return the list of object versions stored in the bucket."""
    try:
        response = s3_client.list_object_versions(Bucket=bucket)
        log_command_execution('S3 List objects versions result', response)
        return response.get('Versions', [])
    except ClientError as err:
        error = err.response['Error']['Message']
        status = err.response['ResponseMetadata']['HTTPStatusCode']
        raise Exception(f'Error Message: {error}\nHttp status code: {status}') from err
|
||||
|
||||
|
||||
@keyword('Put object S3')
def put_object_s3(s3_client, bucket: str, filepath: str):
    """Upload the file at *filepath* under its basename; return the VersionId."""
    filename = os.path.basename(filepath)

    # The AWS CLI wrapper takes a path on disk, while boto3 needs the raw
    # bytes of the object body.
    if isinstance(s3_client, AwsCliClient):
        body = filepath
    else:
        with open(filepath, 'rb') as put_file:
            body = put_file.read()

    try:
        response = s3_client.put_object(Body=body, Bucket=bucket, Key=filename)
        log_command_execution('S3 Put object result', response)
        return response.get('VersionId')
    except ClientError as err:
        error = err.response['Error']['Message']
        status = err.response['ResponseMetadata']['HTTPStatusCode']
        raise Exception(f'Error Message: {error}\nHttp status code: {status}') from err
|
||||
|
||||
|
||||
@keyword('Head object S3')
def head_object_s3(s3_client, bucket: str, object_key: str, version_id: str = None):
    """Fetch metadata of an object (optionally of a specific version)."""
    request = {'Bucket': bucket, 'Key': object_key}
    if version_id:
        request['VersionId'] = version_id
    try:
        response = s3_client.head_object(**request)
        log_command_execution('S3 Head object result', response)
        return response
    except ClientError as err:
        error = err.response['Error']['Message']
        status = err.response['ResponseMetadata']['HTTPStatusCode']
        raise Exception(f'Error Message: {error}\nHttp status code: {status}') from err
|
||||
|
||||
|
||||
@keyword('Delete object S3')
def delete_object_s3(s3_client, bucket, object_key, version_id: str = None):
    """Delete an object (optionally a specific version); return the response."""
    request = {'Bucket': bucket, 'Key': object_key}
    if version_id:
        request['VersionId'] = version_id
    try:
        response = s3_client.delete_object(**request)
        log_command_execution('S3 Delete object result', response)
        return response
    except ClientError as err:
        error = err.response['Error']['Message']
        status = err.response['ResponseMetadata']['HTTPStatusCode']
        raise Exception(f'Error Message: {error}\nHttp status code: {status}') from err
|
||||
|
||||
|
||||
@keyword('Delete objects S3')
def delete_objects_s3(s3_client, bucket: str, object_keys: list):
    """Delete several objects in a single request; return the raw response."""
    try:
        response = s3_client.delete_objects(Bucket=bucket,
                                            Delete=_make_objs_dict(object_keys))
        log_command_execution('S3 Delete objects result', response)
        return response
    except ClientError as err:
        error = err.response['Error']['Message']
        status = err.response['ResponseMetadata']['HTTPStatusCode']
        raise Exception(f'Error Message: {error}\nHttp status code: {status}') from err
|
||||
|
||||
|
||||
@keyword('Copy object S3')
def copy_object_s3(s3_client, bucket, object_key, bucket_dst=None):
    """Copy an object into *bucket_dst* (or back into *bucket*) under a new
    randomly generated key; return that key."""
    new_key = f'{os.getcwd()}/{uuid.uuid4()}'
    destination = bucket_dst if bucket_dst else bucket
    try:
        response = s3_client.copy_object(Bucket=destination,
                                         CopySource=f'{bucket}/{object_key}',
                                         Key=new_key)
        log_command_execution('S3 Copy objects result', response)
        return new_key
    except ClientError as err:
        error = err.response['Error']['Message']
        status = err.response['ResponseMetadata']['HTTPStatusCode']
        raise Exception(f'Error Message: {error}\nHttp status code: {status}') from err
|
||||
|
||||
|
||||
@keyword('Get object S3')
def get_object_s3(s3_client, bucket: str, object_key: str, version_id: str = None):
    """Download an object (optionally a specific version) into ASSETS_DIR.

    Returns:
        Path of the file the object was written to.
    """
    filename = f'{ASSETS_DIR}/{uuid.uuid4()}'
    try:
        params = {'Bucket': bucket, 'Key': object_key}
        if version_id:
            params['VersionId'] = version_id

        # The AWS CLI wrapper downloads straight to a file path.
        if isinstance(s3_client, AwsCliClient):
            params['file_path'] = filename

        response = s3_client.get_object(**params)
        log_command_execution('S3 Get objects result', response)

        # boto3 returns a streaming body: write it to the target file in
        # chunks. Bug fix: the destination used to be a hard-coded literal
        # path instead of `filename`, so the returned path was never created.
        if not isinstance(s3_client, AwsCliClient):
            with open(filename, 'wb') as get_file:
                chunk = response['Body'].read(1024)
                while chunk:
                    get_file.write(chunk)
                    chunk = response['Body'].read(1024)

        return filename
    except ClientError as err:
        raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n'
                        f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err
|
||||
|
||||
|
||||
@keyword('Create multipart upload S3')
def create_multipart_upload_s3(s3_client, bucket_name: str, object_key: str) -> str:
    """Start a multipart upload for the key and return its UploadId."""
    try:
        response = s3_client.create_multipart_upload(Bucket=bucket_name, Key=object_key)
        log_command_execution('S3 Created multipart upload', response)
        upload_id = response.get('UploadId')
        assert upload_id, f'Expected UploadId in response:\n{response}'
        return upload_id
    except ClientError as err:
        error = err.response['Error']['Message']
        status = err.response['ResponseMetadata']['HTTPStatusCode']
        raise Exception(f'Error Message: {error}\nHttp status code: {status}') from err
|
||||
|
||||
|
||||
@keyword('List multipart uploads S3')
def list_multipart_uploads_s3(s3_client, bucket_name: str) -> Optional[List[dict]]:
    """Return in-progress multipart uploads for the bucket (None if absent)."""
    try:
        response = s3_client.list_multipart_uploads(Bucket=bucket_name)
        log_command_execution('S3 List multipart upload', response)
        return response.get('Uploads')
    except ClientError as err:
        error = err.response['Error']['Message']
        status = err.response['ResponseMetadata']['HTTPStatusCode']
        raise Exception(f'Error Message: {error}\nHttp status code: {status}') from err
|
||||
|
||||
|
||||
@keyword('Abort multipart upload S3')
def abort_multipart_uploads_s3(s3_client, bucket_name: str, object_key: str, upload_id: str):
    """Abort the multipart upload identified by *upload_id*."""
    try:
        response = s3_client.abort_multipart_upload(Bucket=bucket_name,
                                                    Key=object_key,
                                                    UploadId=upload_id)
        log_command_execution('S3 Abort multipart upload', response)
    except ClientError as err:
        error = err.response['Error']['Message']
        status = err.response['ResponseMetadata']['HTTPStatusCode']
        raise Exception(f'Error Message: {error}\nHttp status code: {status}') from err
|
||||
|
||||
|
||||
@keyword('Upload part S3')
def upload_part_s3(s3_client, bucket_name: str, object_key: str, upload_id: str, part_num: int, filepath: str) -> str:
    """Upload one part of a multipart upload from *filepath*; return its ETag."""
    # The AWS CLI wrapper takes a path on disk, while boto3 needs raw bytes.
    if isinstance(s3_client, AwsCliClient):
        body = filepath
    else:
        with open(filepath, 'rb') as put_file:
            body = put_file.read()

    try:
        response = s3_client.upload_part(UploadId=upload_id, Bucket=bucket_name,
                                         Key=object_key, PartNumber=part_num,
                                         Body=body)
        log_command_execution('S3 Upload part', response)
        etag = response.get('ETag')
        assert etag, f'Expected ETag in response:\n{response}'
        return etag
    except ClientError as err:
        error = err.response['Error']['Message']
        status = err.response['ResponseMetadata']['HTTPStatusCode']
        raise Exception(f'Error Message: {error}\nHttp status code: {status}') from err
|
||||
|
||||
|
||||
@keyword('List parts S3')
def list_parts_s3(s3_client, bucket_name: str, object_key: str, upload_id: str) -> List[dict]:
    """Return the parts already uploaded for the given multipart upload."""
    try:
        response = s3_client.list_parts(UploadId=upload_id, Bucket=bucket_name, Key=object_key)
        log_command_execution('S3 List part', response)
        parts = response.get('Parts')
        assert parts, f'Expected Parts in response:\n{response}'
        return parts
    except ClientError as err:
        error = err.response['Error']['Message']
        status = err.response['ResponseMetadata']['HTTPStatusCode']
        raise Exception(f'Error Message: {error}\nHttp status code: {status}') from err
|
||||
|
||||
|
||||
@keyword('Complete multipart upload S3')
def complete_multipart_upload_s3(s3_client, bucket_name: str, object_key: str, upload_id: str,
                                 parts: list):
    """Finish a multipart upload from (part_number, etag) pairs."""
    try:
        part_descriptions = [{'ETag': etag, 'PartNumber': number} for number, etag in parts]
        response = s3_client.complete_multipart_upload(Bucket=bucket_name, Key=object_key,
                                                       UploadId=upload_id,
                                                       MultipartUpload={'Parts': part_descriptions})
        log_command_execution('S3 Complete multipart upload', response)
    except ClientError as err:
        error = err.response['Error']['Message']
        status = err.response['ResponseMetadata']['HTTPStatusCode']
        raise Exception(f'Error Message: {error}\nHttp status code: {status}') from err
|
||||
|
||||
|
||||
@keyword('Put object tagging')
def put_object_tagging(s3_client, bucket_name: str, object_key: str, tags: list):
    """Apply tags, given as (key, value) pairs, to the object."""
    try:
        tag_set = [{'Key': key, 'Value': value} for key, value in tags]
        s3_client.put_object_tagging(Bucket=bucket_name, Key=object_key,
                                     Tagging={'TagSet': tag_set})
        # Log the transformed tag set (the original response is not kept).
        log_command_execution('S3 Put object tagging', str(tag_set))
    except ClientError as err:
        raise Exception(f'Got error during put object tagging: {err}') from err
|
||||
|
||||
|
||||
@keyword('Get object tagging')
def get_object_tagging(s3_client, bucket_name: str, object_key: str) -> list:
    """Return the object's tag set as reported by the gate."""
    try:
        response = s3_client.get_object_tagging(Bucket=bucket_name, Key=object_key)
        log_command_execution('S3 Get object tagging', response)
        return response.get('TagSet')
    except ClientError as err:
        raise Exception(f'Got error during get object tagging: {err}') from err
|
||||
|
||||
|
||||
@keyword('Delete object tagging')
def delete_object_tagging(s3_client, bucket_name: str, object_key: str):
    """Remove every tag from the object."""
    try:
        response = s3_client.delete_object_tagging(Bucket=bucket_name, Key=object_key)
        log_command_execution('S3 Delete object tagging', response)
    except ClientError as err:
        raise Exception(f'Got error during delete object tagging: {err}') from err
|
||||
|
||||
|
||||
# Bug fix: this keyword was registered as 'Get object tagging', clashing with
# the tagging keyword defined above; renamed to match the function.
@keyword('Get object attributes')
def get_object_attributes(s3_client, bucket_name: str, object_key: str, *attributes: str, version_id: str = None,
                          max_parts: int = None, part_number: int = None, get_full_resp=True) -> dict:
    """Fetch the requested object attributes (AWS CLI client only).

    Args:
        attributes: attribute names to request; each must appear in the response.
        get_full_resp: when True, return the whole response dict; otherwise
            return only the value of the first requested attribute.

    Returns an empty dict for non-CLI clients, since boto3 (as packaged here)
    does not expose get_object_attributes.
    """
    try:
        if not isinstance(s3_client, AwsCliClient):
            logger.warn('Method get_object_attributes is not supported by boto3 client')
            return {}
        response = s3_client.get_object_attributes(bucket_name, object_key, *attributes, version_id=version_id,
                                                   max_parts=max_parts, part_number=part_number)
        log_command_execution('S3 Get object attributes', response)
        for attr in attributes:
            assert attr in response, f'Expected attribute {attr} in {response}'

        if get_full_resp:
            return response
        else:
            return response.get(attributes[0])
    except ClientError as err:
        raise Exception(f'Got error during get object attributes: {err}') from err
|
||||
|
||||
|
||||
def _make_objs_dict(key_names):
|
||||
objs_list = []
|
||||
for key in key_names:
|
||||
obj_dict = {'Key': key}
|
||||
objs_list.append(obj_dict)
|
||||
objs_dict = {'Objects': objs_list}
|
||||
return objs_dict
|
|
@ -126,9 +126,15 @@ def get_nodes_without_object(wallet: str, cid: str, oid: str):
|
|||
"""
|
||||
nodes_list = []
|
||||
for node in NEOFS_NETMAP:
|
||||
res = neofs_verbs.head_object(wallet, cid, oid,
|
||||
endpoint=node,
|
||||
is_direct=True)
|
||||
if res is None:
|
||||
nodes_list.append(node)
|
||||
try:
|
||||
res = neofs_verbs.head_object(wallet, cid, oid,
|
||||
endpoint=node,
|
||||
is_direct=True)
|
||||
if res is None:
|
||||
nodes_list.append(node)
|
||||
except Exception as err:
|
||||
if 'object not found' in str(err):
|
||||
nodes_list.append(node)
|
||||
else:
|
||||
raise Exception(f'Got error {err} on head object command') from err
|
||||
return nodes_list
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue