frostfs-testcases/robot/resources/lib/python_keywords/s3_gate_bucket.py

#!/usr/bin/python3
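"""Robot Framework keywords for NeoFS S3 gateway bucket operations.

The keywords below obtain S3 credentials via ``neofs-authmate`` and wrap
bucket-level calls of the AWS S3 API (create/list/delete/head bucket,
versioning, tagging) made through ``boto3`` against the S3 gate endpoint.
"""
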
import json
import os
import uuid
from enum import Enum

import boto3
import urllib3
from botocore.exceptions import ClientError
from robot.api import logger
from robot.api.deco import keyword

from cli_helpers import _run_with_passwd, log_command_execution
from common import NEOFS_ENDPOINT, S3_GATE, S3_GATE_WALLET_PATH, S3_GATE_WALLET_PASS
from data_formatters import pub_key_hex

##########################################################
# Disabling warnings on self-signed certificate which the
# boto library produces on requests to S3-gate in dev-env.
urllib3.disable_warnings()
##########################################################

ROBOT_AUTO_KEYWORDS = False
CREDENTIALS_CREATE_TIMEOUT = '30s'
NEOFS_EXEC = os.getenv('NEOFS_EXEC', 'neofs-authmate')
ASSETS_DIR = os.getenv('ASSETS_DIR', 'TemporaryDir/')


class VersioningStatus(Enum):
    ENABLED = 'Enabled'
    SUSPENDED = 'Suspended'


@keyword('Init S3 Credentials')
def init_s3_credentials(wallet_path, s3_bearer_rules_file: str = None):
    bucket = str(uuid.uuid4())
    s3_bearer_rules = s3_bearer_rules_file or 'robot/resources/files/s3_bearer_rules.json'
    gate_pub_key = pub_key_hex(S3_GATE_WALLET_PATH, S3_GATE_WALLET_PASS)
    cmd = (
        f'{NEOFS_EXEC} --debug --with-log --timeout {CREDENTIALS_CREATE_TIMEOUT} '
        f'issue-secret --wallet {wallet_path} --gate-public-key={gate_pub_key} '
        f'--peer {NEOFS_ENDPOINT} --container-friendly-name {bucket} '
        f'--bearer-rules {s3_bearer_rules}'
    )
    logger.info(f'Executing command: {cmd}')
    try:
        output = _run_with_passwd(cmd)
        logger.info(f'Command completed with output: {output}')

        # The first five lines of the output are log messages;
        # cut them off and parse the rest of the output as JSON.
        output = '\n'.join(output.split('\n')[5:])
        try:
            output_dict = json.loads(output)
        except json.JSONDecodeError:
            raise AssertionError(f'Could not parse info from output\n{output}')

        return (output_dict['container_id'],
                bucket,
                output_dict['access_key_id'],
                output_dict['secret_access_key'],
                output_dict['owner_private_key'])
    except Exception as exc:
        raise RuntimeError(f'Failed to init s3 credentials because of error\n{exc}') from exc
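

# Illustrative sketch only: the parsing above assumes that, after the leading
# log lines, `neofs-authmate issue-secret` prints a JSON document roughly of
# the following shape (field names taken from the lookups above; the exact
# output may differ between authmate versions):
#
#   {
#       "access_key_id": "<...>",
#       "secret_access_key": "<...>",
#       "container_id": "<...>",
#       "owner_private_key": "<...>"
#   }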


@keyword('Config S3 client')
def config_s3_client(access_key_id: str, secret_access_key: str):
    try:
        session = boto3.session.Session()
        s3_client = session.client(
            service_name='s3',
            aws_access_key_id=access_key_id,
            aws_secret_access_key=secret_access_key,
            endpoint_url=S3_GATE,
            verify=False
        )
        return s3_client
    except ClientError as err:
        raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n'
                        f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err


@keyword('Create bucket S3')
def create_bucket_s3(s3_client):
    bucket_name = str(uuid.uuid4())
    try:
        s3_bucket = s3_client.create_bucket(Bucket=bucket_name)
        log_command_execution(f'Created S3 bucket {bucket_name}', s3_bucket)
        return bucket_name
    except ClientError as err:
        raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n'
                        f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err


@keyword('List buckets S3')
def list_buckets_s3(s3_client):
    found_buckets = []
    try:
        response = s3_client.list_buckets()
        log_command_execution('S3 List buckets result', response)
        for bucket in response['Buckets']:
            found_buckets.append(bucket['Name'])
        return found_buckets
    except ClientError as err:
        raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n'
                        f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err


@keyword('Delete bucket S3')
def delete_bucket_s3(s3_client, bucket: str):
    try:
        response = s3_client.delete_bucket(Bucket=bucket)
        log_command_execution('S3 Delete bucket result', response)
        return response
    except ClientError as err:
        log_command_execution('S3 Delete bucket error', str(err))
        raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n'
                        f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err


@keyword('Head bucket S3')
def head_bucket(s3_client, bucket: str):
    try:
        response = s3_client.head_bucket(Bucket=bucket)
        log_command_execution('S3 Head bucket result', response)
        return response
    except ClientError as err:
        log_command_execution('S3 Head bucket error', str(err))
        raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n'
                        f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err


@keyword('Set bucket versioning status')
def set_bucket_versioning(s3_client, bucket_name: str, status: VersioningStatus):
    try:
        response = s3_client.put_bucket_versioning(
            Bucket=bucket_name,
            VersioningConfiguration={'Status': status.value}
        )
        log_command_execution('S3 Set bucket versioning to', response)
    except ClientError as err:
        raise Exception(f'Got error during set bucket versioning: {err}') from err


@keyword('Get bucket versioning status')
def get_bucket_versioning_status(s3_client, bucket_name: str) -> str:
    try:
        response = s3_client.get_bucket_versioning(Bucket=bucket_name)
        status = response.get('Status')
        log_command_execution('S3 Got bucket versioning status', response)
        return status
    except ClientError as err:
        raise Exception(f'Got error during get bucket versioning status: {err}') from err


@keyword('Put bucket tagging')
def put_bucket_tagging(s3_client, bucket_name: str, tags: list):
    try:
        tags = [{'Key': tag_key, 'Value': tag_value} for tag_key, tag_value in tags]
        tagging = {'TagSet': tags}
        response = s3_client.put_bucket_tagging(Bucket=bucket_name, Tagging=tagging)
        log_command_execution('S3 Put bucket tagging', response)
    except ClientError as err:
        raise Exception(f'Got error during put bucket tagging: {err}') from err
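

# Note: `tags` is expected to be an iterable of (key, value) pairs, e.g.
#   put_bucket_tagging(s3_client, bucket_name, [('Environment', 'dev')])
# The tag names in this example are purely illustrative.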


@keyword('Get bucket tagging')
def get_bucket_tagging(s3_client, bucket_name: str) -> list:
    try:
        response = s3_client.get_bucket_tagging(Bucket=bucket_name)
        log_command_execution('S3 Get bucket tagging', response)
        return response.get('TagSet')
    except ClientError as err:
        raise Exception(f'Got error during get bucket tagging: {err}') from err


@keyword('Delete bucket tagging')
def delete_bucket_tagging(s3_client, bucket_name: str):
    try:
        response = s3_client.delete_bucket_tagging(Bucket=bucket_name)
        log_command_execution('S3 Delete bucket tagging', response)
    except ClientError as err:
        raise Exception(f'Got error during delete bucket tagging: {err}') from err
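

# ----------------------------------------------------------------------------
# Minimal usage sketch (illustrative only, not part of the keyword library):
# it shows how the keywords above are typically chained together when called
# directly from Python. The wallet path is an assumed placeholder.
# ----------------------------------------------------------------------------
#
#   cid, bucket, access_key_id, secret_access_key, owner_key = \
#       init_s3_credentials('/path/to/wallet.json')
#   client = config_s3_client(access_key_id, secret_access_key)
#
#   bucket_name = create_bucket_s3(client)
#   set_bucket_versioning(client, bucket_name, VersioningStatus.ENABLED)
#   assert get_bucket_versioning_status(client, bucket_name) == 'Enabled'
#
#   put_bucket_tagging(client, bucket_name, [('Environment', 'dev')])
#   assert bucket_name in list_buckets_s3(client)
#   delete_bucket_tagging(client, bucket_name)
#   delete_bucket_s3(client, bucket_name)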