import json
import logging
import os
import re
import uuid
from typing import Any, Optional

import allure
import boto3
import pytest
import s3_gate_bucket
import s3_gate_object
import urllib3
from aws_cli_client import AwsCliClient
from botocore.config import Config
from botocore.exceptions import ClientError
from cli_helpers import _cmd_run, _configure_aws_cli, _run_with_passwd
from cluster import Cluster
from cluster_test_base import ClusterTestBase
from common import FROSTFS_AUTHMATE_EXEC
from frostfs_testlib.shell import Shell
from pytest import FixtureRequest
from python_keywords.container import list_containers

# Disable warnings about the self-signed certificate that the
# boto library produces on requests to S3-gate in dev-env
urllib3.disable_warnings()

logger = logging.getLogger("NeoLogger")
CREDENTIALS_CREATE_TIMEOUT = "1m"

# Number of attempts that S3 clients will make per request
# (1 means a single attempt without any retries)
MAX_REQUEST_ATTEMPTS = 1
RETRY_MODE = "standard"


class TestS3GateBase(ClusterTestBase):
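    """Base class for S3 gate tests: provides an S3 client fixture and bucket helpers."""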

    s3_client: Any = None

    @pytest.fixture(scope="class", autouse=True)
    @allure.title("[Class/Autouse]: Create S3 client")
    def s3_client(
        self, default_wallet, client_shell: Shell, request: FixtureRequest, cluster: Cluster
    ) -> Any:
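        """Create an S3 client shared by the whole test class.

        The client backend comes from indirect fixture parametrization: a plain string
        selects the AWS CLI client (if it contains "aws cli") or boto3 otherwise, while a
        (backend, policy) pair additionally passes a container placement policy to
        init_s3_credentials. Hypothetical usage sketch (the test class and parameter
        values are illustrative, not taken from this module):

            @pytest.mark.parametrize("s3_client", ["aws cli", "boto3"], indirect=True)
            class TestS3GateExample(TestS3GateBase):
                ...
        """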
        wallet = default_wallet
        s3_bearer_rules_file = f"{os.getcwd()}/robot/resources/files/s3_bearer_rules.json"
        policy = None if isinstance(request.param, str) else request.param[1]
        (cid, bucket, access_key_id, secret_access_key, owner_private_key,) = init_s3_credentials(
            wallet, cluster, s3_bearer_rules_file=s3_bearer_rules_file, policy=policy
        )
        containers_list = list_containers(
            wallet, shell=client_shell, endpoint=self.cluster.default_rpc_endpoint
        )
        assert cid in containers_list, f"Expected cid {cid} in {containers_list}"

        if "aws cli" in request.param:
            client = configure_cli_client(
                access_key_id, secret_access_key, cluster.default_s3_gate_endpoint
            )
        else:
            client = configure_boto3_client(
                access_key_id, secret_access_key, cluster.default_s3_gate_endpoint
            )
        TestS3GateBase.s3_client = client
        TestS3GateBase.wallet = wallet

    @pytest.fixture
    @allure.title("Create/delete bucket")
    def bucket(self):
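        """Create a bucket for the test and remove it with all its objects afterwards."""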
        bucket = s3_gate_bucket.create_bucket_s3(self.s3_client)
        yield bucket
        self.delete_all_object_in_bucket(bucket)

    @pytest.fixture
    @allure.title("Create two buckets")
    def two_buckets(self):
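        """Create two buckets for the test and remove both with all their objects afterwards."""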
        bucket_1 = s3_gate_bucket.create_bucket_s3(self.s3_client)
        bucket_2 = s3_gate_bucket.create_bucket_s3(self.s3_client)
        yield bucket_1, bucket_2
        for bucket in [bucket_1, bucket_2]:
            self.delete_all_object_in_bucket(bucket)

    def delete_all_object_in_bucket(self, bucket):
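        """Remove all objects from the bucket (including versions and delete markers for
        versioned buckets) and delete the bucket itself."""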
        versioning_status = s3_gate_bucket.get_bucket_versioning_status(self.s3_client, bucket)
        if versioning_status == s3_gate_bucket.VersioningStatus.ENABLED.value:
            # From a versioned bucket we should delete all versions and delete markers of all objects
            objects_versions = s3_gate_object.list_objects_versions_s3(self.s3_client, bucket)
            if objects_versions:
                s3_gate_object.delete_object_versions_s3_without_dm(
                    self.s3_client, bucket, objects_versions
                )
            objects_delete_markers = s3_gate_object.list_objects_delete_markers_s3(
                self.s3_client, bucket
            )
            if objects_delete_markers:
                s3_gate_object.delete_object_versions_s3_without_dm(
                    self.s3_client, bucket, objects_delete_markers
                )
        else:
            # From a non-versioned bucket it is sufficient to delete objects by key
            objects = s3_gate_object.list_objects_s3(self.s3_client, bucket)
            if objects:
                s3_gate_object.delete_objects_s3(self.s3_client, bucket, objects)
            objects_delete_markers = s3_gate_object.list_objects_delete_markers_s3(
                self.s3_client, bucket
            )
            if objects_delete_markers:
                s3_gate_object.delete_object_versions_s3_without_dm(
                    self.s3_client, bucket, objects_delete_markers
                )

        # Delete the bucket itself
        s3_gate_bucket.delete_bucket_s3(self.s3_client, bucket)


@allure.step("Init S3 Credentials")
def init_s3_credentials(
    wallet_path: str,
    cluster: Cluster,
    s3_bearer_rules_file: Optional[str] = None,
    policy: Optional[dict] = None,
):
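    """Issue S3 credentials via frostfs-authmate and return a tuple of
    (container_id, bucket_name, access_key_id, secret_access_key, owner_private_key)."""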
    bucket = str(uuid.uuid4())
    s3_bearer_rules = s3_bearer_rules_file or "robot/resources/files/s3_bearer_rules.json"

    s3gate_node = cluster.s3gates[0]
    gate_public_key = s3gate_node.get_wallet_public_key()
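    # Build the frostfs-authmate issue-secret command that creates the container and gate credentials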
    cmd = (
        f"{FROSTFS_AUTHMATE_EXEC} --debug --with-log --timeout {CREDENTIALS_CREATE_TIMEOUT} "
        f"issue-secret --wallet {wallet_path} --gate-public-key={gate_public_key} "
        f"--peer {cluster.default_rpc_endpoint} --container-friendly-name {bucket} "
        f"--bearer-rules {s3_bearer_rules}"
    )
    if policy:
        cmd += f" --container-policy {policy}"
    logger.info(f"Executing command: {cmd}")

    try:
        output = _run_with_passwd(cmd)
        logger.info(f"Command completed with output: {output}")

        # The output contains some debug info followed by several JSON structures, so we find
        # each JSON structure by curly brackets (a naive approach that works while the JSON is
        # not nested) and take the one that contains secret_access_key
        json_blocks = re.findall(r"\{.*?\}", output, re.DOTALL)
        for json_block in json_blocks:
            try:
                parsed_json_block = json.loads(json_block)
                if "secret_access_key" in parsed_json_block:
                    return (
                        parsed_json_block["container_id"],
                        bucket,
                        parsed_json_block["access_key_id"],
                        parsed_json_block["secret_access_key"],
                        parsed_json_block["owner_private_key"],
                    )
            except json.JSONDecodeError:
                raise AssertionError(f"Could not parse info from output\n{output}")
        raise AssertionError(f"Could not find AWS credentials in output:\n{output}")

    except Exception as exc:
        raise RuntimeError(f"Failed to init s3 credentials because of error\n{exc}") from exc


@allure.step("Configure S3 client (boto3)")
def configure_boto3_client(access_key_id: str, secret_access_key: str, s3gate_endpoint: str):
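    """Create a boto3 S3 client for the given S3 gate endpoint with retries
    limited to MAX_REQUEST_ATTEMPTS."""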
    try:
        session = boto3.Session()
        config = Config(
            retries={
                "max_attempts": MAX_REQUEST_ATTEMPTS,
                "mode": RETRY_MODE,
            }
        )

        s3_client = session.client(
            service_name="s3",
            aws_access_key_id=access_key_id,
            aws_secret_access_key=secret_access_key,
            config=config,
            endpoint_url=s3gate_endpoint,
            verify=False,
        )
        return s3_client
    except ClientError as err:
        raise Exception(
            f'Error Message: {err.response["Error"]["Message"]}\n'
            f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}'
        ) from err


@allure.step("Configure S3 client (aws cli)")
def configure_cli_client(access_key_id: str, secret_access_key: str, s3gate_endpoint: str):
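    """Create an AWS CLI based S3 client for the given S3 gate endpoint;
    skip the test if the AWS CLI is not available."""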
    try:
        client = AwsCliClient(s3gate_endpoint)
        _configure_aws_cli("aws configure", access_key_id, secret_access_key)
        _cmd_run(f"aws configure set max_attempts {MAX_REQUEST_ATTEMPTS}")
        _cmd_run(f"aws configure set retry_mode {RETRY_MODE}")
        return client
    except Exception as err:
        if "command was not found or was not executable" in str(err):
            pytest.skip("AWS CLI was not found")
        else:
            raise RuntimeError("Error while configuring AwsCliClient") from err