[#334] Disable automatic retries in S3 clients

Signed-off-by: Vladimir Domnich <v.domnich@yadro.com>

This commit is contained in:
parent 987df42542
commit f1d3aa6098

7 changed files with 128 additions and 112 deletions
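The change pins both S3 clients used by the tests to a single request attempt: the boto3 client gets a botocore `Config` with an explicit `retries` policy, and the AWS CLI gets the matching `max_attempts`/`retry_mode` settings via `aws configure set`. Below is a minimal, self-contained sketch of the boto3 side, mirroring the values introduced in this diff; the endpoint and credentials are placeholders, not values from the commit.

    import boto3
    from botocore.config import Config

    # Same retry policy as configure_boto3_client() in this diff: a single attempt,
    # using the "standard" retry mode.
    no_retry_config = Config(retries={"max_attempts": 1, "mode": "standard"})

    s3 = boto3.session.Session().client(
        service_name="s3",
        aws_access_key_id="<placeholder-access-key>",
        aws_secret_access_key="<placeholder-secret-key>",
        endpoint_url="https://s3.example.localhost",  # placeholder S3 gateway endpoint
        config=no_retry_config,
        verify=False,  # dev-env uses a self-signed certificate, as noted in the diff
    )

    # The effective policy can be inspected on the created client;
    # expected: {'max_attempts': 1, 'mode': 'standard'}
    print(s3.meta.config.retries)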
@@ -1,3 +1,4 @@
+import os
 from typing import Optional
 
 import allure
@@ -5,7 +5,7 @@ from datetime import datetime
 from typing import Optional
 
 import allure
-from cli_helpers import _cmd_run, _configure_aws_cli
+from cli_helpers import _cmd_run
 from common import ASSETS_DIR, S3_GATE
 
 logger = logging.getLogger("NeoLogger")
@@ -18,16 +18,6 @@ class AwsCliClient:
     # certificate in devenv) and disable automatic pagination in CLI output
     common_flags = "--no-verify-ssl --no-paginate"
 
-    def __init__(self, access_key_id: str, secret_access_key: str):
-        self.access_key_id = access_key_id
-        self.secret_access_key = secret_access_key
-        self.config_aws_client()
-
-    def config_aws_client(self):
-        cmd = "aws configure"
-        logger.info(f"Executing command: {cmd}")
-        _configure_aws_cli(cmd, self.access_key_id, self.secret_access_key)
-
     def create_bucket(
         self,
         Bucket: str,
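With the constructor and `config_aws_client` removed, `AwsCliClient` no longer shells out to `aws configure` when it is instantiated; credential setup moves to the `configure_cli_client` helper added further down in this diff. A hypothetical usage sketch under that assumption (the credentials and bucket name are placeholders; `create_bucket` and its `Bucket` parameter are the ones visible above):

    # Sketch only: configure_cli_client() is introduced later in this diff,
    # and the credentials here are placeholders.
    access_key_id = "<placeholder-access-key>"
    secret_access_key = "<placeholder-secret-key>"

    client = configure_cli_client(access_key_id, secret_access_key)
    client.create_bucket(Bucket="sample-bucket")  # AwsCliClient.create_bucket, as shown above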
@@ -1,12 +1,41 @@
+import json
+import logging
 import os
+import re
+import uuid
+from typing import Optional
 
 import allure
+import boto3
 import pytest
+import urllib3
+from botocore.config import Config
+from botocore.exceptions import ClientError
+from cli_helpers import _cmd_run, _configure_aws_cli, _run_with_passwd
+from common import (
+    NEOFS_AUTHMATE_EXEC,
+    NEOFS_ENDPOINT,
+    S3_GATE,
+    S3_GATE_WALLET_PASS,
+    S3_GATE_WALLET_PATH,
+)
+from data_formatters import get_wallet_public_key
 from python_keywords.container import list_containers
-from steps import s3_gate_bucket
 from steps.aws_cli_client import AwsCliClient
 
+# Disable warnings on self-signed certificate which the
+# boto library produces on requests to S3-gate in dev-env
+urllib3.disable_warnings()
+
+logger = logging.getLogger("NeoLogger")
+CREDENTIALS_CREATE_TIMEOUT = "1m"
+
+# Number of attempts that S3 clients will attempt per each request (1 means single attempt
+# without any retries)
+MAX_REQUEST_ATTEMPTS = 1
+RETRY_MODE = "standard"
+
 
 class TestS3GateBase:
     s3_client = None
@@ -23,19 +52,95 @@ class TestS3GateBase:
             access_key_id,
             secret_access_key,
             owner_private_key,
-        ) = s3_gate_bucket.init_s3_credentials(wallet, s3_bearer_rules_file=s3_bearer_rules_file)
+        ) = init_s3_credentials(wallet, s3_bearer_rules_file=s3_bearer_rules_file)
         containers_list = list_containers(wallet)
         assert cid in containers_list, f"Expected cid {cid} in {containers_list}"
 
         if request.param == "aws cli":
-            try:
-                client = AwsCliClient(access_key_id, secret_access_key)
-            except Exception as err:
-                if "command was not found or was not executable" in str(err):
-                    pytest.skip("AWS CLI was not found")
-                else:
-                    raise RuntimeError("Error on creating instance for AwsCliClient") from err
+            client = configure_cli_client(access_key_id, secret_access_key)
         else:
-            client = s3_gate_bucket.config_s3_client(access_key_id, secret_access_key)
+            client = configure_boto3_client(access_key_id, secret_access_key)
         TestS3GateBase.s3_client = client
         TestS3GateBase.wallet = wallet
+
+
+@allure.step("Init S3 Credentials")
+def init_s3_credentials(wallet_path: str, s3_bearer_rules_file: Optional[str] = None):
+    bucket = str(uuid.uuid4())
+    s3_bearer_rules = s3_bearer_rules_file or "robot/resources/files/s3_bearer_rules.json"
+    gate_public_key = get_wallet_public_key(S3_GATE_WALLET_PATH, S3_GATE_WALLET_PASS)
+    cmd = (
+        f"{NEOFS_AUTHMATE_EXEC} --debug --with-log --timeout {CREDENTIALS_CREATE_TIMEOUT} "
+        f"issue-secret --wallet {wallet_path} --gate-public-key={gate_public_key} "
+        f"--peer {NEOFS_ENDPOINT} --container-friendly-name {bucket} "
+        f"--bearer-rules {s3_bearer_rules}"
+    )
+    logger.info(f"Executing command: {cmd}")
+
+    try:
+        output = _run_with_passwd(cmd)
+        logger.info(f"Command completed with output: {output}")
+
+        # output contains some debug info and then several JSON structures, so we find each
+        # JSON structure by curly brackets (naive approach, but works while JSON is not nested)
+        # and then we take JSON containing secret_access_key
+        json_blocks = re.findall(r"\{.*?\}", output, re.DOTALL)
+        for json_block in json_blocks:
+            try:
+                parsed_json_block = json.loads(json_block)
+                if "secret_access_key" in parsed_json_block:
+                    return (
+                        parsed_json_block["container_id"],
+                        bucket,
+                        parsed_json_block["access_key_id"],
+                        parsed_json_block["secret_access_key"],
+                        parsed_json_block["owner_private_key"],
+                    )
+            except json.JSONDecodeError:
+                raise AssertionError(f"Could not parse info from output\n{output}")
+        raise AssertionError(f"Could not find AWS credentials in output:\n{output}")
+
+    except Exception as exc:
+        raise RuntimeError(f"Failed to init s3 credentials because of error\n{exc}") from exc
+
+
+@allure.step("Configure S3 client (boto3)")
+def configure_boto3_client(access_key_id: str, secret_access_key: str):
+    try:
+        session = boto3.session.Session()
+        config = Config(
+            retries={
+                "max_attempts": MAX_REQUEST_ATTEMPTS,
+                "mode": RETRY_MODE,
+            }
+        )
+
+        s3_client = session.client(
+            service_name="s3",
+            aws_access_key_id=access_key_id,
+            aws_secret_access_key=secret_access_key,
+            config=config,
+            endpoint_url=S3_GATE,
+            verify=False,
+        )
+        return s3_client
+    except ClientError as err:
+        raise Exception(
+            f'Error Message: {err.response["Error"]["Message"]}\n'
+            f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}'
+        ) from err
+
+
+@allure.step("Configure S3 client (aws cli)")
+def configure_cli_client(access_key_id: str, secret_access_key: str):
+    try:
+        client = AwsCliClient()
+        _configure_aws_cli("aws configure", access_key_id, secret_access_key)
+        _cmd_run(f"aws configure set max_attempts {MAX_REQUEST_ATTEMPTS}")
+        _cmd_run(f"aws configure set retry_mode {RETRY_MODE}")
+        return client
+    except Exception as err:
+        if "command was not found or was not executable" in str(err):
+            pytest.skip("AWS CLI was not found")
+        else:
+            raise RuntimeError("Error while configuring AwsCliClient") from err
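The two `aws configure set` calls above persist the retry limits for every subsequent CLI invocation. A hypothetical check (not part of the commit) of what they write, assuming the default profile and the default `~/.aws/config` location:

    import configparser
    import os

    config = configparser.ConfigParser()
    config.read(os.path.expanduser("~/.aws/config"))

    # `aws configure set max_attempts 1` and `aws configure set retry_mode standard`
    # store the values under the default profile section of ~/.aws/config.
    assert config["default"]["max_attempts"] == "1"
    assert config["default"]["retry_mode"] == "standard"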
@@ -1,32 +1,14 @@
-#!/usr/bin/python3
-
-import json
 import logging
-import os
-import re
 import uuid
 from enum import Enum
 from time import sleep
 from typing import Optional
 
 import allure
-import boto3
-import urllib3
 from botocore.exceptions import ClientError
-from cli_helpers import _run_with_passwd, log_command_execution
-from common import NEOFS_ENDPOINT, S3_GATE, S3_GATE_WALLET_PASS, S3_GATE_WALLET_PATH
-from data_formatters import get_wallet_public_key
-
-from steps.aws_cli_client import AwsCliClient
-
-##########################################################
-# Disabling warnings on self-signed certificate which the
-# boto library produces on requests to S3-gate in dev-env.
-urllib3.disable_warnings()
-##########################################################
+from cli_helpers import log_command_execution
+
 logger = logging.getLogger("NeoLogger")
-CREDENTIALS_CREATE_TIMEOUT = "1m"
-NEOFS_EXEC = os.getenv("NEOFS_EXEC", "neofs-authmate")
 
 # Artificial delay that we add after object deletion and container creation
 # Delay is added because sometimes immediately after deletion object still appears
@@ -40,69 +22,6 @@ class VersioningStatus(Enum):
     SUSPENDED = "Suspended"
 
 
-@allure.step("Init S3 Credentials")
-def init_s3_credentials(wallet_path, s3_bearer_rules_file: Optional[str] = None):
-    bucket = str(uuid.uuid4())
-    s3_bearer_rules = s3_bearer_rules_file or "robot/resources/files/s3_bearer_rules.json"
-    gate_public_key = get_wallet_public_key(S3_GATE_WALLET_PATH, S3_GATE_WALLET_PASS)
-    cmd = (
-        f"{NEOFS_EXEC} --debug --with-log --timeout {CREDENTIALS_CREATE_TIMEOUT} "
-        f"issue-secret --wallet {wallet_path} --gate-public-key={gate_public_key} "
-        f"--peer {NEOFS_ENDPOINT} --container-friendly-name {bucket} "
-        f"--bearer-rules {s3_bearer_rules}"
-    )
-    logger.info(f"Executing command: {cmd}")
-
-    try:
-        output = _run_with_passwd(cmd)
-        logger.info(f"Command completed with output: {output}")
-
-        # output contains some debug info and then several JSON structures, so we find each
-        # JSON structure by curly brackets (naive approach, but works while JSON is not nested)
-        # and then we take JSON containing secret_access_key
-        json_blocks = re.findall(r"\{.*?\}", output, re.DOTALL)
-        for json_block in json_blocks:
-            try:
-                parsed_json_block = json.loads(json_block)
-                if "secret_access_key" in parsed_json_block:
-                    return (
-                        parsed_json_block["container_id"],
-                        bucket,
-                        parsed_json_block["access_key_id"],
-                        parsed_json_block["secret_access_key"],
-                        parsed_json_block["owner_private_key"],
-                    )
-            except json.JSONDecodeError:
-                raise AssertionError(f"Could not parse info from output\n{output}")
-        raise AssertionError(f"Could not find AWS credentials in output:\n{output}")
-
-    except Exception as exc:
-        raise RuntimeError(f"Failed to init s3 credentials because of error\n{exc}") from exc
-
-
-@allure.step("Config S3 client")
-def config_s3_client(access_key_id: str, secret_access_key: str):
-    try:
-
-        session = boto3.session.Session()
-
-        s3_client = session.client(
-            service_name="s3",
-            aws_access_key_id=access_key_id,
-            aws_secret_access_key=secret_access_key,
-            endpoint_url=S3_GATE,
-            verify=False,
-        )
-
-        return s3_client
-
-    except ClientError as err:
-        raise Exception(
-            f'Error Message: {err.response["Error"]["Message"]}\n'
-            f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}'
-        ) from err
-
-
 @allure.step("Create bucket S3")
 def create_bucket_s3(
     s3_client, object_lock_enabled_for_bucket: Optional[bool] = None, acl: Optional[str] = None
@@ -119,7 +38,6 @@ def create_bucket_s3(
         log_command_execution(f"Created S3 bucket {bucket_name}", s3_bucket)
         sleep(S3_SYNC_WAIT_TIME)
         return bucket_name
-
     except ClientError as err:
         raise Exception(
             f'Error Message: {err.response["Error"]["Message"]}\n'
@@ -260,11 +178,11 @@ def put_bucket_acl_s3(
         params.update({"GrantWrite": grant_write})
     elif grant_read:
         params.update({"GrantRead": grant_read})
 
     try:
         response = s3_client.put_bucket_acl(**params)
         log_command_execution("S3 ACL bucket result", response)
         return response.get("Grants")
 
     except ClientError as err:
         raise Exception(
             f'Error Message: {err.response["Error"]["Message"]}\n'
@@ -1,5 +1,3 @@
-import os
-
 import allure
 import pytest
 from python_keywords.utility_keywords import generate_file
@@ -233,11 +233,13 @@ class TestHttpGate:
             get_via_http_gate(cid=cid, oid=oid)
             raise AssertionError(f"Expected error on getting object with cid: {cid}")
         except Exception as err:
-            assert error_pattern in err, f"Expected {err} to match {error_pattern}"
+            assert error_pattern in str(err), f"Expected {err} to match {error_pattern}"
 
     @staticmethod
     @allure.step("Verify object can be get using HTTP header attribute")
-    def get_object_by_attr_and_verify_hashes(oid: str, file_name: str, cid: str, attrs: dict):
+    def get_object_by_attr_and_verify_hashes(
+        oid: str, file_name: str, cid: str, attrs: dict
+    ) -> None:
         got_file_path_http = get_via_http_gate(cid=cid, oid=oid)
         got_file_path_http_attr = get_via_http_gate_by_attribute(cid=cid, attribute=attrs)
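The `str(err)` cast in the assertion above is needed because a substring check against the exception object itself raises `TypeError` (exceptions do not support `in`); only the rendered message does. A small illustration, not taken from the test suite:

    err = RuntimeError("object not found")

    try:
        "not found" in err  # TypeError: argument of type 'RuntimeError' is not iterable
    except TypeError:
        pass

    assert "not found" in str(err)  # substring check against the rendered message works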
@@ -249,7 +251,7 @@ class TestHttpGate:
     @allure.step("Verify object can be get using HTTP")
     def get_object_and_verify_hashes(
         oid: str, file_name: str, wallet: str, cid: str, object_getter=None
-    ):
+    ) -> None:
         nodes = get_nodes_without_object(wallet=wallet, cid=cid, oid=oid)
         random_node = choice(nodes)
         object_getter = object_getter or get_via_http_gate
@@ -260,7 +262,7 @@ class TestHttpGate:
         TestHttpGate._assert_hashes_are_equal(file_name, got_file_path, got_file_path_http)
 
     @staticmethod
-    def _assert_hashes_are_equal(orig_file_name: str, got_file_1: str, got_file_2: str):
+    def _assert_hashes_are_equal(orig_file_name: str, got_file_1: str, got_file_2: str) -> None:
         msg = "Expected hashes are equal for files {f1} and {f2}"
         got_file_hash_http = get_file_hash(got_file_1)
         assert get_file_hash(got_file_2) == got_file_hash_http, msg.format(
@@ -124,9 +124,11 @@ STORAGE_NODE_SSH_PRIVATE_KEY_PATH = os.getenv("STORAGE_NODE_SSH_PRIVATE_KEY_PATH
 # Path to directory with CLI binaries on storage node (currently we need only neofs-cli)
 STORAGE_NODE_BIN_PATH = os.getenv("STORAGE_NODE_BIN_PATH", f"{DEVENV_PATH}/vendor")
 
+# Path to neofs-s3-authmate utility
+NEOFS_AUTHMATE_EXEC = os.getenv("NEOFS_AUTHMATE_EXEC", "neofs-authmate")
+
 # Path and config for neofs-adm utility. Optional if tests are running against devenv
 NEOFS_ADM_EXEC = os.getenv("NEOFS_ADM_EXEC", "neofs-adm")
 NEOFS_ADM_CONFIG_PATH = os.getenv("NEOFS_ADM_CONFIG_PATH")
 
 INFRASTRUCTURE_TYPE = os.getenv("INFRASTRUCTURE_TYPE", "LOCAL_DEVENV")
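Like the other paths in this file, the new `NEOFS_AUTHMATE_EXEC` setting falls back to a binary resolved from `PATH` and can be overridden through the environment. A hypothetical override (the path is a placeholder):

    import os

    # Must be set before `common` is imported, since the module reads it at import time.
    os.environ["NEOFS_AUTHMATE_EXEC"] = "/usr/local/bin/neofs-authmate"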