From f1d3aa60981d117f5f52c85dddf18f2122d55b09 Mon Sep 17 00:00:00 2001 From: Vladimir Domnich Date: Mon, 3 Oct 2022 12:16:27 +0400 Subject: [PATCH] [#334] Disable automatic retries in S3 clients Signed-off-by: Vladimir Domnich --- pytest_tests/helpers/s3_helper.py | 1 + pytest_tests/steps/aws_cli_client.py | 12 +- pytest_tests/steps/s3_gate_base.py | 125 ++++++++++++++++-- pytest_tests/steps/s3_gate_bucket.py | 86 +----------- .../services/s3_gate/test_s3_ACL.py | 2 - .../testsuites/services/test_http_gate.py | 10 +- robot/variables/common.py | 4 +- 7 files changed, 128 insertions(+), 112 deletions(-) diff --git a/pytest_tests/helpers/s3_helper.py b/pytest_tests/helpers/s3_helper.py index c6146e90..cb18e87c 100644 --- a/pytest_tests/helpers/s3_helper.py +++ b/pytest_tests/helpers/s3_helper.py @@ -1,3 +1,4 @@ +import os from typing import Optional import allure diff --git a/pytest_tests/steps/aws_cli_client.py b/pytest_tests/steps/aws_cli_client.py index a00043b2..9dbff505 100644 --- a/pytest_tests/steps/aws_cli_client.py +++ b/pytest_tests/steps/aws_cli_client.py @@ -5,7 +5,7 @@ from datetime import datetime from typing import Optional import allure -from cli_helpers import _cmd_run, _configure_aws_cli +from cli_helpers import _cmd_run from common import ASSETS_DIR, S3_GATE logger = logging.getLogger("NeoLogger") @@ -18,16 +18,6 @@ class AwsCliClient: # certificate in devenv) and disable automatic pagination in CLI output common_flags = "--no-verify-ssl --no-paginate" - def __init__(self, access_key_id: str, secret_access_key: str): - self.access_key_id = access_key_id - self.secret_access_key = secret_access_key - self.config_aws_client() - - def config_aws_client(self): - cmd = "aws configure" - logger.info(f"Executing command: {cmd}") - _configure_aws_cli(cmd, self.access_key_id, self.secret_access_key) - def create_bucket( self, Bucket: str, diff --git a/pytest_tests/steps/s3_gate_base.py b/pytest_tests/steps/s3_gate_base.py index 4bdd430d..874db618 100644 --- a/pytest_tests/steps/s3_gate_base.py +++ b/pytest_tests/steps/s3_gate_base.py @@ -1,12 +1,41 @@ +import json +import logging import os +import re +import uuid +from typing import Optional import allure +import boto3 import pytest +import urllib3 +from botocore.config import Config +from botocore.exceptions import ClientError +from cli_helpers import _cmd_run, _configure_aws_cli, _run_with_passwd +from common import ( + NEOFS_AUTHMATE_EXEC, + NEOFS_ENDPOINT, + S3_GATE, + S3_GATE_WALLET_PASS, + S3_GATE_WALLET_PATH, +) +from data_formatters import get_wallet_public_key from python_keywords.container import list_containers -from steps import s3_gate_bucket from steps.aws_cli_client import AwsCliClient +# Disable warnings on self-signed certificate which the +# boto library produces on requests to S3-gate in dev-env +urllib3.disable_warnings() + +logger = logging.getLogger("NeoLogger") +CREDENTIALS_CREATE_TIMEOUT = "1m" + +# Number of attempts that S3 clients will attempt per each request (1 means single attempt +# without any retries) +MAX_REQUEST_ATTEMPTS = 1 +RETRY_MODE = "standard" + class TestS3GateBase: s3_client = None @@ -23,19 +52,95 @@ class TestS3GateBase: access_key_id, secret_access_key, owner_private_key, - ) = s3_gate_bucket.init_s3_credentials(wallet, s3_bearer_rules_file=s3_bearer_rules_file) + ) = init_s3_credentials(wallet, s3_bearer_rules_file=s3_bearer_rules_file) containers_list = list_containers(wallet) assert cid in containers_list, f"Expected cid {cid} in {containers_list}" if request.param == "aws cli": - 
try: - client = AwsCliClient(access_key_id, secret_access_key) - except Exception as err: - if "command was not found or was not executable" in str(err): - pytest.skip("AWS CLI was not found") - else: - raise RuntimeError("Error on creating instance for AwsCliClient") from err + client = configure_cli_client(access_key_id, secret_access_key) else: - client = s3_gate_bucket.config_s3_client(access_key_id, secret_access_key) + client = configure_boto3_client(access_key_id, secret_access_key) TestS3GateBase.s3_client = client TestS3GateBase.wallet = wallet + + +@allure.step("Init S3 Credentials") +def init_s3_credentials(wallet_path: str, s3_bearer_rules_file: Optional[str] = None): + bucket = str(uuid.uuid4()) + s3_bearer_rules = s3_bearer_rules_file or "robot/resources/files/s3_bearer_rules.json" + gate_public_key = get_wallet_public_key(S3_GATE_WALLET_PATH, S3_GATE_WALLET_PASS) + cmd = ( + f"{NEOFS_AUTHMATE_EXEC} --debug --with-log --timeout {CREDENTIALS_CREATE_TIMEOUT} " + f"issue-secret --wallet {wallet_path} --gate-public-key={gate_public_key} " + f"--peer {NEOFS_ENDPOINT} --container-friendly-name {bucket} " + f"--bearer-rules {s3_bearer_rules}" + ) + logger.info(f"Executing command: {cmd}") + + try: + output = _run_with_passwd(cmd) + logger.info(f"Command completed with output: {output}") + + # output contains some debug info and then several JSON structures, so we find each + # JSON structure by curly brackets (naive approach, but works while JSON is not nested) + # and then we take JSON containing secret_access_key + json_blocks = re.findall(r"\{.*?\}", output, re.DOTALL) + for json_block in json_blocks: + try: + parsed_json_block = json.loads(json_block) + if "secret_access_key" in parsed_json_block: + return ( + parsed_json_block["container_id"], + bucket, + parsed_json_block["access_key_id"], + parsed_json_block["secret_access_key"], + parsed_json_block["owner_private_key"], + ) + except json.JSONDecodeError: + raise AssertionError(f"Could not parse info from output\n{output}") + raise AssertionError(f"Could not find AWS credentials in output:\n{output}") + + except Exception as exc: + raise RuntimeError(f"Failed to init s3 credentials because of error\n{exc}") from exc + + +@allure.step("Configure S3 client (boto3)") +def configure_boto3_client(access_key_id: str, secret_access_key: str): + try: + session = boto3.session.Session() + config = Config( + retries={ + "max_attempts": MAX_REQUEST_ATTEMPTS, + "mode": RETRY_MODE, + } + ) + + s3_client = session.client( + service_name="s3", + aws_access_key_id=access_key_id, + aws_secret_access_key=secret_access_key, + config=config, + endpoint_url=S3_GATE, + verify=False, + ) + return s3_client + except ClientError as err: + raise Exception( + f'Error Message: {err.response["Error"]["Message"]}\n' + f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}' + ) from err + + +@allure.step("Configure S3 client (aws cli)") +def configure_cli_client(access_key_id: str, secret_access_key: str): + try: + client = AwsCliClient() + _configure_aws_cli("aws configure", access_key_id, secret_access_key) + _cmd_run(f"aws configure set max_attempts {MAX_REQUEST_ATTEMPTS}") + _cmd_run(f"aws configure set retry_mode {RETRY_MODE}") + return client + except Exception as err: + if "command was not found or was not executable" in str(err): + pytest.skip("AWS CLI was not found") + else: + raise RuntimeError("Error while configuring AwsCliClient") from err diff --git a/pytest_tests/steps/s3_gate_bucket.py 
b/pytest_tests/steps/s3_gate_bucket.py index 1dcad0bf..86ef11c7 100644 --- a/pytest_tests/steps/s3_gate_bucket.py +++ b/pytest_tests/steps/s3_gate_bucket.py @@ -1,32 +1,14 @@ -#!/usr/bin/python3 - -import json import logging -import os -import re import uuid from enum import Enum from time import sleep from typing import Optional import allure -import boto3 -import urllib3 from botocore.exceptions import ClientError -from cli_helpers import _run_with_passwd, log_command_execution -from common import NEOFS_ENDPOINT, S3_GATE, S3_GATE_WALLET_PASS, S3_GATE_WALLET_PATH -from data_formatters import get_wallet_public_key +from cli_helpers import log_command_execution -from steps.aws_cli_client import AwsCliClient - -########################################################## -# Disabling warnings on self-signed certificate which the -# boto library produces on requests to S3-gate in dev-env. -urllib3.disable_warnings() -########################################################## logger = logging.getLogger("NeoLogger") -CREDENTIALS_CREATE_TIMEOUT = "1m" -NEOFS_EXEC = os.getenv("NEOFS_EXEC", "neofs-authmate") # Artificial delay that we add after object deletion and container creation # Delay is added because sometimes immediately after deletion object still appears @@ -40,69 +22,6 @@ class VersioningStatus(Enum): SUSPENDED = "Suspended" -@allure.step("Init S3 Credentials") -def init_s3_credentials(wallet_path, s3_bearer_rules_file: Optional[str] = None): - bucket = str(uuid.uuid4()) - s3_bearer_rules = s3_bearer_rules_file or "robot/resources/files/s3_bearer_rules.json" - gate_public_key = get_wallet_public_key(S3_GATE_WALLET_PATH, S3_GATE_WALLET_PASS) - cmd = ( - f"{NEOFS_EXEC} --debug --with-log --timeout {CREDENTIALS_CREATE_TIMEOUT} " - f"issue-secret --wallet {wallet_path} --gate-public-key={gate_public_key} " - f"--peer {NEOFS_ENDPOINT} --container-friendly-name {bucket} " - f"--bearer-rules {s3_bearer_rules}" - ) - logger.info(f"Executing command: {cmd}") - - try: - output = _run_with_passwd(cmd) - logger.info(f"Command completed with output: {output}") - - # output contains some debug info and then several JSON structures, so we find each - # JSON structure by curly brackets (naive approach, but works while JSON is not nested) - # and then we take JSON containing secret_access_key - json_blocks = re.findall(r"\{.*?\}", output, re.DOTALL) - for json_block in json_blocks: - try: - parsed_json_block = json.loads(json_block) - if "secret_access_key" in parsed_json_block: - return ( - parsed_json_block["container_id"], - bucket, - parsed_json_block["access_key_id"], - parsed_json_block["secret_access_key"], - parsed_json_block["owner_private_key"], - ) - except json.JSONDecodeError: - raise AssertionError(f"Could not parse info from output\n{output}") - raise AssertionError(f"Could not find AWS credentials in output:\n{output}") - - except Exception as exc: - raise RuntimeError(f"Failed to init s3 credentials because of error\n{exc}") from exc - - -@allure.step("Config S3 client") -def config_s3_client(access_key_id: str, secret_access_key: str): - try: - - session = boto3.session.Session() - - s3_client = session.client( - service_name="s3", - aws_access_key_id=access_key_id, - aws_secret_access_key=secret_access_key, - endpoint_url=S3_GATE, - verify=False, - ) - - return s3_client - - except ClientError as err: - raise Exception( - f'Error Message: {err.response["Error"]["Message"]}\n' - f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}' - ) from err - - 
@allure.step("Create bucket S3") def create_bucket_s3( s3_client, object_lock_enabled_for_bucket: Optional[bool] = None, acl: Optional[str] = None @@ -119,7 +38,6 @@ def create_bucket_s3( log_command_execution(f"Created S3 bucket {bucket_name}", s3_bucket) sleep(S3_SYNC_WAIT_TIME) return bucket_name - except ClientError as err: raise Exception( f'Error Message: {err.response["Error"]["Message"]}\n' @@ -260,11 +178,11 @@ def put_bucket_acl_s3( params.update({"GrantWrite": grant_write}) elif grant_read: params.update({"GrantRead": grant_read}) + try: response = s3_client.put_bucket_acl(**params) log_command_execution("S3 ACL bucket result", response) return response.get("Grants") - except ClientError as err: raise Exception( f'Error Message: {err.response["Error"]["Message"]}\n' diff --git a/pytest_tests/testsuites/services/s3_gate/test_s3_ACL.py b/pytest_tests/testsuites/services/s3_gate/test_s3_ACL.py index 808468ad..854e43dc 100644 --- a/pytest_tests/testsuites/services/s3_gate/test_s3_ACL.py +++ b/pytest_tests/testsuites/services/s3_gate/test_s3_ACL.py @@ -1,5 +1,3 @@ -import os - import allure import pytest from python_keywords.utility_keywords import generate_file diff --git a/pytest_tests/testsuites/services/test_http_gate.py b/pytest_tests/testsuites/services/test_http_gate.py index 440f7af1..ff01ba3b 100644 --- a/pytest_tests/testsuites/services/test_http_gate.py +++ b/pytest_tests/testsuites/services/test_http_gate.py @@ -233,11 +233,13 @@ class TestHttpGate: get_via_http_gate(cid=cid, oid=oid) raise AssertionError(f"Expected error on getting object with cid: {cid}") except Exception as err: - assert error_pattern in err, f"Expected {err} to match {error_pattern}" + assert error_pattern in str(err), f"Expected {err} to match {error_pattern}" @staticmethod @allure.step("Verify object can be get using HTTP header attribute") - def get_object_by_attr_and_verify_hashes(oid: str, file_name: str, cid: str, attrs: dict): + def get_object_by_attr_and_verify_hashes( + oid: str, file_name: str, cid: str, attrs: dict + ) -> None: got_file_path_http = get_via_http_gate(cid=cid, oid=oid) got_file_path_http_attr = get_via_http_gate_by_attribute(cid=cid, attribute=attrs) @@ -249,7 +251,7 @@ class TestHttpGate: @allure.step("Verify object can be get using HTTP") def get_object_and_verify_hashes( oid: str, file_name: str, wallet: str, cid: str, object_getter=None - ): + ) -> None: nodes = get_nodes_without_object(wallet=wallet, cid=cid, oid=oid) random_node = choice(nodes) object_getter = object_getter or get_via_http_gate @@ -260,7 +262,7 @@ class TestHttpGate: TestHttpGate._assert_hashes_are_equal(file_name, got_file_path, got_file_path_http) @staticmethod - def _assert_hashes_are_equal(orig_file_name: str, got_file_1: str, got_file_2: str): + def _assert_hashes_are_equal(orig_file_name: str, got_file_1: str, got_file_2: str) -> None: msg = "Expected hashes are equal for files {f1} and {f2}" got_file_hash_http = get_file_hash(got_file_1) assert get_file_hash(got_file_2) == got_file_hash_http, msg.format( diff --git a/robot/variables/common.py b/robot/variables/common.py index c32a7ac9..52759758 100644 --- a/robot/variables/common.py +++ b/robot/variables/common.py @@ -124,9 +124,11 @@ STORAGE_NODE_SSH_PRIVATE_KEY_PATH = os.getenv("STORAGE_NODE_SSH_PRIVATE_KEY_PATH # Path to directory with CLI binaries on storage node (currently we need only neofs-cli) STORAGE_NODE_BIN_PATH = os.getenv("STORAGE_NODE_BIN_PATH", f"{DEVENV_PATH}/vendor") +# Path to neofs-s3-authmate utility +NEOFS_AUTHMATE_EXEC = 
os.getenv("NEOFS_AUTHMATE_EXEC", "neofs-authmate") + # Path and config for neofs-adm utility. Optional if tests are running against devenv NEOFS_ADM_EXEC = os.getenv("NEOFS_ADM_EXEC", "neofs-adm") - NEOFS_ADM_CONFIG_PATH = os.getenv("NEOFS_ADM_CONFIG_PATH") INFRASTRUCTURE_TYPE = os.getenv("INFRASTRUCTURE_TYPE", "LOCAL_DEVENV")