[#334] Disable automatic retries in S3 clients

Signed-off-by: Vladimir Domnich <v.domnich@yadro.com>
Vladimir Domnich 2022-10-03 12:16:27 +04:00 committed by Vladimir Domnich
parent 987df42542
commit f1d3aa6098
7 changed files with 128 additions and 112 deletions

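The diff below does not show where retries are configured in the new code, so for reference here is a minimal sketch (not part of this commit) of disabling automatic retries on a boto3 S3 client, assuming botocore's Config with the standard retry mode; the endpoint and credential values are placeholders:

import boto3
from botocore.config import Config

# One attempt total in "standard" retry mode means no automatic retries.
no_retry_config = Config(retries={"max_attempts": 1, "mode": "standard"})

s3_client = boto3.session.Session().client(
    service_name="s3",
    aws_access_key_id="<access_key_id>",          # placeholder
    aws_secret_access_key="<secret_access_key>",  # placeholder
    endpoint_url="https://s3-gate.local:8080",    # placeholder for S3_GATE
    verify=False,
    config=no_retry_config,
)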

@@ -1,32 +1,14 @@
#!/usr/bin/python3
import json
import logging
import os
import re
import uuid
from enum import Enum
from time import sleep
from typing import Optional
import allure
import boto3
import urllib3
from botocore.exceptions import ClientError
from cli_helpers import _run_with_passwd, log_command_execution
from common import NEOFS_ENDPOINT, S3_GATE, S3_GATE_WALLET_PASS, S3_GATE_WALLET_PATH
from data_formatters import get_wallet_public_key
from cli_helpers import log_command_execution
from steps.aws_cli_client import AwsCliClient
##########################################################
# Disable the self-signed-certificate warnings that the boto
# library produces on requests to S3-gate in dev-env.
urllib3.disable_warnings()
##########################################################
logger = logging.getLogger("NeoLogger")
CREDENTIALS_CREATE_TIMEOUT = "1m"
NEOFS_EXEC = os.getenv("NEOFS_EXEC", "neofs-authmate")
# Artificial delay that we add after object deletion and container creation.
# Delay is added because sometimes, immediately after deletion, the object still appears
@@ -40,69 +22,6 @@ class VersioningStatus(Enum):
    SUSPENDED = "Suspended"
@allure.step("Init S3 Credentials")
def init_s3_credentials(wallet_path, s3_bearer_rules_file: Optional[str] = None):
bucket = str(uuid.uuid4())
s3_bearer_rules = s3_bearer_rules_file or "robot/resources/files/s3_bearer_rules.json"
gate_public_key = get_wallet_public_key(S3_GATE_WALLET_PATH, S3_GATE_WALLET_PASS)
cmd = (
f"{NEOFS_EXEC} --debug --with-log --timeout {CREDENTIALS_CREATE_TIMEOUT} "
f"issue-secret --wallet {wallet_path} --gate-public-key={gate_public_key} "
f"--peer {NEOFS_ENDPOINT} --container-friendly-name {bucket} "
f"--bearer-rules {s3_bearer_rules}"
)
logger.info(f"Executing command: {cmd}")
try:
output = _run_with_passwd(cmd)
logger.info(f"Command completed with output: {output}")
# output contains some debug info and then several JSON structures, so we find each
# JSON structure by curly brackets (naive approach, but works while JSON is not nested)
# and then we take JSON containing secret_access_key
json_blocks = re.findall(r"\{.*?\}", output, re.DOTALL)
for json_block in json_blocks:
try:
parsed_json_block = json.loads(json_block)
if "secret_access_key" in parsed_json_block:
return (
parsed_json_block["container_id"],
bucket,
parsed_json_block["access_key_id"],
parsed_json_block["secret_access_key"],
parsed_json_block["owner_private_key"],
)
except json.JSONDecodeError:
raise AssertionError(f"Could not parse info from output\n{output}")
raise AssertionError(f"Could not find AWS credentials in output:\n{output}")
except Exception as exc:
raise RuntimeError(f"Failed to init s3 credentials because of error\n{exc}") from exc
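A usage sketch of the helper above, unpacking the returned tuple in the order it is built (the wallet path is a placeholder; config_s3_client is defined just below):

cid, bucket_name, access_key_id, secret_access_key, owner_private_key = init_s3_credentials(
    "/path/to/wallet.json"  # placeholder test wallet
)
s3_client = config_s3_client(access_key_id, secret_access_key)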
@allure.step("Config S3 client")
def config_s3_client(access_key_id: str, secret_access_key: str):
try:
session = boto3.session.Session()
s3_client = session.client(
service_name="s3",
aws_access_key_id=access_key_id,
aws_secret_access_key=secret_access_key,
endpoint_url=S3_GATE,
verify=False,
)
return s3_client
except ClientError as err:
raise Exception(
f'Error Message: {err.response["Error"]["Message"]}\n'
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}'
) from err
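The suite also drives a CLI-based client (AwsCliClient, imported at the top of the file). A sketch of capping retries for that path, assuming the AWS CLI honours the standard botocore environment variables:

import os

# In "standard" retry mode AWS_MAX_ATTEMPTS counts the initial call,
# so a value of 1 disables automatic retries for subsequent aws invocations.
os.environ["AWS_RETRY_MODE"] = "standard"
os.environ["AWS_MAX_ATTEMPTS"] = "1"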
@allure.step("Create bucket S3")
def create_bucket_s3(
    s3_client, object_lock_enabled_for_bucket: Optional[bool] = None, acl: Optional[str] = None
@@ -119,7 +38,6 @@ def create_bucket_s3(
        log_command_execution(f"Created S3 bucket {bucket_name}", s3_bucket)
        sleep(S3_SYNC_WAIT_TIME)
        return bucket_name
    except ClientError as err:
        raise Exception(
            f'Error Message: {err.response["Error"]["Message"]}\n'
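A usage sketch of create_bucket_s3 based on the signature shown above (the client comes from config_s3_client):

# The helper sleeps S3_SYNC_WAIT_TIME seconds before returning the generated bucket name.
bucket_name = create_bucket_s3(s3_client, object_lock_enabled_for_bucket=True)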
@@ -260,11 +178,11 @@ def put_bucket_acl_s3(
        params.update({"GrantWrite": grant_write})
    elif grant_read:
        params.update({"GrantRead": grant_read})
    try:
        response = s3_client.put_bucket_acl(**params)
        log_command_execution("S3 ACL bucket result", response)
        return response.get("Grants")
    except ClientError as err:
        raise Exception(
            f'Error Message: {err.response["Error"]["Message"]}\n'
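A usage sketch of the ACL helper; the positional arguments and the grantee string are assumptions, since the full signature and the grant format live outside this hunk:

# Hypothetical call: grant public read access and inspect the returned grants.
grants = put_bucket_acl_s3(
    s3_client,
    bucket_name,
    grant_read='uri="http://acs.amazonaws.com/groups/global/AllUsers"',
)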