[#253] Update S3 clients and permission matrixes

Signed-off-by: a.berezin <a.berezin@yadro.com>
Author: Andrey Berezin, 2024-06-28 15:18:20 +03:00
Parent: c9e4c2c7bb
Commit: 3a4204f2e4
5 changed files with 33 additions and 14 deletions


@@ -4,6 +4,6 @@ ALL_USERS_GROUP_READ_GRANT = {"Grantee": {"Type": "Group", "URI": ALL_USERS_GROU
 CANONICAL_USER_FULL_CONTROL_GRANT = {"Grantee": {"Type": "CanonicalUser"}, "Permission": "FULL_CONTROL"}
 # https://docs.aws.amazon.com/AmazonS3/latest/userguide/acl-overview.html#canned-acl
-PRIVATE_GRANTS = [CANONICAL_USER_FULL_CONTROL_GRANT]
-PUBLIC_READ_GRANTS = [CANONICAL_USER_FULL_CONTROL_GRANT, ALL_USERS_GROUP_READ_GRANT]
-PUBLIC_READ_WRITE_GRANTS = [CANONICAL_USER_FULL_CONTROL_GRANT, ALL_USERS_GROUP_WRITE_GRANT, ALL_USERS_GROUP_READ_GRANT]
+PRIVATE_GRANTS = []
+PUBLIC_READ_GRANTS = [ALL_USERS_GROUP_READ_GRANT]
+PUBLIC_READ_WRITE_GRANTS = [ALL_USERS_GROUP_WRITE_GRANT, ALL_USERS_GROUP_READ_GRANT]
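The constants above are expectation lists for the S3 canned ACLs; after this change they describe only the group grants, so the owner's FULL_CONTROL grant has to be asserted separately where it matters. A minimal sketch of how such a list might be checked against a get-bucket-acl response follows; the helper name and the matching rules are illustrative, not taken from this repository:

def assert_grants_present(acl_response: dict, expected_grants: list[dict]) -> None:
    # Keep only the fields the expectation lists describe (grantee Type/URI and
    # Permission); owner-specific fields such as ID or DisplayName vary per setup.
    actual = [
        {
            "Grantee": {k: v for k, v in grant["Grantee"].items() if k in ("Type", "URI")},
            "Permission": grant["Permission"],
        }
        for grant in acl_response.get("Grants", [])
    ]
    for expected in expected_grants:
        assert expected in actual, f"missing grant {expected} in {actual}"

For PRIVATE_GRANTS, which is now empty, a real check would additionally assert the absence of the all-users grants; that part is omitted in the sketch.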


@@ -1,7 +1,6 @@
 import json
 import logging
 import os
-import uuid
 from datetime import datetime
 from time import sleep
 from typing import Literal, Optional, Union
@@ -11,6 +10,7 @@ from frostfs_testlib.resources.common import ASSETS_DIR, MAX_REQUEST_ATTEMPTS, R
 from frostfs_testlib.s3.interfaces import S3ClientWrapper, VersioningStatus, _make_objs_dict
 from frostfs_testlib.shell import CommandOptions
 from frostfs_testlib.shell.local_shell import LocalShell
+from frostfs_testlib.utils import string_utils

 # TODO: Refactor this code to use shell instead of _cmd_run
 from frostfs_testlib.utils.cli_utils import _configure_aws_cli
@@ -68,7 +68,7 @@ class AwsCliClient(S3ClientWrapper):
         location_constraint: Optional[str] = None,
     ) -> str:
         if bucket is None:
-            bucket = str(uuid.uuid4())
+            bucket = string_utils.unique_name("bucket-")

         if object_lock_enabled_for_bucket is None:
             object_lock = ""
@@ -229,7 +229,7 @@ class AwsCliClient(S3ClientWrapper):
         if bucket is None:
             bucket = source_bucket
         if key is None:
-            key = os.path.join(os.getcwd(), str(uuid.uuid4()))
+            key = string_utils.unique_name("copy-object-")
         copy_source = f"{source_bucket}/{source_key}"

         cmd = (
@@ -315,7 +315,7 @@ class AwsCliClient(S3ClientWrapper):
         object_range: Optional[tuple[int, int]] = None,
         full_output: bool = False,
     ) -> dict | TestFile:
-        test_file = TestFile(os.path.join(os.getcwd(), ASSETS_DIR, str(uuid.uuid4())))
+        test_file = TestFile(os.path.join(os.getcwd(), ASSETS_DIR, string_utils.unique_name("dl-object-")))
         version = f" --version-id {version_id}" if version_id else ""
         cmd = (
             f"aws {self.common_flags} s3api get-object --bucket {bucket} --key {key} "


@@ -1,7 +1,6 @@
 import json
 import logging
 import os
-import uuid
 from datetime import datetime
 from functools import wraps
 from time import sleep
@@ -16,6 +15,7 @@ from mypy_boto3_s3 import S3Client
 from frostfs_testlib import reporter
 from frostfs_testlib.resources.common import ASSETS_DIR, MAX_REQUEST_ATTEMPTS, RETRY_MODE, S3_SYNC_WAIT_TIME
 from frostfs_testlib.s3.interfaces import S3ClientWrapper, VersioningStatus, _make_objs_dict
+from frostfs_testlib.utils import string_utils

 # TODO: Refactor this code to use shell instead of _cmd_run
 from frostfs_testlib.utils.cli_utils import _configure_aws_cli, log_command_execution
@@ -115,7 +115,7 @@ class Boto3ClientWrapper(S3ClientWrapper):
         location_constraint: Optional[str] = None,
     ) -> str:
         if bucket is None:
-            bucket = str(uuid.uuid4())
+            bucket = string_utils.unique_name("bucket-")

         params = {"Bucket": bucket}
         if object_lock_enabled_for_bucket is not None:
@@ -439,7 +439,7 @@ class Boto3ClientWrapper(S3ClientWrapper):
         if bucket is None:
             bucket = source_bucket
         if key is None:
-            key = os.path.join(os.getcwd(), str(uuid.uuid4()))
+            key = string_utils.unique_name("copy-object-")
         copy_source = f"{source_bucket}/{source_key}"

         params = {
@@ -476,7 +476,7 @@ class Boto3ClientWrapper(S3ClientWrapper):
         if full_output:
             return response

-        test_file = TestFile(os.path.join(os.getcwd(), ASSETS_DIR, str(uuid.uuid4())))
+        test_file = TestFile(os.path.join(os.getcwd(), ASSETS_DIR, string_utils.unique_name("dl-object-")))
         with open(test_file, "wb") as file:
             chunk = response["Body"].read(1024)
             while chunk:
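The tail of the hunk above is the start of a chunked download loop: the boto3 response body is streamed into the TestFile path 1 KiB at a time rather than being read into memory in one piece. A self-contained sketch of that pattern with assumed names, since the wrapper method itself is only partially visible here:

import os

def stream_body_to_file(body, path: str, chunk_size: int = 1024) -> str:
    # `body` is a file-like object such as the "Body" field of a boto3
    # get_object response; read and write it in fixed-size chunks.
    os.makedirs(os.path.dirname(path) or ".", exist_ok=True)
    with open(path, "wb") as file:
        chunk = body.read(chunk_size)
        while chunk:
            file.write(chunk)
            chunk = body.read(chunk_size)
    return path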


@@ -6,6 +6,7 @@ from typing import Any, Optional
 from frostfs_testlib import reporter
 from frostfs_testlib.resources.common import ASSETS_DIR
+from frostfs_testlib.utils import string_utils

 logger = logging.getLogger("NeoLogger")
@@ -41,7 +42,9 @@ def ensure_directory_opener(path, flags):
     return os.open(path, flags)


-@reporter.step("Generate file with size {size}")
+# TODO: Do not add {size} to title yet, since it produces dynamic info in top level steps
+# Use object_size dt in future as argument
+@reporter.step("Generate file")
 def generate_file(size: int) -> TestFile:
     """Generates a binary file with the specified size in bytes.
@@ -51,7 +54,7 @@ def generate_file(size: int) -> TestFile:
     Returns:
         The path to the generated file.
     """
-    test_file = TestFile(os.path.join(ASSETS_DIR, str(uuid.uuid4())))
+    test_file = TestFile(os.path.join(ASSETS_DIR, string_utils.unique_name("object-")))
     with open(test_file, "wb", opener=ensure_directory_opener) as file:
         file.write(os.urandom(size))
     logger.info(f"File with size {size} bytes has been generated: {test_file}")
@@ -59,7 +62,9 @@ def generate_file(size: int) -> TestFile:
     return test_file


-@reporter.step("Generate file with content of size {size}")
+# TODO: Do not add {size} to title yet, since it produces dynamic info in top level steps
+# Use object_size dt in future as argument
+@reporter.step("Generate file with content")
 def generate_file_with_content(
     size: int,
     file_path: Optional[str | TestFile] = None,
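The decorator change above only affects reporting: both helpers keep their size argument, but the step title no longer interpolates {size}, so top-level steps get a static name. A usage sketch, assuming the helpers live in a file_utils module under frostfs_testlib.utils (the exact module path is not shown in this diff):

# Assumed import path; only the helper names are confirmed by the hunks above.
from frostfs_testlib.utils.file_utils import generate_file, generate_file_with_content

small_file = generate_file(size=1024)                # reported simply as "Generate file"
seeded_file = generate_file_with_content(size=2048)  # reported as "Generate file with content"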


@@ -1,12 +1,26 @@
 import random
 import re
 import string
+from datetime import datetime

 ONLY_ASCII_LETTERS = string.ascii_letters
 DIGITS_AND_ASCII_LETTERS = string.ascii_letters + string.digits
 NON_DIGITS_AND_LETTERS = string.punctuation


+def unique_name(prefix: str = ""):
+    """
+    Generate unique short name of anything with prefix.
+    This should be unique in scope of multiple runs.
+
+    Args:
+        prefix: prefix for unique name generation
+
+    Returns:
+        unique name string
+    """
+    return f"{prefix}{hex(int(datetime.now().timestamp() * 1000000))}"
+
+
 def random_string(length: int = 5, source: str = ONLY_ASCII_LETTERS):
     """
     Generate random string from source letters list
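A usage sketch for the new helper: unique_name renders the current timestamp in microseconds as hex, so the generated names are short, roughly sortable by creation time, and unique as long as two calls do not land in the same microsecond. The printed values below are illustrative:

from frostfs_testlib.utils.string_utils import unique_name

print(unique_name("bucket-"))  # e.g. bucket-0x60d0a1b2c3d4e
print(unique_name())           # bare hex timestamp, e.g. 0x60d0a1b2c3d4f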