Move shared code to testlib

Signed-off-by: Andrey Berezin <a.berezin@yadro.com>
Andrey Berezin 2023-05-14 13:43:59 +03:00
parent d97a02d1d3
commit 997e768e92
69 changed files with 9213 additions and 64 deletions


@@ -0,0 +1,3 @@
from frostfs_testlib.s3.aws_cli_client import AwsCliClient
from frostfs_testlib.s3.boto3_client import Boto3ClientWrapper
from frostfs_testlib.s3.interfaces import S3ClientWrapper, VersioningStatus
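The package now re-exports both client wrappers. Purely as an illustration (not part of this commit; credentials and endpoint below are placeholders), a test could pick an implementation like this:

from frostfs_testlib.s3 import AwsCliClient, Boto3ClientWrapper, S3ClientWrapper

def make_s3_client(use_cli: bool) -> S3ClientWrapper:
    # Both wrappers implement the same S3ClientWrapper interface, so tests can switch freely.
    client_cls = AwsCliClient if use_cli else Boto3ClientWrapper
    return client_cls("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "http://localhost:8084")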


@@ -0,0 +1,754 @@
import json
import logging
import os
import uuid
from datetime import datetime
from time import sleep
from typing import Literal, Optional, Union
from frostfs_testlib.reporter import get_reporter
from frostfs_testlib.resources.common import (
ASSETS_DIR,
MAX_REQUEST_ATTEMPTS,
RETRY_MODE,
S3_SYNC_WAIT_TIME,
)
from frostfs_testlib.s3.interfaces import S3ClientWrapper, VersioningStatus, _make_objs_dict
# TODO: Refactor this code to use shell instead of _cmd_run
from frostfs_testlib.utils.cli_utils import _cmd_run, _configure_aws_cli
reporter = get_reporter()
logger = logging.getLogger("NeoLogger")
LONG_TIMEOUT = 240
class AwsCliClient(S3ClientWrapper):
# Flags that we use for all S3 commands: disable SSL verification (as we use self-signed
# certificate in devenv) and disable automatic pagination in CLI output
common_flags = "--no-verify-ssl --no-paginate"
s3gate_endpoint: str
@reporter.step_deco("Configure S3 client (aws cli)")
def __init__(self, access_key_id: str, secret_access_key: str, s3gate_endpoint: str) -> None:
self.s3gate_endpoint = s3gate_endpoint
try:
_configure_aws_cli("aws configure", access_key_id, secret_access_key)
_cmd_run(f"aws configure set max_attempts {MAX_REQUEST_ATTEMPTS}")
_cmd_run(f"aws configure set retry_mode {RETRY_MODE}")
except Exception as err:
raise RuntimeError("Error while configuring AwsCliClient") from err
@reporter.step_deco("Create bucket S3")
def create_bucket(
self,
bucket: Optional[str] = None,
object_lock_enabled_for_bucket: Optional[bool] = None,
acl: Optional[str] = None,
grant_write: Optional[str] = None,
grant_read: Optional[str] = None,
grant_full_control: Optional[str] = None,
location_constraint: Optional[str] = None,
) -> str:
if bucket is None:
bucket = str(uuid.uuid4())
if object_lock_enabled_for_bucket is None:
object_lock = ""
elif object_lock_enabled_for_bucket:
object_lock = " --object-lock-enabled-for-bucket"
else:
object_lock = " --no-object-lock-enabled-for-bucket"
cmd = (
f"aws {self.common_flags} s3api create-bucket --bucket {bucket} "
f"{object_lock} --endpoint {self.s3gate_endpoint}"
)
if acl:
cmd += f" --acl {acl}"
if grant_full_control:
cmd += f" --grant-full-control {grant_full_control}"
if grant_write:
cmd += f" --grant-write {grant_write}"
if grant_read:
cmd += f" --grant-read {grant_read}"
if location_constraint:
cmd += f" --create-bucket-configuration LocationConstraint={location_constraint}"
_cmd_run(cmd)
sleep(S3_SYNC_WAIT_TIME)
return bucket
@reporter.step_deco("List buckets S3")
def list_buckets(self) -> list[str]:
cmd = f"aws {self.common_flags} s3api list-buckets --endpoint {self.s3gate_endpoint}"
output = _cmd_run(cmd)
buckets_json = self._to_json(output)
return [bucket["Name"] for bucket in buckets_json["Buckets"]]
@reporter.step_deco("Delete bucket S3")
def delete_bucket(self, bucket: str) -> None:
cmd = f"aws {self.common_flags} s3api delete-bucket --bucket {bucket} --endpoint {self.s3gate_endpoint}"
_cmd_run(cmd, LONG_TIMEOUT)
sleep(S3_SYNC_WAIT_TIME)
@reporter.step_deco("Head bucket S3")
def head_bucket(self, bucket: str) -> None:
cmd = f"aws {self.common_flags} s3api head-bucket --bucket {bucket} --endpoint {self.s3gate_endpoint}"
_cmd_run(cmd)
@reporter.step_deco("Put bucket versioning status")
def put_bucket_versioning(self, bucket: str, status: VersioningStatus) -> None:
cmd = (
f"aws {self.common_flags} s3api put-bucket-versioning --bucket {bucket} "
f"--versioning-configuration Status={status.value} "
f"--endpoint {self.s3gate_endpoint}"
)
_cmd_run(cmd)
@reporter.step_deco("Get bucket versioning status")
def get_bucket_versioning_status(self, bucket: str) -> Literal["Enabled", "Suspended"]:
cmd = (
f"aws {self.common_flags} s3api get-bucket-versioning --bucket {bucket} "
f"--endpoint {self.s3gate_endpoint}"
)
output = _cmd_run(cmd)
response = self._to_json(output)
return response.get("Status")
@reporter.step_deco("Put bucket tagging")
def put_bucket_tagging(self, bucket: str, tags: list) -> None:
tags_json = {
"TagSet": [{"Key": tag_key, "Value": tag_value} for tag_key, tag_value in tags]
}
cmd = (
f"aws {self.common_flags} s3api put-bucket-tagging --bucket {bucket} "
f"--tagging '{json.dumps(tags_json)}' --endpoint {self.s3gate_endpoint}"
)
_cmd_run(cmd)
@reporter.step_deco("Get bucket tagging")
def get_bucket_tagging(self, bucket: str) -> list:
cmd = (
f"aws {self.common_flags} s3api get-bucket-tagging --bucket {bucket} "
f"--endpoint {self.s3gate_endpoint}"
)
output = _cmd_run(cmd)
response = self._to_json(output)
return response.get("TagSet")
@reporter.step_deco("Get bucket acl")
def get_bucket_acl(self, bucket: str) -> list:
cmd = (
f"aws {self.common_flags} s3api get-bucket-acl --bucket {bucket} "
f"--endpoint {self.s3gate_endpoint}"
)
output = _cmd_run(cmd)
response = self._to_json(output)
return response.get("Grants")
@reporter.step_deco("Get bucket location")
def get_bucket_location(self, bucket: str) -> str:
cmd = (
f"aws {self.common_flags} s3api get-bucket-location --bucket {bucket} "
f"--endpoint {self.s3gate_endpoint}"
)
output = _cmd_run(cmd)
response = self._to_json(output)
return response.get("LocationConstraint")
@reporter.step_deco("List objects S3")
def list_objects(self, bucket: str, full_output: bool = False) -> Union[dict, list[str]]:
cmd = (
f"aws {self.common_flags} s3api list-objects --bucket {bucket} "
f"--endpoint {self.s3gate_endpoint}"
)
output = _cmd_run(cmd)
response = self._to_json(output)
obj_list = [obj["Key"] for obj in response.get("Contents", [])]
logger.info(f"Found s3 objects: {obj_list}")
return response if full_output else obj_list
@reporter.step_deco("List objects S3 v2")
def list_objects_v2(self, bucket: str, full_output: bool = False) -> Union[dict, list[str]]:
cmd = (
f"aws {self.common_flags} s3api list-objects-v2 --bucket {bucket} "
f"--endpoint {self.s3gate_endpoint}"
)
output = _cmd_run(cmd)
response = self._to_json(output)
obj_list = [obj["Key"] for obj in response.get("Contents", [])]
logger.info(f"Found s3 objects: {obj_list}")
return response if full_output else obj_list
@reporter.step_deco("List objects versions S3")
def list_objects_versions(self, bucket: str, full_output: bool = False) -> dict:
cmd = (
f"aws {self.common_flags} s3api list-object-versions --bucket {bucket} "
f"--endpoint {self.s3gate_endpoint}"
)
output = _cmd_run(cmd)
response = self._to_json(output)
return response if full_output else response.get("Versions", [])
@reporter.step_deco("List objects delete markers S3")
def list_delete_markers(self, bucket: str, full_output: bool = False) -> list:
cmd = (
f"aws {self.common_flags} s3api list-object-versions --bucket {bucket} "
f"--endpoint {self.s3gate_endpoint}"
)
output = _cmd_run(cmd)
response = self._to_json(output)
return response if full_output else response.get("DeleteMarkers", [])
@reporter.step_deco("Copy object S3")
def copy_object(
self,
source_bucket: str,
source_key: str,
bucket: Optional[str] = None,
key: Optional[str] = None,
acl: Optional[str] = None,
metadata_directive: Optional[Literal["COPY", "REPLACE"]] = None,
metadata: Optional[dict] = None,
tagging_directive: Optional[Literal["COPY", "REPLACE"]] = None,
tagging: Optional[str] = None,
) -> str:
if bucket is None:
bucket = source_bucket
if key is None:
key = os.path.join(os.getcwd(), str(uuid.uuid4()))
copy_source = f"{source_bucket}/{source_key}"
cmd = (
f"aws {self.common_flags} s3api copy-object --copy-source {copy_source} "
f"--bucket {bucket} --key {key} --endpoint {self.s3gate_endpoint}"
)
if acl:
cmd += f" --acl {acl}"
if metadata_directive:
cmd += f" --metadata-directive {metadata_directive}"
if metadata:
cmd += " --metadata "
for meta_key, value in metadata.items():
cmd += f" {meta_key}={value}"
if tagging_directive:
cmd += f" --tagging-directive {tagging_directive}"
if tagging:
cmd += f" --tagging {tagging}"
_cmd_run(cmd, LONG_TIMEOUT)
return key
@reporter.step_deco("Put object S3")
def put_object(
self,
bucket: str,
filepath: str,
key: Optional[str] = None,
metadata: Optional[dict] = None,
tagging: Optional[str] = None,
acl: Optional[str] = None,
object_lock_mode: Optional[str] = None,
object_lock_retain_until_date: Optional[datetime] = None,
object_lock_legal_hold_status: Optional[str] = None,
grant_full_control: Optional[str] = None,
grant_read: Optional[str] = None,
) -> str:
if key is None:
key = os.path.basename(filepath)
cmd = (
f"aws {self.common_flags} s3api put-object --bucket {bucket} --key {key} "
f"--body {filepath} --endpoint {self.s3gate_endpoint}"
)
if metadata:
cmd += " --metadata"
for key, value in metadata.items():
cmd += f" {key}={value}"
if tagging:
cmd += f" --tagging '{tagging}'"
if acl:
cmd += f" --acl {acl}"
if object_lock_mode:
cmd += f" --object-lock-mode {object_lock_mode}"
if object_lock_retain_until_date:
cmd += f' --object-lock-retain-until-date "{object_lock_retain_until_date}"'
if object_lock_legal_hold_status:
cmd += f" --object-lock-legal-hold-status {object_lock_legal_hold_status}"
if grant_full_control:
cmd += f" --grant-full-control '{grant_full_control}'"
if grant_read:
cmd += f" --grant-read {grant_read}"
output = _cmd_run(cmd, LONG_TIMEOUT)
response = self._to_json(output)
return response.get("VersionId")
@reporter.step_deco("Head object S3")
def head_object(self, bucket: str, key: str, version_id: Optional[str] = None) -> dict:
version = f" --version-id {version_id}" if version_id else ""
cmd = (
f"aws {self.common_flags} s3api head-object --bucket {bucket} --key {key} "
f"{version} --endpoint {self.s3gate_endpoint}"
)
output = _cmd_run(cmd)
response = self._to_json(output)
return response
@reporter.step_deco("Get object S3")
def get_object(
self,
bucket: str,
key: str,
version_id: Optional[str] = None,
object_range: Optional[tuple[int, int]] = None,
full_output: bool = False,
) -> Union[dict, str]:
file_path = os.path.join(os.getcwd(), ASSETS_DIR, str(uuid.uuid4()))
version = f" --version-id {version_id}" if version_id else ""
cmd = (
f"aws {self.common_flags} s3api get-object --bucket {bucket} --key {key} "
f"{version} {file_path} --endpoint {self.s3gate_endpoint}"
)
if object_range:
cmd += f" --range bytes={object_range[0]}-{object_range[1]}"
output = _cmd_run(cmd)
response = self._to_json(output)
return response if full_output else file_path
@reporter.step_deco("Get object ACL")
def get_object_acl(self, bucket: str, key: str, version_id: Optional[str] = None) -> list:
version = f" --version-id {version_id}" if version_id else ""
cmd = (
f"aws {self.common_flags} s3api get-object-acl --bucket {bucket} --key {key} "
f"{version} --endpoint {self.s3gate_endpoint}"
)
output = _cmd_run(cmd)
response = self._to_json(output)
return response.get("Grants")
@reporter.step_deco("Put object ACL")
def put_object_acl(
self,
bucket: str,
key: str,
acl: Optional[str] = None,
grant_write: Optional[str] = None,
grant_read: Optional[str] = None,
) -> list:
cmd = (
f"aws {self.common_flags} s3api put-object-acl --bucket {bucket} --key {key} "
f" --endpoint {self.s3gate_endpoint}"
)
if acl:
cmd += f" --acl {acl}"
if grant_write:
cmd += f" --grant-write {grant_write}"
if grant_read:
cmd += f" --grant-read {grant_read}"
output = _cmd_run(cmd)
response = self._to_json(output)
return response.get("Grants")
@reporter.step_deco("Put bucket ACL")
def put_bucket_acl(
self,
bucket: str,
acl: Optional[str] = None,
grant_write: Optional[str] = None,
grant_read: Optional[str] = None,
) -> None:
cmd = (
f"aws {self.common_flags} s3api put-bucket-acl --bucket {bucket} "
f" --endpoint {self.s3gate_endpoint}"
)
if acl:
cmd += f" --acl {acl}"
if grant_write:
cmd += f" --grant-write {grant_write}"
if grant_read:
cmd += f" --grant-read {grant_read}"
_cmd_run(cmd)
@reporter.step_deco("Delete objects S3")
def delete_objects(self, bucket: str, keys: list[str]) -> dict:
file_path = os.path.join(os.getcwd(), ASSETS_DIR, "delete.json")
delete_structure = json.dumps(_make_objs_dict(keys))
with open(file_path, "w") as out_file:
out_file.write(delete_structure)
logger.info(f"Input file for delete-objects: {delete_structure}")
cmd = (
f"aws {self.common_flags} s3api delete-objects --bucket {bucket} "
f"--delete file://{file_path} --endpoint {self.s3gate_endpoint}"
)
output = _cmd_run(cmd, LONG_TIMEOUT)
response = self._to_json(output)
sleep(S3_SYNC_WAIT_TIME)
return response
@reporter.step_deco("Delete object S3")
def delete_object(self, bucket: str, key: str, version_id: Optional[str] = None) -> dict:
version = f" --version-id {version_id}" if version_id else ""
cmd = (
f"aws {self.common_flags} s3api delete-object --bucket {bucket} "
f"--key {key} {version} --endpoint {self.s3gate_endpoint}"
)
output = _cmd_run(cmd, LONG_TIMEOUT)
sleep(S3_SYNC_WAIT_TIME)
return self._to_json(output)
@reporter.step_deco("Delete object versions S3")
def delete_object_versions(self, bucket: str, object_versions: list) -> dict:
# Build deletion list in S3 format
delete_list = {
"Objects": [
{
"Key": object_version["Key"],
"VersionId": object_version["VersionId"],
}
for object_version in object_versions
]
}
file_path = os.path.join(os.getcwd(), ASSETS_DIR, "delete.json")
delete_structure = json.dumps(delete_list)
with open(file_path, "w") as out_file:
out_file.write(delete_structure)
logger.info(f"Input file for delete-objects: {delete_structure}")
cmd = (
f"aws {self.common_flags} s3api delete-objects --bucket {bucket} "
f"--delete file://{file_path} --endpoint {self.s3gate_endpoint}"
)
output = _cmd_run(cmd, LONG_TIMEOUT)
sleep(S3_SYNC_WAIT_TIME)
return self._to_json(output)
@reporter.step_deco("Delete object versions S3 without delete markers")
def delete_object_versions_without_dm(self, bucket: str, object_versions: list) -> None:
# Delete objects without creating delete markers
for object_version in object_versions:
self.delete_object(
bucket=bucket, key=object_version["Key"], version_id=object_version["VersionId"]
)
@reporter.step_deco("Get object attributes")
def get_object_attributes(
self,
bucket: str,
key: str,
attributes: list[str],
version_id: str = "",
max_parts: int = 0,
part_number: int = 0,
full_output: bool = True,
) -> dict:
attrs = ",".join(attributes)
version = f" --version-id {version_id}" if version_id else ""
parts = f"--max-parts {max_parts}" if max_parts else ""
part_number_str = f"--part-number-marker {part_number}" if part_number else ""
cmd = (
f"aws {self.common_flags} s3api get-object-attributes --bucket {bucket} "
f"--key {key} {version} {parts} {part_number_str} --object-attributes {attrs} "
f"--endpoint {self.s3gate_endpoint}"
)
output = _cmd_run(cmd)
response = self._to_json(output)
for attr in attributes:
assert attr in response, f"Expected attribute {attr} in {response}"
if full_output:
return response
else:
return response.get(attributes[0])
@reporter.step_deco("Get bucket policy")
def get_bucket_policy(self, bucket: str) -> str:
cmd = (
f"aws {self.common_flags} s3api get-bucket-policy --bucket {bucket} "
f"--endpoint {self.s3gate_endpoint}"
)
output = _cmd_run(cmd)
response = self._to_json(output)
return response.get("Policy")
@reporter.step_deco("Put bucket policy")
def put_bucket_policy(self, bucket: str, policy: dict) -> None:
# Leaving it as it was in the test repo. Double dumps to escape the resulting string
# Example:
# policy = {"a": 1}
# json.dumps(policy) => {"a": 1}
# json.dumps(json.dumps(policy)) => "{\"a\": 1}"
# TODO: update this
dumped_policy = json.dumps(json.dumps(policy))
cmd = (
f"aws {self.common_flags} s3api put-bucket-policy --bucket {bucket} "
f"--policy {dumped_policy} --endpoint {self.s3gate_endpoint}"
)
_cmd_run(cmd)
@reporter.step_deco("Get bucket cors")
def get_bucket_cors(self, bucket: str) -> dict:
cmd = (
f"aws {self.common_flags} s3api get-bucket-cors --bucket {bucket} "
f"--endpoint {self.s3gate_endpoint}"
)
output = _cmd_run(cmd)
response = self._to_json(output)
return response.get("CORSRules")
@reporter.step_deco("Put bucket cors")
def put_bucket_cors(self, bucket: str, cors_configuration: dict) -> None:
cmd = (
f"aws {self.common_flags} s3api put-bucket-cors --bucket {bucket} "
f"--cors-configuration '{json.dumps(cors_configuration)}' --endpoint {self.s3gate_endpoint}"
)
_cmd_run(cmd)
@reporter.step_deco("Delete bucket cors")
def delete_bucket_cors(self, bucket: str) -> None:
cmd = (
f"aws {self.common_flags} s3api delete-bucket-cors --bucket {bucket} "
f"--endpoint {self.s3gate_endpoint}"
)
_cmd_run(cmd)
@reporter.step_deco("Delete bucket tagging")
def delete_bucket_tagging(self, bucket: str) -> None:
cmd = (
f"aws {self.common_flags} s3api delete-bucket-tagging --bucket {bucket} "
f"--endpoint {self.s3gate_endpoint}"
)
_cmd_run(cmd)
@reporter.step_deco("Put object retention")
def put_object_retention(
self,
bucket: str,
key: str,
retention: dict,
version_id: Optional[str] = None,
bypass_governance_retention: Optional[bool] = None,
) -> None:
version = f" --version-id {version_id}" if version_id else ""
cmd = (
f"aws {self.common_flags} s3api put-object-retention --bucket {bucket} --key {key} "
f"{version} --retention '{json.dumps(retention, indent=4, sort_keys=True, default=str)}' --endpoint {self.s3gate_endpoint}"
)
if bypass_governance_retention is not None:
cmd += " --bypass-governance-retention"
_cmd_run(cmd)
@reporter.step_deco("Put object legal hold")
def put_object_legal_hold(
self,
bucket: str,
key: str,
legal_hold_status: Literal["ON", "OFF"],
version_id: Optional[str] = None,
) -> None:
version = f" --version-id {version_id}" if version_id else ""
legal_hold = json.dumps({"Status": legal_hold_status})
cmd = (
f"aws {self.common_flags} s3api put-object-legal-hold --bucket {bucket} --key {key} "
f"{version} --legal-hold '{legal_hold}' --endpoint {self.s3gate_endpoint}"
)
_cmd_run(cmd)
@reporter.step_deco("Put object tagging")
def put_object_tagging(self, bucket: str, key: str, tags: list) -> None:
tags = [{"Key": tag_key, "Value": tag_value} for tag_key, tag_value in tags]
tagging = {"TagSet": tags}
cmd = (
f"aws {self.common_flags} s3api put-object-tagging --bucket {bucket} --key {key} "
f"--tagging '{json.dumps(tagging)}' --endpoint {self.s3gate_endpoint}"
)
_cmd_run(cmd)
@reporter.step_deco("Get object tagging")
def get_object_tagging(self, bucket: str, key: str, version_id: Optional[str] = None) -> list:
version = f" --version-id {version_id}" if version_id else ""
cmd = (
f"aws {self.common_flags} s3api get-object-tagging --bucket {bucket} --key {key} "
f"{version} --endpoint {self.s3gate_endpoint}"
)
output = _cmd_run(cmd)
response = self._to_json(output)
return response.get("TagSet")
@reporter.step_deco("Delete object tagging")
def delete_object_tagging(self, bucket: str, key: str) -> None:
cmd = (
f"aws {self.common_flags} s3api delete-object-tagging --bucket {bucket} "
f"--key {key} --endpoint {self.s3gate_endpoint}"
)
_cmd_run(cmd)
@reporter.step_deco("Sync directory S3")
def sync(
self,
bucket: str,
dir_path: str,
acl: Optional[str] = None,
metadata: Optional[dict] = None,
) -> dict:
cmd = (
f"aws {self.common_flags} s3 sync {dir_path} s3://{bucket} "
f"--endpoint-url {self.s3gate_endpoint}"
)
if metadata:
cmd += " --metadata"
for key, value in metadata.items():
cmd += f" {key}={value}"
if acl:
cmd += f" --acl {acl}"
output = _cmd_run(cmd, LONG_TIMEOUT)
return self._to_json(output)
@reporter.step_deco("CP directory S3")
def cp(
self,
bucket: str,
dir_path: str,
acl: Optional[str] = None,
metadata: Optional[dict] = None,
) -> dict:
cmd = (
f"aws {self.common_flags} s3 cp {dir_path} s3://{bucket} "
f"--endpoint-url {self.s3gate_endpoint} --recursive"
)
if metadata:
cmd += " --metadata"
for key, value in metadata.items():
cmd += f" {key}={value}"
if acl:
cmd += f" --acl {acl}"
output = _cmd_run(cmd, LONG_TIMEOUT)
return self._to_json(output)
@reporter.step_deco("Create multipart upload S3")
def create_multipart_upload(self, bucket: str, key: str) -> str:
cmd = (
f"aws {self.common_flags} s3api create-multipart-upload --bucket {bucket} "
f"--key {key} --endpoint-url {self.s3gate_endpoint}"
)
output = _cmd_run(cmd)
response = self._to_json(output)
assert response.get("UploadId"), f"Expected UploadId in response:\n{response}"
return response["UploadId"]
@reporter.step_deco("List multipart uploads S3")
def list_multipart_uploads(self, bucket: str) -> Optional[list[dict]]:
cmd = (
f"aws {self.common_flags} s3api list-multipart-uploads --bucket {bucket} "
f"--endpoint-url {self.s3gate_endpoint}"
)
output = _cmd_run(cmd)
response = self._to_json(output)
return response.get("Uploads")
@reporter.step_deco("Abort multipart upload S3")
def abort_multipart_upload(self, bucket: str, key: str, upload_id: str) -> None:
cmd = (
f"aws {self.common_flags} s3api abort-multipart-upload --bucket {bucket} "
f"--key {key} --upload-id {upload_id} --endpoint-url {self.s3gate_endpoint}"
)
_cmd_run(cmd)
@reporter.step_deco("Upload part S3")
def upload_part(
self, bucket: str, key: str, upload_id: str, part_num: int, filepath: str
) -> str:
cmd = (
f"aws {self.common_flags} s3api upload-part --bucket {bucket} --key {key} "
f"--upload-id {upload_id} --part-number {part_num} --body {filepath} "
f"--endpoint-url {self.s3gate_endpoint}"
)
output = _cmd_run(cmd, LONG_TIMEOUT)
response = self._to_json(output)
assert response.get("ETag"), f"Expected ETag in response:\n{response}"
return response["ETag"]
@reporter.step_deco("Upload copy part S3")
def upload_part_copy(
self, bucket: str, key: str, upload_id: str, part_num: int, copy_source: str
) -> str:
cmd = (
f"aws {self.common_flags} s3api upload-part-copy --bucket {bucket} --key {key} "
f"--upload-id {upload_id} --part-number {part_num} --copy-source {copy_source} "
f"--endpoint-url {self.s3gate_endpoint}"
)
output = _cmd_run(cmd, LONG_TIMEOUT)
response = self._to_json(output)
assert response.get("CopyPartResult", []).get(
"ETag"
), f"Expected ETag in response:\n{response}"
return response["CopyPartResult"]["ETag"]
@reporter.step_deco("List parts S3")
def list_parts(self, bucket: str, key: str, upload_id: str) -> list[dict]:
cmd = (
f"aws {self.common_flags} s3api list-parts --bucket {bucket} --key {key} "
f"--upload-id {upload_id} --endpoint-url {self.s3gate_endpoint}"
)
output = _cmd_run(cmd)
response = self._to_json(output)
assert response.get("Parts"), f"Expected Parts in response:\n{response}"
return response["Parts"]
@reporter.step_deco("Complete multipart upload S3")
def complete_multipart_upload(self, bucket: str, key: str, upload_id: str, parts: list) -> None:
file_path = os.path.join(os.getcwd(), ASSETS_DIR, "parts.json")
parts_dict = {"Parts": [{"ETag": etag, "PartNumber": part_num} for part_num, etag in parts]}
with open(file_path, "w") as out_file:
out_file.write(json.dumps(parts_dict))
logger.info(f"Input file for complete-multipart-upload: {json.dumps(parts_dict)}")
cmd = (
f"aws {self.common_flags} s3api complete-multipart-upload --bucket {bucket} "
f"--key {key} --upload-id {upload_id} --multipart-upload file://{file_path} "
f"--endpoint-url {self.s3gate_endpoint}"
)
_cmd_run(cmd)
@reporter.step_deco("Put object lock configuration")
def put_object_lock_configuration(self, bucket: str, configuration: dict) -> dict:
cmd = (
f"aws {self.common_flags} s3api put-object-lock-configuration --bucket {bucket} "
f"--object-lock-configuration '{json.dumps(configuration)}' --endpoint-url {self.s3gate_endpoint}"
)
output = _cmd_run(cmd)
return self._to_json(output)
@reporter.step_deco("Get object lock configuration")
def get_object_lock_configuration(self, bucket: str) -> dict:
cmd = (
f"aws {self.common_flags} s3api get-object-lock-configuration --bucket {bucket} "
f"--endpoint-url {self.s3gate_endpoint}"
)
output = _cmd_run(cmd)
response = self._to_json(output)
return response.get("ObjectLockConfiguration")
@staticmethod
def _to_json(output: str) -> dict:
json_output = {}
if "{" not in output and "}" not in output:
logger.warning(f"Could not parse json from output {output}")
return json_output
json_output = json.loads(output[output.index("{") :])
return json_output
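For illustration only (not part of the diff; credentials, endpoint and the local file path are placeholders), a minimal flow through the CLI-backed wrapper above might look like:

from frostfs_testlib.s3.aws_cli_client import AwsCliClient
from frostfs_testlib.s3.interfaces import VersioningStatus

client = AwsCliClient("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "http://localhost:8084")
bucket = client.create_bucket(object_lock_enabled_for_bucket=True)
client.put_bucket_versioning(bucket, VersioningStatus.ENABLED)
version_id = client.put_object(bucket, "/tmp/example.bin")
assert version_id in [v["VersionId"] for v in client.list_objects_versions(bucket)]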


@@ -0,0 +1,661 @@
import json
import logging
import os
import uuid
from datetime import datetime
from functools import wraps
from time import sleep
from typing import Literal, Optional, Union
import boto3
import urllib3
from botocore.config import Config
from botocore.exceptions import ClientError
from mypy_boto3_s3 import S3Client
from frostfs_testlib.reporter import get_reporter
from frostfs_testlib.resources.common import (
ASSETS_DIR,
MAX_REQUEST_ATTEMPTS,
RETRY_MODE,
S3_SYNC_WAIT_TIME,
)
from frostfs_testlib.s3.interfaces import S3ClientWrapper, VersioningStatus, _make_objs_dict
from frostfs_testlib.utils.cli_utils import log_command_execution
reporter = get_reporter()
logger = logging.getLogger("NeoLogger")
# Disable warnings on self-signed certificate which the
# boto library produces on requests to S3-gate in dev-env
urllib3.disable_warnings()
def report_error(func):
@wraps(func)
def deco(*a, **kw):
try:
return func(*a, **kw)
except ClientError as err:
log_command_execution("Result", str(err))
raise
return deco
class Boto3ClientWrapper(S3ClientWrapper):
@reporter.step_deco("Configure S3 client (boto3)")
@report_error
def __init__(self, access_key_id: str, secret_access_key: str, s3gate_endpoint: str) -> None:
session = boto3.Session()
config = Config(
retries={
"max_attempts": MAX_REQUEST_ATTEMPTS,
"mode": RETRY_MODE,
}
)
self.boto3_client: S3Client = session.client(
service_name="s3",
aws_access_key_id=access_key_id,
aws_secret_access_key=secret_access_key,
config=config,
endpoint_url=s3gate_endpoint,
verify=False,
)
def _to_s3_param(self, param: str):
replacement_map = {
"Acl": "ACL",
"Cors": "CORS",
"_": "",
}
result = param.title()
for find, replace in replacement_map.items():
result = result.replace(find, replace)
return result
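# Illustrative mapping produced by _to_s3_param (not exhaustive):
#   "version_id"         -> "VersionId"
#   "acl"                -> "ACL"
#   "object_lock_mode"   -> "ObjectLockMode"
#   "grant_full_control" -> "GrantFullControl"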
# BUCKET METHODS #
@reporter.step_deco("Create bucket S3")
@report_error
def create_bucket(
self,
bucket: Optional[str] = None,
object_lock_enabled_for_bucket: Optional[bool] = None,
acl: Optional[str] = None,
grant_write: Optional[str] = None,
grant_read: Optional[str] = None,
grant_full_control: Optional[str] = None,
location_constraint: Optional[str] = None,
) -> str:
if bucket is None:
bucket = str(uuid.uuid4())
params = {"Bucket": bucket}
if object_lock_enabled_for_bucket is not None:
params.update({"ObjectLockEnabledForBucket": object_lock_enabled_for_bucket})
if acl is not None:
params.update({"ACL": acl})
elif grant_write or grant_read or grant_full_control:
if grant_write:
params.update({"GrantWrite": grant_write})
elif grant_read:
params.update({"GrantRead": grant_read})
elif grant_full_control:
params.update({"GrantFullControl": grant_full_control})
if location_constraint:
params.update(
{"CreateBucketConfiguration": {"LocationConstraint": location_constraint}}
)
s3_bucket = self.boto3_client.create_bucket(**params)
log_command_execution(f"Created S3 bucket {bucket}", s3_bucket)
sleep(S3_SYNC_WAIT_TIME)
return bucket
@reporter.step_deco("List buckets S3")
@report_error
def list_buckets(self) -> list[str]:
found_buckets = []
response = self.boto3_client.list_buckets()
log_command_execution("S3 List buckets result", response)
for bucket in response["Buckets"]:
found_buckets.append(bucket["Name"])
return found_buckets
@reporter.step_deco("Delete bucket S3")
@report_error
def delete_bucket(self, bucket: str) -> None:
response = self.boto3_client.delete_bucket(Bucket=bucket)
log_command_execution("S3 Delete bucket result", response)
sleep(S3_SYNC_WAIT_TIME)
@reporter.step_deco("Head bucket S3")
@report_error
def head_bucket(self, bucket: str) -> None:
response = self.boto3_client.head_bucket(Bucket=bucket)
log_command_execution("S3 Head bucket result", response)
@reporter.step_deco("Put bucket versioning status")
@report_error
def put_bucket_versioning(self, bucket: str, status: VersioningStatus) -> None:
response = self.boto3_client.put_bucket_versioning(
Bucket=bucket, VersioningConfiguration={"Status": status.value}
)
log_command_execution("S3 Set bucket versioning to", response)
@reporter.step_deco("Get bucket versioning status")
@report_error
def get_bucket_versioning_status(self, bucket: str) -> Literal["Enabled", "Suspended"]:
response = self.boto3_client.get_bucket_versioning(Bucket=bucket)
status = response.get("Status")
log_command_execution("S3 Got bucket versioning status", response)
return status
@reporter.step_deco("Put bucket tagging")
@report_error
def put_bucket_tagging(self, bucket: str, tags: list) -> None:
tags = [{"Key": tag_key, "Value": tag_value} for tag_key, tag_value in tags]
tagging = {"TagSet": tags}
response = self.boto3_client.put_bucket_tagging(Bucket=bucket, Tagging=tagging)
log_command_execution("S3 Put bucket tagging", response)
@reporter.step_deco("Get bucket tagging")
@report_error
def get_bucket_tagging(self, bucket: str) -> list:
response = self.boto3_client.get_bucket_tagging(Bucket=bucket)
log_command_execution("S3 Get bucket tagging", response)
return response.get("TagSet")
@reporter.step_deco("Get bucket acl")
@report_error
def get_bucket_acl(self, bucket: str) -> list:
response = self.boto3_client.get_bucket_acl(Bucket=bucket)
log_command_execution("S3 Get bucket acl", response)
return response.get("Grants")
@reporter.step_deco("Delete bucket tagging")
@report_error
def delete_bucket_tagging(self, bucket: str) -> None:
response = self.boto3_client.delete_bucket_tagging(Bucket=bucket)
log_command_execution("S3 Delete bucket tagging", response)
@reporter.step_deco("Put bucket ACL")
@report_error
def put_bucket_acl(
self,
bucket: str,
acl: Optional[str] = None,
grant_write: Optional[str] = None,
grant_read: Optional[str] = None,
) -> None:
params = {
self._to_s3_param(param): value
for param, value in locals().items()
if param not in ["self"] and value is not None
}
response = self.boto3_client.put_bucket_acl(**params)
log_command_execution("S3 ACL bucket result", response)
@reporter.step_deco("Put object lock configuration")
@report_error
def put_object_lock_configuration(self, bucket: str, configuration: dict) -> dict:
response = self.boto3_client.put_object_lock_configuration(
Bucket=bucket, ObjectLockConfiguration=configuration
)
log_command_execution("S3 put_object_lock_configuration result", response)
return response
@reporter.step_deco("Get object lock configuration")
@report_error
def get_object_lock_configuration(self, bucket: str) -> dict:
response = self.boto3_client.get_object_lock_configuration(Bucket=bucket)
log_command_execution("S3 get_object_lock_configuration result", response)
return response.get("ObjectLockConfiguration")
@reporter.step_deco("Get bucket policy")
@report_error
def get_bucket_policy(self, bucket: str) -> str:
response = self.boto3_client.get_bucket_policy(Bucket=bucket)
log_command_execution("S3 get_bucket_policy result", response)
return response.get("Policy")
@reporter.step_deco("Put bucket policy")
@report_error
def put_bucket_policy(self, bucket: str, policy: dict) -> None:
response = self.boto3_client.put_bucket_policy(Bucket=bucket, Policy=json.dumps(policy))
log_command_execution("S3 put_bucket_policy result", response)
return response
@reporter.step_deco("Get bucket cors")
@report_error
def get_bucket_cors(self, bucket: str) -> dict:
response = self.boto3_client.get_bucket_cors(Bucket=bucket)
log_command_execution("S3 get_bucket_cors result", response)
return response.get("CORSRules")
@reporter.step_deco("Get bucket location")
@report_error
def get_bucket_location(self, bucket: str) -> str:
response = self.boto3_client.get_bucket_location(Bucket=bucket)
log_command_execution("S3 get_bucket_location result", response)
return response.get("LocationConstraint")
@reporter.step_deco("Put bucket cors")
@report_error
def put_bucket_cors(self, bucket: str, cors_configuration: dict) -> None:
response = self.boto3_client.put_bucket_cors(
Bucket=bucket, CORSConfiguration=cors_configuration
)
log_command_execution("S3 put_bucket_cors result", response)
return response
@reporter.step_deco("Delete bucket cors")
@report_error
def delete_bucket_cors(self, bucket: str) -> None:
response = self.boto3_client.delete_bucket_cors(Bucket=bucket)
log_command_execution("S3 delete_bucket_cors result", response)
# END OF BUCKET METHODS #
# OBJECT METHODS #
@reporter.step_deco("List objects S3 v2")
@report_error
def list_objects_v2(self, bucket: str, full_output: bool = False) -> Union[dict, list[str]]:
response = self.boto3_client.list_objects_v2(Bucket=bucket)
log_command_execution("S3 v2 List objects result", response)
obj_list = [obj["Key"] for obj in response.get("Contents", [])]
logger.info(f"Found s3 objects: {obj_list}")
return response if full_output else obj_list
@reporter.step_deco("List objects S3")
@report_error
def list_objects(self, bucket: str, full_output: bool = False) -> Union[dict, list[str]]:
response = self.boto3_client.list_objects(Bucket=bucket)
log_command_execution("S3 List objects result", response)
obj_list = [obj["Key"] for obj in response.get("Contents", [])]
logger.info(f"Found s3 objects: {obj_list}")
return response if full_output else obj_list
@reporter.step_deco("List objects versions S3")
@report_error
def list_objects_versions(self, bucket: str, full_output: bool = False) -> dict:
response = self.boto3_client.list_object_versions(Bucket=bucket)
log_command_execution("S3 List objects versions result", response)
return response if full_output else response.get("Versions", [])
@reporter.step_deco("List objects delete markers S3")
@report_error
def list_delete_markers(self, bucket: str, full_output: bool = False) -> list:
response = self.boto3_client.list_object_versions(Bucket=bucket)
log_command_execution("S3 List objects delete markers result", response)
return response if full_output else response.get("DeleteMarkers", [])
@reporter.step_deco("Put object S3")
@report_error
def put_object(
self,
bucket: str,
filepath: str,
key: Optional[str] = None,
metadata: Optional[dict] = None,
tagging: Optional[str] = None,
acl: Optional[str] = None,
object_lock_mode: Optional[str] = None,
object_lock_retain_until_date: Optional[datetime] = None,
object_lock_legal_hold_status: Optional[str] = None,
grant_full_control: Optional[str] = None,
grant_read: Optional[str] = None,
) -> str:
if key is None:
key = os.path.basename(filepath)
with open(filepath, "rb") as put_file:
body = put_file.read()
params = {
self._to_s3_param(param): value
for param, value in locals().items()
if param not in ["self", "filepath", "put_file"] and value is not None
}
response = self.boto3_client.put_object(**params)
log_command_execution("S3 Put object result", response)
return response.get("VersionId")
@reporter.step_deco("Head object S3")
@report_error
def head_object(self, bucket: str, key: str, version_id: Optional[str] = None) -> dict:
params = {
self._to_s3_param(param): value
for param, value in locals().items()
if param not in ["self"] and value is not None
}
response = self.boto3_client.head_object(**params)
log_command_execution("S3 Head object result", response)
return response
@reporter.step_deco("Delete object S3")
@report_error
def delete_object(self, bucket: str, key: str, version_id: Optional[str] = None) -> dict:
params = {
self._to_s3_param(param): value
for param, value in locals().items()
if param not in ["self"] and value is not None
}
response = self.boto3_client.delete_object(**params)
log_command_execution("S3 Delete object result", response)
sleep(S3_SYNC_WAIT_TIME)
return response
@reporter.step_deco("Delete objects S3")
@report_error
def delete_objects(self, bucket: str, keys: list[str]) -> dict:
response = self.boto3_client.delete_objects(Bucket=bucket, Delete=_make_objs_dict(keys))
log_command_execution("S3 Delete objects result", response)
assert (
"Errors" not in response
), f'The following objects have not been deleted: {[err_info["Key"] for err_info in response["Errors"]]}.\nError messages: {[err_info["Message"] for err_info in response["Errors"]]}'
sleep(S3_SYNC_WAIT_TIME)
return response
@reporter.step_deco("Delete object versions S3")
@report_error
def delete_object_versions(self, bucket: str, object_versions: list) -> dict:
# Build deletion list in S3 format
delete_list = {
"Objects": [
{
"Key": object_version["Key"],
"VersionId": object_version["VersionId"],
}
for object_version in object_versions
]
}
response = self.boto3_client.delete_objects(Bucket=bucket, Delete=delete_list)
log_command_execution("S3 Delete objects result", response)
return response
@reporter.step_deco("Delete object versions S3 without delete markers")
@report_error
def delete_object_versions_without_dm(self, bucket: str, object_versions: list) -> None:
# Delete objects without creating delete markers
for object_version in object_versions:
response = self.boto3_client.delete_object(
Bucket=bucket, Key=object_version["Key"], VersionId=object_version["VersionId"]
)
log_command_execution("S3 Delete object result", response)
@reporter.step_deco("Put object ACL")
@report_error
def put_object_acl(
self,
bucket: str,
key: str,
acl: Optional[str] = None,
grant_write: Optional[str] = None,
grant_read: Optional[str] = None,
) -> list:
# pytest.skip("Method put_object_acl is not supported by boto3 client")
raise NotImplementedError("Unsupported for boto3 client")
@reporter.step_deco("Get object ACL")
@report_error
def get_object_acl(self, bucket: str, key: str, version_id: Optional[str] = None) -> list:
params = {
self._to_s3_param(param): value
for param, value in locals().items()
if param not in ["self"] and value is not None
}
response = self.boto3_client.get_object_acl(**params)
log_command_execution("S3 ACL objects result", response)
return response.get("Grants")
@reporter.step_deco("Copy object S3")
@report_error
def copy_object(
self,
source_bucket: str,
source_key: str,
bucket: Optional[str] = None,
key: Optional[str] = None,
acl: Optional[str] = None,
metadata_directive: Optional[Literal["COPY", "REPLACE"]] = None,
metadata: Optional[dict] = None,
tagging_directive: Optional[Literal["COPY", "REPLACE"]] = None,
tagging: Optional[str] = None,
) -> str:
if bucket is None:
bucket = source_bucket
if key is None:
key = os.path.join(os.getcwd(), str(uuid.uuid4()))
copy_source = f"{source_bucket}/{source_key}"
params = {
self._to_s3_param(param): value
for param, value in locals().items()
if param not in ["self", "source_bucket", "source_key"] and value is not None
}
response = self.boto3_client.copy_object(**params)
log_command_execution("S3 Copy objects result", response)
return key
@reporter.step_deco("Get object S3")
@report_error
def get_object(
self,
bucket: str,
key: str,
version_id: Optional[str] = None,
object_range: Optional[tuple[int, int]] = None,
full_output: bool = False,
) -> Union[dict, str]:
filename = os.path.join(os.getcwd(), ASSETS_DIR, str(uuid.uuid4()))
range_str = None
if object_range:
range_str = f"bytes={object_range[0]}-{object_range[1]}"
params = {
self._to_s3_param(param): value
for param, value in {**locals(), **{"Range": range_str}}.items()
if param not in ["self", "object_range", "full_output", "range_str", "filename"]
and value is not None
}
response = self.boto3_client.get_object(**params)
log_command_execution("S3 Get objects result", response)
with open(f"{filename}", "wb") as get_file:
chunk = response["Body"].read(1024)
while chunk:
get_file.write(chunk)
chunk = response["Body"].read(1024)
return response if full_output else filename
@reporter.step_deco("Create multipart upload S3")
@report_error
def create_multipart_upload(self, bucket: str, key: str) -> str:
response = self.boto3_client.create_multipart_upload(Bucket=bucket, Key=key)
log_command_execution("S3 Created multipart upload", response)
assert response.get("UploadId"), f"Expected UploadId in response:\n{response}"
return response["UploadId"]
@reporter.step_deco("List multipart uploads S3")
@report_error
def list_multipart_uploads(self, bucket: str) -> Optional[list[dict]]:
response = self.boto3_client.list_multipart_uploads(Bucket=bucket)
log_command_execution("S3 List multipart upload", response)
return response.get("Uploads")
@reporter.step_deco("Abort multipart upload S3")
@report_error
def abort_multipart_upload(self, bucket: str, key: str, upload_id: str) -> None:
response = self.boto3_client.abort_multipart_upload(
Bucket=bucket, Key=key, UploadId=upload_id
)
log_command_execution("S3 Abort multipart upload", response)
@reporter.step_deco("Upload part S3")
@report_error
def upload_part(
self, bucket: str, key: str, upload_id: str, part_num: int, filepath: str
) -> str:
with open(filepath, "rb") as put_file:
body = put_file.read()
response = self.boto3_client.upload_part(
UploadId=upload_id,
Bucket=bucket,
Key=key,
PartNumber=part_num,
Body=body,
)
log_command_execution("S3 Upload part", response)
assert response.get("ETag"), f"Expected ETag in response:\n{response}"
return response["ETag"]
@reporter.step_deco("Upload copy part S3")
@report_error
def upload_part_copy(
self, bucket: str, key: str, upload_id: str, part_num: int, copy_source: str
) -> str:
response = self.boto3_client.upload_part_copy(
UploadId=upload_id,
Bucket=bucket,
Key=key,
PartNumber=part_num,
CopySource=copy_source,
)
log_command_execution("S3 Upload copy part", response)
assert response.get("CopyPartResult", []).get(
"ETag"
), f"Expected ETag in response:\n{response}"
return response["CopyPartResult"]["ETag"]
@reporter.step_deco("List parts S3")
@report_error
def list_parts(self, bucket: str, key: str, upload_id: str) -> list[dict]:
response = self.boto3_client.list_parts(UploadId=upload_id, Bucket=bucket, Key=key)
log_command_execution("S3 List part", response)
assert response.get("Parts"), f"Expected Parts in response:\n{response}"
return response["Parts"]
@reporter.step_deco("Complete multipart upload S3")
@report_error
def complete_multipart_upload(self, bucket: str, key: str, upload_id: str, parts: list) -> None:
parts = [{"ETag": etag, "PartNumber": part_num} for part_num, etag in parts]
response = self.boto3_client.complete_multipart_upload(
Bucket=bucket, Key=key, UploadId=upload_id, MultipartUpload={"Parts": parts}
)
log_command_execution("S3 Complete multipart upload", response)
@reporter.step_deco("Put object retention")
@report_error
def put_object_retention(
self,
bucket: str,
key: str,
retention: dict,
version_id: Optional[str] = None,
bypass_governance_retention: Optional[bool] = None,
) -> None:
params = {
self._to_s3_param(param): value
for param, value in locals().items()
if param not in ["self"] and value is not None
}
response = self.boto3_client.put_object_retention(**params)
log_command_execution("S3 Put object retention ", response)
@reporter.step_deco("Put object legal hold")
@report_error
def put_object_legal_hold(
self,
bucket: str,
key: str,
legal_hold_status: Literal["ON", "OFF"],
version_id: Optional[str] = None,
) -> None:
legal_hold = {"Status": legal_hold_status}
params = {
self._to_s3_param(param): value
for param, value in locals().items()
if param not in ["self", "legal_hold_status"] and value is not None
}
response = self.boto3_client.put_object_legal_hold(**params)
log_command_execution("S3 Put object legal hold ", response)
@reporter.step_deco("Put object tagging")
@report_error
def put_object_tagging(self, bucket: str, key: str, tags: list) -> None:
tags = [{"Key": tag_key, "Value": tag_value} for tag_key, tag_value in tags]
tagging = {"TagSet": tags}
response = self.boto3_client.put_object_tagging(Bucket=bucket, Key=key, Tagging=tagging)
log_command_execution("S3 Put object tagging", response)
@reporter.step_deco("Get object tagging")
@report_error
def get_object_tagging(self, bucket: str, key: str, version_id: Optional[str] = None) -> list:
params = {
self._to_s3_param(param): value
for param, value in locals().items()
if param not in ["self"] and value is not None
}
response = self.boto3_client.get_object_tagging(**params)
log_command_execution("S3 Get object tagging", response)
return response.get("TagSet")
@reporter.step_deco("Delete object tagging")
@report_error
def delete_object_tagging(self, bucket: str, key: str) -> None:
response = self.boto3_client.delete_object_tagging(Bucket=bucket, Key=key)
log_command_execution("S3 Delete object tagging", response)
@reporter.step_deco("Get object attributes")
@report_error
def get_object_attributes(
self,
bucket: str,
key: str,
attributes: list[str],
version_id: Optional[str] = None,
max_parts: Optional[int] = None,
part_number: Optional[int] = None,
full_output: bool = True,
) -> dict:
logger.warning("Method get_object_attributes is not supported by boto3 client")
return {}
@reporter.step_deco("Sync directory S3")
@report_error
def sync(
self,
bucket: str,
dir_path: str,
acl: Optional[str] = None,
metadata: Optional[dict] = None,
) -> dict:
raise NotImplementedError("Sync is not supported for boto3 client")
@reporter.step_deco("CP directory S3")
@report_error
def cp(
self,
bucket: str,
dir_path: str,
acl: Optional[str] = None,
metadata: Optional[dict] = None,
) -> dict:
raise NotImplementedError("Cp is not supported for boto3 client")
# END OBJECT METHODS #
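As an illustrative counterpart (again not part of this commit; endpoint and the local part file are placeholders), the boto3-backed wrapper drives the same interface, e.g. for a single-part multipart upload:

from frostfs_testlib.s3.boto3_client import Boto3ClientWrapper

client = Boto3ClientWrapper("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "http://localhost:8084")
bucket = client.create_bucket()
upload_id = client.create_multipart_upload(bucket, "big-object")
etag = client.upload_part(bucket, "big-object", upload_id, 1, "/tmp/part1.bin")
client.complete_multipart_upload(bucket, "big-object", upload_id, [(1, etag)])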


@@ -0,0 +1,378 @@
from abc import ABC, abstractmethod
from datetime import datetime
from enum import Enum
from typing import Literal, Optional, Union
def _make_objs_dict(key_names):
objs_list = []
for key in key_names:
obj_dict = {"Key": key}
objs_list.append(obj_dict)
objs_dict = {"Objects": objs_list}
return objs_dict
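# For example (illustrative): _make_objs_dict(["key1", "key2"]) returns
# {"Objects": [{"Key": "key1"}, {"Key": "key2"}]}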
class VersioningStatus(Enum):
ENABLED = "Enabled"
SUSPENDED = "Suspended"
ACL_COPY = [
"private",
"public-read",
"public-read-write",
"authenticated-read",
"aws-exec-read",
"bucket-owner-read",
"bucket-owner-full-control",
]
class S3ClientWrapper(ABC):
@abstractmethod
def __init__(self, access_key_id: str, secret_access_key: str, s3gate_endpoint: str) -> None:
pass
@abstractmethod
def create_bucket(
self,
bucket: Optional[str] = None,
object_lock_enabled_for_bucket: Optional[bool] = None,
acl: Optional[str] = None,
grant_write: Optional[str] = None,
grant_read: Optional[str] = None,
grant_full_control: Optional[str] = None,
location_constraint: Optional[str] = None,
) -> str:
"""Create a bucket."""
# BUCKET METHODS #
@abstractmethod
def list_buckets(self) -> list[str]:
"""List buckets."""
@abstractmethod
def delete_bucket(self, bucket: str) -> None:
"""Delete bucket"""
@abstractmethod
def head_bucket(self, bucket: str) -> None:
"""This action is useful to determine if a bucket exists and you have permission to access it.
The action returns a 200 OK if the bucket exists and you have permission to access it.
If the bucket does not exist or you do not have permission to access it, the HEAD request
returns a generic 400 Bad Request, 403 Forbidden or 404 Not Found code.
A message body is not included, so you cannot determine the exception beyond these error codes.
"""
@abstractmethod
def put_bucket_versioning(self, bucket: str, status: VersioningStatus) -> None:
"""Sets the versioning state of an existing bucket.
You can set the versioning state with one of the following values:
Enabled: Enables versioning for the objects in the bucket. All objects added to the bucket receive a unique version ID.
Suspended: Disables versioning for the objects in the bucket. All objects added to the bucket receive the version ID null.
If the versioning state has never been set on a bucket, it has no versioning state
"""
@abstractmethod
def get_bucket_versioning_status(self, bucket: str) -> Literal["Enabled", "Suspended"]:
"""Returns the versioning state of a bucket.
To retrieve the versioning state of a bucket, you must be the bucket owner.
"""
@abstractmethod
def put_bucket_tagging(self, bucket: str, tags: list) -> None:
"""Sets the tags for a bucket."""
@abstractmethod
def get_bucket_tagging(self, bucket: str) -> list:
"""Returns the tag set associated with the Outposts bucket."""
@abstractmethod
def delete_bucket_tagging(self, bucket: str) -> None:
"""Deletes the tags from the bucket."""
@abstractmethod
def get_bucket_acl(self, bucket: str) -> list:
"""This implementation of the GET action uses the acl subresource to return the access control list (ACL) of a bucket."""
@abstractmethod
def put_bucket_acl(
self,
bucket: str,
acl: Optional[str] = None,
grant_write: Optional[str] = None,
grant_read: Optional[str] = None,
) -> list:
"""Sets the permissions on an existing bucket using access control lists (ACL)."""
@abstractmethod
def put_object_lock_configuration(self, bucket: str, configuration: dict) -> dict:
"""Places an Object Lock configuration on the specified bucket.
The rule specified in the Object Lock configuration will be applied by
default to every new object placed in the specified bucket."""
@abstractmethod
def get_object_lock_configuration(self, bucket: str) -> dict:
"""Gets the Object Lock configuration for a bucket.
The rule specified in the Object Lock configuration will be applied by
default to every new object placed in the specified bucket."""
@abstractmethod
def get_bucket_policy(self, bucket: str) -> str:
"""Returns the policy of a specified bucket."""
@abstractmethod
def put_bucket_policy(self, bucket: str, policy: dict) -> None:
"""Applies S3 bucket policy to an S3 bucket."""
@abstractmethod
def get_bucket_cors(self, bucket: str) -> dict:
"""Returns the Cross-Origin Resource Sharing (CORS) configuration information set for the bucket."""
@abstractmethod
def put_bucket_cors(self, bucket: str, cors_configuration: dict) -> None:
"""Sets the cors configuration for your bucket. If the configuration exists, S3 replaces it."""
@abstractmethod
def delete_bucket_cors(self, bucket: str) -> None:
"""Deletes the cors configuration information set for the bucket."""
@abstractmethod
def get_bucket_location(self, bucket: str) -> str:
"""Returns the LocationConstraint the bucket resides in. You can set the it
using the LocationConstraint request parameter in a CreateBucket request."""
# END OF BUCKET METHODS #
# OBJECT METHODS #
@abstractmethod
def list_objects_v2(self, bucket: str, full_output: bool = False) -> Union[dict, list[str]]:
"""Returns some or all (up to 1,000) of the objects in a bucket with each request.
You can use the request parameters as selection criteria to return a subset of the objects in a bucket.
A 200 OK response can contain valid or invalid XML. Make sure to design your application
to parse the contents of the response and handle it appropriately.
"""
@abstractmethod
def list_objects(self, bucket: str, full_output: bool = False) -> Union[dict, list[str]]:
"""Returns some or all (up to 1,000) of the objects in a bucket with each request.
You can use the request parameters as selection criteria to return a subset of the objects in a bucket.
A 200 OK response can contain valid or invalid XML. Make sure to design your application
to parse the contents of the response and handle it appropriately.
"""
@abstractmethod
def list_objects_versions(self, bucket: str, full_output: bool = False) -> dict:
"""Returns metadata about all versions of the objects in a bucket."""
@abstractmethod
def list_delete_markers(self, bucket: str, full_output: bool = False) -> dict:
"""Returns metadata about all delete markers of the objects in a bucket."""
@abstractmethod
def put_object(
self,
bucket: str,
filepath: str,
key: Optional[str] = None,
metadata: Optional[dict] = None,
tagging: Optional[str] = None,
acl: Optional[str] = None,
object_lock_mode: Optional[str] = None,
object_lock_retain_until_date: Optional[datetime] = None,
object_lock_legal_hold_status: Optional[str] = None,
grant_full_control: Optional[str] = None,
grant_read: Optional[str] = None,
) -> str:
"""Adds an object to a bucket."""
@abstractmethod
def head_object(self, bucket: str, key: str, version_id: Optional[str] = None) -> dict:
"""The HEAD action retrieves metadata from an object without returning the object itself.
This action is useful if you're only interested in an object's metadata."""
@abstractmethod
def delete_object(self, bucket: str, key: str, version_id: Optional[str] = None) -> dict:
"""Removes the null version (if there is one) of an object and inserts a delete marker,
which becomes the latest version of the object. If there isn't a null version,
S3 does not remove any objects but will still respond that the command was successful."""
@abstractmethod
def delete_objects(self, bucket: str, keys: list[str]) -> dict:
"""This action enables you to delete multiple objects from a bucket
using a single HTTP request. If you know the object keys that
you want to delete, then this action provides a suitable alternative
to sending individual delete requests, reducing per-request overhead.
The request contains a list of up to 1000 keys that you want to delete."""
@abstractmethod
def delete_object_versions(self, bucket: str, object_versions: list) -> dict:
"""Delete object versions"""
@abstractmethod
def delete_object_versions_without_dm(self, bucket: str, object_versions: list) -> None:
"""Delete object versions without delete markers"""
@abstractmethod
def put_object_acl(
self,
bucket: str,
key: str,
acl: Optional[str] = None,
grant_write: Optional[str] = None,
grant_read: Optional[str] = None,
) -> list:
"""Uses the acl subresource to set the access control
list (ACL) permissions for a new or existing object in an S3 bucket."""
@abstractmethod
def get_object_acl(self, bucket: str, key: str, version_id: Optional[str] = None) -> list:
"""Returns the access control list (ACL) of an object."""
@abstractmethod
def copy_object(
self,
source_bucket: str,
source_key: str,
bucket: Optional[str] = None,
key: Optional[str] = None,
acl: Optional[str] = None,
metadata_directive: Optional[Literal["COPY", "REPLACE"]] = None,
metadata: Optional[dict] = None,
tagging_directive: Optional[Literal["COPY", "REPLACE"]] = None,
tagging: Optional[str] = None,
) -> str:
"""Creates a copy of an object"""
@abstractmethod
def get_object(
self,
bucket: str,
key: str,
version_id: Optional[str] = None,
object_range: Optional[tuple[int, int]] = None,
full_output: bool = False,
) -> Union[dict, str]:
"""Retrieves objects from S3."""
@abstractmethod
def create_multipart_upload(self, bucket: str, key: str) -> str:
"""This action initiates a multipart upload and returns an upload ID.
This upload ID is used to associate all of the parts in the specific multipart upload.
You specify this upload ID in each of your subsequent upload part requests (see UploadPart).
You also include this upload ID in the final request to either complete or abort the multipart upload request."""
@abstractmethod
def list_multipart_uploads(self, bucket: str) -> Optional[list[dict]]:
"""This action lists in-progress multipart uploads.
An in-progress multipart upload is a multipart upload that has been initiated
using the Initiate Multipart Upload request, but has not yet been completed or aborted.
This action returns at most 1,000 multipart uploads in the response."""
@abstractmethod
def abort_multipart_upload(self, bucket: str, key: str, upload_id: str) -> None:
"""This action aborts a multipart upload. After a multipart upload is aborted,
no additional parts can be uploaded using that upload ID.
The storage consumed by any previously uploaded parts will be freed.
However, if any part uploads are currently in progress, those part
uploads might or might not succeed. As a result, it might be necessary to
abort a given multipart upload multiple times in order to completely free all storage consumed by all parts."""
@abstractmethod
def upload_part(
self, bucket: str, key: str, upload_id: str, part_num: int, filepath: str
) -> str:
"""Uploads a part in a multipart upload."""
@abstractmethod
def upload_part_copy(
self, bucket: str, key: str, upload_id: str, part_num: int, copy_source: str
) -> str:
"""Uploads a part by copying data from an existing object as data source."""
@abstractmethod
def list_parts(self, bucket: str, key: str, upload_id: str) -> list[dict]:
"""Lists the parts that have been uploaded for a specific multipart upload."""
@abstractmethod
def complete_multipart_upload(self, bucket: str, key: str, upload_id: str, parts: list) -> None:
"""Completes a multipart upload by assembling previously uploaded parts."""
@abstractmethod
def put_object_retention(
self,
bucket: str,
key: str,
retention: dict,
version_id: Optional[str] = None,
bypass_governance_retention: Optional[bool] = None,
) -> None:
"""Places an Object Retention configuration on an object."""
@abstractmethod
def put_object_legal_hold(
self,
bucket: str,
key: str,
legal_hold_status: Literal["ON", "OFF"],
version_id: Optional[str] = None,
) -> None:
"""Applies a legal hold configuration to the specified object."""
@abstractmethod
def put_object_tagging(self, bucket: str, key: str, tags: list) -> None:
"""Sets the tag-set for an object."""
@abstractmethod
def get_object_tagging(self, bucket: str, key: str, version_id: Optional[str] = None) -> list:
"""Returns the tag-set of an object."""
@abstractmethod
def delete_object_tagging(self, bucket: str, key: str) -> None:
"""Removes the entire tag set from the specified object."""
@abstractmethod
def get_object_attributes(
self,
bucket: str,
key: str,
attributes: list[str],
version_id: str = "",
max_parts: int = 0,
part_number: int = 0,
full_output: bool = True,
) -> dict:
"""Retrieves all the metadata from an object without returning the object itself."""
@abstractmethod
def sync(
self,
bucket: str,
dir_path: str,
acl: Optional[str] = None,
metadata: Optional[dict] = None,
) -> dict:
"""sync directory TODO: Add proper description"""
@abstractmethod
def cp(
self,
bucket: str,
dir_path: str,
acl: Optional[str] = None,
metadata: Optional[dict] = None,
) -> dict:
"""cp directory TODO: Add proper description"""
# END OF OBJECT METHODS #
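A hedged sketch, not part of this commit, of how a test suite could parametrize over both implementations of this interface (fixture name, credentials and endpoint are assumptions):

import pytest

from frostfs_testlib.s3 import AwsCliClient, Boto3ClientWrapper, S3ClientWrapper

@pytest.fixture(params=[AwsCliClient, Boto3ClientWrapper], ids=["aws-cli", "boto3"])
def s3_client(request) -> S3ClientWrapper:
    # Placeholder credentials and devenv gate endpoint.
    return request.param("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "http://localhost:8084")

def test_bucket_roundtrip(s3_client: S3ClientWrapper):
    bucket = s3_client.create_bucket()
    assert bucket in s3_client.list_buckets()
    s3_client.delete_bucket(bucket)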