From cd15be3b7c41448280217aac741f2fc1efefac95 Mon Sep 17 00:00:00 2001
From: Kirill Sosnovskikh
Date: Fri, 15 Nov 2024 21:03:21 +0300
Subject: [PATCH] [#334] Automation of PATCH method in S3

Signed-off-by: Kirill Sosnovskikh
---
 src/frostfs_testlib/http/http_client.py   |   6 +-
 src/frostfs_testlib/s3/aws_cli_client.py  |   7 +-
 src/frostfs_testlib/s3/boto3_client.py    |   7 +-
 src/frostfs_testlib/s3/interfaces.py      |   4 +-
 src/frostfs_testlib/s3/s3_http_client.py  | 127 ++++++++++++++++++++++
 src/frostfs_testlib/steps/s3/s3_helper.py |  24 ++++
 6 files changed, 162 insertions(+), 13 deletions(-)
 create mode 100644 src/frostfs_testlib/s3/s3_http_client.py

diff --git a/src/frostfs_testlib/http/http_client.py b/src/frostfs_testlib/http/http_client.py
index a3e3e54..c3e5fae 100644
--- a/src/frostfs_testlib/http/http_client.py
+++ b/src/frostfs_testlib/http/http_client.py
@@ -46,9 +46,9 @@ class HttpClient:
         logger.info(f"Response: {response.status_code} => {response.text}")
 
         if expected_status_code:
-            assert response.status_code == expected_status_code, (
-                f"Got {response.status_code} response code" f" while {expected_status_code} expected"
-            )
+            assert (
+                response.status_code == expected_status_code
+            ), f"Got {response.status_code} response code while {expected_status_code} expected"
 
         return response
 
diff --git a/src/frostfs_testlib/s3/aws_cli_client.py b/src/frostfs_testlib/s3/aws_cli_client.py
index 2ac6d68..4196c77 100644
--- a/src/frostfs_testlib/s3/aws_cli_client.py
+++ b/src/frostfs_testlib/s3/aws_cli_client.py
@@ -171,7 +171,7 @@ class AwsCliClient(S3ClientWrapper):
         return response.get("TagSet")
 
     @reporter.step("Get bucket acl")
-    def get_bucket_acl(self, bucket: str) -> list:
+    def get_bucket_acl(self, bucket: str) -> dict:
         if bucket.startswith("-") or " " in bucket:
             bucket = f'"{bucket}"'
 
@@ -179,8 +179,7 @@ class AwsCliClient(S3ClientWrapper):
             f"aws {self.common_flags} s3api get-bucket-acl --bucket {bucket} " f"--endpoint {self.s3gate_endpoint} --profile {self.profile}"
         )
         output = self.local_shell.exec(cmd).stdout
-        response = self._to_json(output)
-        return response.get("Grants")
+        return self._to_json(output)
 
     @reporter.step("Get bucket location")
     def get_bucket_location(self, bucket: str) -> dict:
@@ -861,7 +860,7 @@ class AwsCliClient(S3ClientWrapper):
         return response["Parts"]
 
     @reporter.step("Complete multipart upload S3")
-    def complete_multipart_upload(self, bucket: str, key: str, upload_id: str, parts: list) -> None:
+    def complete_multipart_upload(self, bucket: str, key: str, upload_id: str, parts: list) -> dict:
         if bucket.startswith("-") or " " in bucket:
             bucket = f'"{bucket}"'
 
diff --git a/src/frostfs_testlib/s3/boto3_client.py b/src/frostfs_testlib/s3/boto3_client.py
index c680f17..6b6c74e 100644
--- a/src/frostfs_testlib/s3/boto3_client.py
+++ b/src/frostfs_testlib/s3/boto3_client.py
@@ -230,14 +230,13 @@ class Boto3ClientWrapper(S3ClientWrapper):
         return response.get("TagSet")
 
     @reporter.step("Get bucket acl")
-    def get_bucket_acl(self, bucket: str) -> list:
-        response = self._exec_request(
+    def get_bucket_acl(self, bucket: str) -> dict:
+        return self._exec_request(
             self.boto3_client.get_bucket_acl,
             params={"Bucket": bucket},
             endpoint=self.s3gate_endpoint,
             profile=self.profile,
         )
-        return response.get("Grants")
 
     @reporter.step("Delete bucket tagging")
     def delete_bucket_tagging(self, bucket: str) -> None:
@@ -705,7 +704,7 @@ class Boto3ClientWrapper(S3ClientWrapper):
         return response["Parts"]
 
     @reporter.step("Complete multipart upload S3")
-    def complete_multipart_upload(self, bucket: str, key: str, upload_id: str, parts: list) -> None:
+    def complete_multipart_upload(self, bucket: str, key: str, upload_id: str, parts: list) -> dict:
         parts = [{"ETag": etag, "PartNumber": part_num} for part_num, etag in parts]
         params = self._convert_to_s3_params(locals(), exclude=["parts"])
         params["MultipartUpload"] = {"Parts": parts}
diff --git a/src/frostfs_testlib/s3/interfaces.py b/src/frostfs_testlib/s3/interfaces.py
index c3d99eb..7ce9f31 100644
--- a/src/frostfs_testlib/s3/interfaces.py
+++ b/src/frostfs_testlib/s3/interfaces.py
@@ -128,7 +128,7 @@ class S3ClientWrapper(HumanReadableABC):
         """Deletes the tags from the bucket."""
 
     @abstractmethod
-    def get_bucket_acl(self, bucket: str) -> list:
+    def get_bucket_acl(self, bucket: str) -> dict:
         """This implementation of the GET action uses the acl subresource to return the access control list (ACL) of a bucket."""
 
     @abstractmethod
@@ -336,7 +336,7 @@ class S3ClientWrapper(HumanReadableABC):
         """Lists the parts that have been uploaded for a specific multipart upload."""
 
     @abstractmethod
-    def complete_multipart_upload(self, bucket: str, key: str, upload_id: str, parts: list) -> None:
+    def complete_multipart_upload(self, bucket: str, key: str, upload_id: str, parts: list) -> dict:
         """Completes a multipart upload by assembling previously uploaded parts."""
 
     @abstractmethod
diff --git a/src/frostfs_testlib/s3/s3_http_client.py b/src/frostfs_testlib/s3/s3_http_client.py
new file mode 100644
index 0000000..a34c380
--- /dev/null
+++ b/src/frostfs_testlib/s3/s3_http_client.py
@@ -0,0 +1,127 @@
+import hashlib
+import logging
+import xml.etree.ElementTree as ET
+
+import httpx
+from botocore.auth import SigV4Auth
+from botocore.awsrequest import AWSRequest
+from botocore.credentials import Credentials
+
+from frostfs_testlib import reporter
+from frostfs_testlib.http.http_client import HttpClient
+from frostfs_testlib.utils.file_utils import TestFile
+
+logger = logging.getLogger("NeoLogger")
+
+DEFAULT_TIMEOUT = 60.0
+
+
+class S3HttpClient:
+    def __init__(
+        self, s3gate_endpoint: str, access_key_id: str, secret_access_key: str, profile: str = "default", region: str = "us-east-1"
+    ) -> None:
+        self.http_client = HttpClient()
+        self.s3gate_endpoint = s3gate_endpoint
+        self.credentials = Credentials(access_key_id, secret_access_key)
+        self.profile = profile
+        self.region = region
+        self.service = "s3"
+        self.signature = SigV4Auth(self.credentials, self.service, self.region)
+
+    def _to_s3_header(self, header: str) -> str:
+        replacement_map = {
+            "Acl": "ACL",
+            "_": "-",
+        }
+
+        result = header
+        if not header.startswith("x_amz"):
+            result = header.title()
+
+        for find, replace in replacement_map.items():
+            result = result.replace(find, replace)
+
+        return result
+
+    def _convert_to_s3_headers(self, scope: dict, exclude: list[str] = None):
+        exclude = ["self", "cls"] if not exclude else exclude + ["self", "cls"]
+        return {self._to_s3_header(header): value for header, value in scope.items() if header not in exclude and value is not None}
+
+    def _create_aws_request(
+        self, method: str, url: str, headers: dict, content: str | bytes | TestFile = None, params: dict = None
+    ) -> AWSRequest:
+        data = b""
+
+        if content is not None:
+            if isinstance(content, TestFile):
+                with open(content, "rb") as io_content:
+                    data = io_content.read()
+            elif isinstance(content, str):
+                data = bytes(content, encoding="utf-8")
+            elif isinstance(content, bytes):
+                data = content
+            else:
+                raise TypeError(f"Content expected as a string, bytes or TestFile object, got: {content}")
+
+        headers["X-Amz-Content-SHA256"] = hashlib.sha256(data).hexdigest()
+        aws_request = AWSRequest(method, url, headers, data, params)
+        self.signature.add_auth(aws_request)
+
+        return aws_request
+
+    def _exec_request(
+        self,
+        method: str,
+        url: str,
+        headers: dict,
+        content: str | bytes | TestFile = None,
+        params: dict = None,
+        timeout: float = DEFAULT_TIMEOUT,
+    ) -> dict:
+        aws_request = self._create_aws_request(method, url, headers, content, params)
+        response = self.http_client.send(
+            aws_request.method,
+            aws_request.url,
+            headers=dict(aws_request.headers),
+            data=aws_request.data,
+            params=aws_request.params,
+            timeout=timeout,
+        )
+
+        try:
+            response.raise_for_status()
+        except httpx.HTTPStatusError:
+            raise httpx.HTTPStatusError(response.text, request=response.request, response=response)
+
+        root = ET.fromstring(response.read())
+        data = {
+            "LastModified": root.find(".//LastModified").text,
+            "ETag": root.find(".//ETag").text,
+        }
+
+        if response.headers.get("x-amz-version-id"):
+            data["VersionId"] = response.headers.get("x-amz-version-id")
+
+        return data
+
+    @reporter.step("Patch object S3")
+    def patch_object(
+        self,
+        bucket: str,
+        key: str,
+        content: str | bytes | TestFile,
+        content_range: str,
+        version_id: str = None,
+        if_match: str = None,
+        if_unmodified_since: str = None,
+        x_amz_expected_bucket_owner: str = None,
+        timeout: float = DEFAULT_TIMEOUT,
+    ) -> dict:
+        if content_range and not content_range.startswith("bytes"):
+            content_range = f"bytes {content_range}/*"
+
+        url = f"{self.s3gate_endpoint}/{bucket}/{key}"
+        headers = self._convert_to_s3_headers(locals(), exclude=["bucket", "key", "content", "version_id", "timeout"])
+        params = {"VersionId": version_id} if version_id is not None else None
+
+        return self._exec_request("PATCH", url, headers, content, params, timeout=timeout)
diff --git a/src/frostfs_testlib/steps/s3/s3_helper.py b/src/frostfs_testlib/steps/s3/s3_helper.py
index dbf48d3..7949f2d 100644
--- a/src/frostfs_testlib/steps/s3/s3_helper.py
+++ b/src/frostfs_testlib/steps/s3/s3_helper.py
@@ -12,6 +12,7 @@ from frostfs_testlib.shell import Shell
 from frostfs_testlib.steps.cli.container import search_nodes_with_container
 from frostfs_testlib.storage.cluster import Cluster, ClusterNode
 from frostfs_testlib.storage.dataclasses.wallet import WalletInfo
+from frostfs_testlib.utils.file_utils import TestFile, get_file_hash
 
 logger = logging.getLogger("NeoLogger")
 
@@ -185,3 +186,26 @@ def search_nodes_with_bucket(
             break
     nodes_list = search_nodes_with_container(wallet=wallet, cid=cid, shell=shell, endpoint=endpoint, cluster=cluster)
     return nodes_list
+
+
+def get_bytes_relative_to_object(value: int | str, object_size: int = None, part_size: int = None) -> int:
+    if isinstance(value, int):
+        return value
+
+    if "part" not in value and "object" not in value:
+        return int(value)
+
+    if object_size is not None:
+        value = value.replace("object", str(object_size))
+
+    if part_size is not None:
+        value = value.replace("part", str(part_size))
+
+    return int(eval(value))
+
+
+def get_range_relative_to_object(rng: str, object_size: int = None, part_size: int = None, int_values: bool = False) -> str | tuple:
+    start, end = rng.split(":")
+    start = get_bytes_relative_to_object(start, object_size, part_size)
+    end = get_bytes_relative_to_object(end, object_size, part_size)
+    return (start, end) if int_values else f"bytes {start}-{end}/*"
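
For reference, a minimal usage sketch of the client introduced by this patch (not part of the diff): the endpoint, credentials, bucket and object names below are placeholders, and it assumes the target object already exists and the gateway supports the S3 PATCH extension.

from frostfs_testlib.s3.s3_http_client import S3HttpClient
from frostfs_testlib.steps.s3.s3_helper import get_range_relative_to_object

# Placeholder endpoint and credentials -- take real values from the test environment.
s3_http_client = S3HttpClient(
    s3gate_endpoint="http://localhost:8084",
    access_key_id="PLACEHOLDER_ACCESS_KEY_ID",
    secret_access_key="PLACEHOLDER_SECRET_ACCESS_KEY",
)

# "0:99" is resolved into the header value "bytes 0-99/*" relative to an assumed
# 1 KiB object, so the first 100 bytes of the existing object get overwritten.
content_range = get_range_relative_to_object("0:99", object_size=1024)

response = s3_http_client.patch_object(
    bucket="test-bucket",
    key="test-object",
    content=b"\x00" * 100,
    content_range=content_range,
)
print(response["ETag"], response["LastModified"])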