import logging

import allure
import pytest
from frostfs_testlib import reporter
from frostfs_testlib.clients import S3ClientWrapper
from frostfs_testlib.steps import s3_helper
from frostfs_testlib.steps.cli.object import put_object_to_random_node
from frostfs_testlib.steps.http_gate import (
    assert_hashes_are_equal,
    get_object_by_attr_and_verify_hashes,
    get_via_http_gate,
    try_to_get_object_via_passed_request_and_expect_error,
    verify_object_hash,
)
from frostfs_testlib.storage.dataclasses.wallet import WalletInfo
from frostfs_testlib.testing.cluster_test_base import ClusterTestBase
from frostfs_testlib.utils.file_utils import TestFile, generate_file, split_file

from ....helpers.container_request import REP_2_1_4_PUBLIC, requires_container

logger = logging.getLogger("NeoLogger")

# 5 MiB: the minimum size S3 accepts for non-final multipart parts
PART_SIZE = 5 * 1024 * 1024


@pytest.mark.nightly
@pytest.mark.sanity
@pytest.mark.http_gate
class Test_http_object(ClusterTestBase):
    @allure.title("Put over gRPC, Get over HTTP with attributes (obj_size={object_size})")
    @requires_container(REP_2_1_4_PUBLIC)
    def test_object_put_get_attributes(self, default_wallet: WalletInfo, container: str, test_file: TestFile):
        """
        Test that an object can be put using the gRPC interface and retrieved over HTTP.

        Steps:
        1. Create an object;
        2. Put object(s) using gRPC (frostfs-cli) with attributes [--attributes chapter1=peace,chapter2=war];
        3. Download the object using the HTTP gate (https://git.frostfs.info/TrueCloudLab/frostfs-http-gw#downloading);
        4. Compare hashes of the original and the downloaded object;
        5. [Negative] Try to get the object with the specified attributes and `get` request: [get/$CID/chapter1/peace];
        6. Download the object with the specified attributes and `get_by_attribute` request: [get_by_attribute/$CID/chapter1/peace];
        7. Compare hashes of the original and the downloaded object;
        8. [Negative] Try to get the object via `get_by_attribute` request: [get_by_attribute/$CID/$OID];

        Expected result:
        Hashes must be the same.
""" # List of Key=Value attributes obj_key1 = "chapter1" obj_value1 = "peace" obj_key2 = "chapter2" obj_value2 = "war" # Prepare for grpc PUT request key_value1 = obj_key1 + "=" + obj_value1 key_value2 = obj_key2 + "=" + obj_value2 with reporter.step("Put objects using gRPC [--attributes chapter1=peace,chapter2=war]"): oid = put_object_to_random_node( wallet=default_wallet, path=test_file.path, cid=container, shell=self.shell, cluster=self.cluster, attributes=f"{key_value1},{key_value2}", ) with reporter.step("Get object and verify hashes [ get/$CID/$OID ]"): verify_object_hash( oid=oid, file_name=test_file.path, wallet=default_wallet, cid=container, shell=self.shell, nodes=self.cluster.storage_nodes, request_node=self.cluster.cluster_nodes[0], ) with reporter.step("[Negative] try to get object: [get/$CID/chapter1/peace]"): attrs = {obj_key1: obj_value1, obj_key2: obj_value2} request = f"/get/{container}/{obj_key1}/{obj_value1}" expected_err_msg = "Failed to get object via HTTP gate:" try_to_get_object_via_passed_request_and_expect_error( cid=container, oid=oid, node=self.cluster.cluster_nodes[0], error_pattern=expected_err_msg, http_request_path=request, attrs=attrs, ) with reporter.step("Download the object with attribute [get_by_attribute/$CID/chapter1/peace]"): get_object_by_attr_and_verify_hashes(oid, test_file, container, attrs, self.cluster.cluster_nodes[0]) with reporter.step("[Negative] try to get object: get_by_attribute/$CID/$OID"): request = f"/get_by_attribute/{container}/{oid}" try_to_get_object_via_passed_request_and_expect_error( container, oid, self.cluster.cluster_nodes[0], error_pattern=expected_err_msg, http_request_path=request, ) @allure.title("Put object over S3, get over HTTP with bucket name and key (s3_client={s3_client}, object_size={object_size})") def test_object_put_get_bucketname_key(self, test_file: TestFile, s3_client: S3ClientWrapper): """ Test that object can be put using s3-gateway interface and got via HTTP with bucket name and object key. Steps: 1. Create an object; 2. Create a bucket via s3; 3. Put the object via s3; 4. Download the object using HTTP gate with the bucket name and the object key; 5. Compare hashes of the original and the downloaded objects; Expected result: Hashes must be the same. 
""" object_key = s3_helper.object_key_from_file_path(test_file) with reporter.step("Create public bucket"): bucket = s3_client.create_bucket(acl="public-read-write") with reporter.step("Put object"): s3_client.put_object(bucket, test_file, object_key) with reporter.step("Get object via S3 gate"): obj_s3 = s3_client.get_object(bucket, object_key) with reporter.step("Get object via HTTP gate"): obj_http = get_via_http_gate(bucket, object_key, node=self.cluster.cluster_nodes[0]) with reporter.step("Make sure the hashes of both objects are the same"): assert_hashes_are_equal(test_file.path, obj_http, obj_s3) @allure.title("Put multipart object over S3, get over HTTP with bucket name and key (s3_client={s3_client})") def test_object_put_get_bucketname_key_multipart(self, s3_client: S3ClientWrapper): parts = [] parts_count = 5 original_size = PART_SIZE * parts_count with reporter.step("Create public container"): bucket = s3_client.create_bucket(acl="public-read-write") with reporter.step("Generate original object and split it into parts"): original_file = generate_file(original_size) file_parts = split_file(original_file, parts_count) object_key = s3_helper.object_key_from_file_path(original_file) with reporter.step("Create multipart and upload parts"): upload_id = s3_client.create_multipart_upload(bucket, object_key) for part_id, file_path in enumerate(file_parts, start=1): etag = s3_client.upload_part(bucket, object_key, upload_id, part_id, file_path) parts.append((part_id, etag)) with reporter.step("Check all parts are visible in bucket"): got_parts = s3_client.list_parts(bucket, object_key, upload_id) assert len(got_parts) == len(file_parts), f"Expected {parts_count} parts, got:\n{got_parts}" with reporter.step("Complete multipart upload"): s3_client.complete_multipart_upload(bucket, object_key, upload_id, parts) with reporter.step("Get multipart object via S3 gate"): obj_s3 = s3_client.get_object(bucket, object_key) with reporter.step("Get multipart object via HTTP gate"): obj_http = get_via_http_gate(bucket, object_key, self.cluster.cluster_nodes[0]) with reporter.step("Make sure the hashes of both objects are the same"): assert_hashes_are_equal(original_file, obj_http, obj_s3)