import logging

import allure
import pytest
from frostfs_testlib import reporter
from frostfs_testlib.s3 import AwsCliClient, S3ClientWrapper
from frostfs_testlib.steps.cli.object import put_object_to_random_node
from frostfs_testlib.steps.http.http_gate import (
    assert_hashes_are_equal,
    get_object_by_attr_and_verify_hashes,
    get_via_http_gate,
    try_to_get_object_via_passed_request_and_expect_error,
    verify_object_hash,
)
from frostfs_testlib.steps.s3 import s3_helper
from frostfs_testlib.storage.dataclasses.wallet import WalletInfo
from frostfs_testlib.testing.cluster_test_base import ClusterTestBase
from frostfs_testlib.utils.file_utils import TestFile

from ....helpers.container_request import REP_2_1_4_PUBLIC, requires_container

logger = logging.getLogger("NeoLogger")


@pytest.mark.nightly
@pytest.mark.sanity
@pytest.mark.http_gate
class Test_http_object(ClusterTestBase):
    @allure.title("Put over gRPC, Get over HTTP with attributes (obj_size={object_size})")
    @requires_container(REP_2_1_4_PUBLIC)
    def test_object_put_get_attributes(self, default_wallet: WalletInfo, container: str, test_file: TestFile):
        """Put an object over gRPC with attributes and fetch it back over the HTTP gate.

        Covers both download paths of the HTTP gate:
        * ``get/$CID/$OID`` — plain download, hashes must match the original;
        * ``get_by_attribute/$CID/<key>/<value>`` — attribute download, hashes must match;
        plus the two negative cross-cases, where each path is fed the other
        path's request format and the gate is expected to fail.
        """
        # Key=Value attributes attached to the object on upload.
        attr_key, attr_value = "chapter1", "peace"
        extra_key, extra_value = "chapter2", "war"
        attrs = {attr_key: attr_value, extra_key: extra_value}

        # All HTTP-gate requests in this test go through the first cluster node.
        gateway_node = self.cluster.cluster_nodes[0]

        with reporter.step("Put objects using gRPC [--attributes chapter1=peace,chapter2=war]"):
            oid = put_object_to_random_node(
                wallet=default_wallet,
                path=test_file.path,
                cid=container,
                shell=self.shell,
                cluster=self.cluster,
                attributes=f"{attr_key}={attr_value},{extra_key}={extra_value}",
            )

        with reporter.step("Get object and verify hashes [ get/$CID/$OID ]"):
            verify_object_hash(
                oid=oid,
                file_name=test_file.path,
                wallet=default_wallet,
                cid=container,
                shell=self.shell,
                nodes=self.cluster.storage_nodes,
                request_node=gateway_node,
            )

        # Error prefix the gate helper matches against on failed downloads.
        expected_err_msg = "Failed to get object via HTTP gate:"

        with reporter.step("[Negative] try to get object: [get/$CID/chapter1/peace]"):
            # Attribute-style path on the plain `get` endpoint must be rejected.
            try_to_get_object_via_passed_request_and_expect_error(
                cid=container,
                oid=oid,
                node=gateway_node,
                error_pattern=expected_err_msg,
                http_request_path=f"/get/{container}/{attr_key}/{attr_value}",
                attrs=attrs,
            )

        with reporter.step("Download the object with attribute [get_by_attribute/$CID/chapter1/peace]"):
            get_object_by_attr_and_verify_hashes(
                oid=oid,
                file_name=test_file.path,
                cid=container,
                attrs=attrs,
                node=gateway_node,
            )

        with reporter.step("[Negative] try to get object: get_by_attribute/$CID/$OID"):
            # OID-style path on the `get_by_attribute` endpoint must be rejected.
            try_to_get_object_via_passed_request_and_expect_error(
                cid=container,
                oid=oid,
                node=gateway_node,
                error_pattern=expected_err_msg,
                http_request_path=f"/get_by_attribute/{container}/{oid}",
            )

    @allure.title("Put over s3, Get over HTTP with bucket name and key (object_size={object_size})")
    @pytest.mark.parametrize("s3_client", [AwsCliClient], indirect=True)
    def test_object_put_get_bucketname_key(self, test_file: TestFile, s3_client: S3ClientWrapper):
        """Put an object via the S3 gateway and fetch it over the HTTP gate.

        Downloads the object by bucket name and object key
        (``get/<bucket>/<key>``) and checks the downloaded copy hashes the
        same as both the original file and the S3-side copy.
        """
        object_key = s3_helper.object_key_from_file_path(test_file.path)

        bucket = s3_client.create_bucket(acl="public-read-write")
        s3_client.put_object(bucket=bucket, filepath=test_file.path, key=object_key)
        obj_s3 = s3_client.get_object(bucket=bucket, key=object_key)

        # HTTP gate addresses S3-stored objects by bucket/key, so no CID/OID.
        obj_http = get_via_http_gate(
            cid=None,
            oid=None,
            node=self.cluster.cluster_nodes[0],
            request_path=f"/get/{bucket}/{object_key}",
        )

        with reporter.step("Verify hashes"):
            assert_hashes_are_equal(test_file.path, obj_http, obj_s3)