frostfs-testcases/pytest_tests/testsuites/services/http_gate/test_http_gate.py


import allure
import pytest
from frostfs_testlib import reporter
from frostfs_testlib.steps.cli.object import put_object_to_random_node
from frostfs_testlib.steps.epoch import get_epoch
from frostfs_testlib.steps.http_gate import (
attr_into_header,
get_object_by_attr_and_verify_hashes,
get_via_http_curl,
get_via_http_gate,
get_via_zip_http_gate,
try_to_get_object_and_expect_error,
upload_via_http_gate,
upload_via_http_gate_curl,
verify_object_hash,
)
from frostfs_testlib.storage.dataclasses.object_size import ObjectSize
from frostfs_testlib.storage.dataclasses.wallet import WalletInfo
from frostfs_testlib.testing.cluster_test_base import ClusterTestBase
from frostfs_testlib.utils.file_utils import TestFile, generate_file, get_file_hash
from ....helpers.container_request import REP_1_1_1_PUBLIC, REP_2_2_2_PUBLIC, requires_container
from ....helpers.utility import wait_for_gc_pass_on_storage_nodes

OBJECT_NOT_FOUND_ERROR = "not found"

@allure.link(
"https://git.frostfs.info/TrueCloudLab/frostfs-http-gw#frostfs-http-gateway",
name="frostfs-http-gateway",
)
@allure.link("https://git.frostfs.info/TrueCloudLab/frostfs-http-gw#uploading", name="uploading")
@allure.link("https://git.frostfs.info/TrueCloudLab/frostfs-http-gw#downloading", name="downloading")
@pytest.mark.nightly
@pytest.mark.sanity
@pytest.mark.http_gate
class TestHttpGate(ClusterTestBase):
@allure.title("Put over gRPC, Get over HTTP (object_size={object_size})")
@requires_container(REP_1_1_1_PUBLIC)
def test_put_grpc_get_http(self, default_wallet: WalletInfo, container: str, test_file: TestFile):
"""
Test that object can be put using gRPC interface and get using HTTP.
Steps:
1. Create object.
2. Put object using gRPC (frostfs-cli).
3. Download object using HTTP gate (https://git.frostfs.info/TrueCloudLab/frostfs-http-gw#downloading).
4. Get object using gRPC (frostfs-cli).
5. Compare hashes for got object.
6. Compare hashes for got and original objects.
Expected result:
Hashes must be the same.
"""
with reporter.step("Put object using gRPC"):
object_id = put_object_to_random_node(default_wallet, test_file.path, container, self.shell, self.cluster)
with reporter.step("Get object and check hash"):
verify_object_hash(
object_id,
test_file.path,
default_wallet,
container,
self.shell,
self.cluster.storage_nodes,
self.cluster.cluster_nodes[0],
)


@allure.link(
"https://git.frostfs.info/TrueCloudLab/frostfs-http-gw#frostfs-http-gateway",
name="frostfs-http-gateway",
)
@allure.link("https://git.frostfs.info/TrueCloudLab/frostfs-http-gw#uploading", name="uploading")
@allure.link("https://git.frostfs.info/TrueCloudLab/frostfs-http-gw#downloading", name="downloading")
@pytest.mark.http_gate
@pytest.mark.http_put
class TestHttpPut(ClusterTestBase):
@allure.link("https://git.frostfs.info/TrueCloudLab/frostfs-http-gw#uploading", name="uploading")
@allure.link("https://git.frostfs.info/TrueCloudLab/frostfs-http-gw#downloading", name="downloading")
@allure.title("Put over HTTP, Get over HTTP (object_size={object_size})")
@pytest.mark.smoke
@requires_container(REP_2_2_2_PUBLIC)
def test_put_http_get_http(self, container: str, default_wallet: WalletInfo, test_file: TestFile):
"""
Test that object can be put and get using HTTP interface.
Steps:
1. Create simple and large objects.
2. Upload objects using HTTP (https://git.frostfs.info/TrueCloudLab/frostfs-http-gw#uploading).
3. Download objects using HTTP gate (https://git.frostfs.info/TrueCloudLab/frostfs-http-gw#downloading).
4. Compare hashes for got and original objects.
Expected result:
Hashes must be the same.
"""
with reporter.step("Put object using HTTP"):
object_id = upload_via_http_gate(container, test_file.path, self.cluster.default_http_gate_endpoint)
with reporter.step("Get object and check hash"):
verify_object_hash(
object_id,
test_file.path,
default_wallet,
container,
self.shell,
self.cluster.storage_nodes,
self.cluster.cluster_nodes[0],
)

@allure.link(
"https://git.frostfs.info/TrueCloudLab/frostfs-http-gw#by-attributes",
name="download by attributes",
)
@allure.title("Put over HTTP, Get over HTTP with {id} header")
@pytest.mark.parametrize(
"attributes,id",
[
({"fileName": "simple_obj_filename"}, "simple"),
({"file-Name": "simple obj filename"}, "hyphen"),
({"cat%jpeg": "cat%jpeg"}, "percent"),
],
ids=["simple", "hyphen", "percent"],
)
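# The cases cover a plain attribute name, a hyphenated name with a space in the value, and percent signs in name and value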
@requires_container(REP_2_2_2_PUBLIC)
def test_put_http_get_http_with_headers(self, container: str, attributes: dict, simple_object_size: ObjectSize, id: str):
"""
Test that object can be downloaded using different attributes in HTTP header.
Steps:
1. Create simple and large objects.
2. Upload objects using HTTP with particular attributes in the header.
3. Download objects by attributes using HTTP gate (https://git.frostfs.info/TrueCloudLab/frostfs-http-gw#by-attributes).
4. Compare hashes for got and original objects.
Expected result:
Hashes must be the same.
"""
file_path = generate_file(simple_object_size.value)
with reporter.step("Put objects using HTTP with attribute"):
headers = attr_into_header(attributes)
oid = upload_via_http_gate(container, file_path, self.cluster.default_http_gate_endpoint, headers)
get_object_by_attr_and_verify_hashes(
oid,
file_path,
container,
attributes,
self.cluster.cluster_nodes[0],
)

@allure.title("Expiration-Epoch in HTTP header (epoch_gap={epoch_gap})")
@pytest.mark.parametrize("epoch_gap", [0, 1])
@requires_container(REP_2_2_2_PUBLIC)
def test_expiration_epoch_in_http(self, container: str, simple_object_size: ObjectSize, epoch_gap: int):
http_endpoint = self.cluster.default_http_gate_endpoint
min_valid_epoch = get_epoch(self.shell, self.cluster) + epoch_gap
file_path = generate_file(simple_object_size.value)
oids_to_be_expired = []
oids_to_be_valid = []
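# Upload objects whose expiration epoch is offset from (current epoch + epoch_gap) by 0, 1, 2 and 100 epochs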
for gap_until in (0, 1, 2, 100):
valid_until = min_valid_epoch + gap_until
headers = {"X-Attribute-System-Expiration-Epoch": str(valid_until)}
with reporter.step("Put objects using HTTP with attribute Expiration-Epoch"):
oid = upload_via_http_gate(
cid=container,
path=file_path,
headers=headers,
endpoint=http_endpoint,
)
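# Objects whose expiration epoch is below the next epoch are expected to disappear after a single epoch tick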
if get_epoch(self.shell, self.cluster) + 1 <= valid_until:
oids_to_be_valid.append(oid)
else:
oids_to_be_expired.append(oid)
with reporter.step("This object can be got"):
get_via_http_gate(container, oid, self.cluster.cluster_nodes[0])
self.tick_epoch()
# Wait for GC, because object with expiration is counted as alive until GC removes it
wait_for_gc_pass_on_storage_nodes()
for oid in oids_to_be_expired:
with reporter.step(f"{oid} shall be expired and cannot be got"):
try_to_get_object_and_expect_error(
cid=container,
oid=oid,
node=self.cluster.cluster_nodes[0],
error_pattern=OBJECT_NOT_FOUND_ERROR,
)
for oid in oids_to_be_valid:
with reporter.step(f"{oid} shall be valid and can be got"):
get_via_http_gate(cid=container, oid=oid, node=self.cluster.cluster_nodes[0])

@allure.title("Zip in HTTP header")
@requires_container(REP_2_2_2_PUBLIC)
def test_zip_in_http(self, container: str, complex_object_size: ObjectSize, simple_object_size: ObjectSize):
file_path_simple = generate_file(simple_object_size.value)
file_path_large = generate_file(complex_object_size.value)
common_prefix = "my_files"
headers1 = {"X-Attribute-FilePath": f"{common_prefix}/file1"}
headers2 = {"X-Attribute-FilePath": f"{common_prefix}/file2"}
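# Both objects share the "my_files" FilePath prefix, so the gate can return them together in one ZIP archive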
upload_via_http_gate(
cid=container,
path=file_path_simple,
headers=headers1,
endpoint=self.cluster.default_http_gate_endpoint,
)
upload_via_http_gate(container, file_path_large, headers2, self.cluster.default_http_gate_endpoint)
dir_path = get_via_zip_http_gate(cid=container, prefix=common_prefix, node=self.cluster.cluster_nodes[0])
with reporter.step("Verify hashes"):
assert get_file_hash(f"{dir_path}/file1") == get_file_hash(file_path_simple)
assert get_file_hash(f"{dir_path}/file2") == get_file_hash(file_path_large)

@allure.title("Put over HTTP/Curl, Get over HTTP/Curl for large object")
@requires_container(REP_2_2_2_PUBLIC)
def test_put_http_get_http_large_file(self, default_wallet: WalletInfo, container: str, complex_object_size: ObjectSize):
"""
This test checks upload and download using curl with 'large' object.
Large is object with size up to 20Mb.
"""
file_path = generate_file(complex_object_size.value)
with reporter.step("Put objects using HTTP"):
oid_gate = upload_via_http_gate(cid=container, path=file_path, endpoint=self.cluster.default_http_gate_endpoint)
oid_curl = upload_via_http_gate_curl(
cid=container,
filepath=file_path,
endpoint=self.cluster.default_http_gate_endpoint,
)
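# Check both uploads; the curl-uploaded object is also downloaded back with curl (object_getter=get_via_http_curl)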
verify_object_hash(
oid=oid_gate,
file_name=file_path,
wallet=default_wallet,
cid=container,
shell=self.shell,
nodes=self.cluster.storage_nodes,
request_node=self.cluster.cluster_nodes[0],
)
verify_object_hash(
oid=oid_curl,
file_name=file_path,
wallet=default_wallet,
cid=container,
shell=self.shell,
nodes=self.cluster.storage_nodes,
request_node=self.cluster.cluster_nodes[0],
object_getter=get_via_http_curl,
)
@allure.title("Put/Get over HTTP using Curl utility (object_size={object_size})")
@requires_container(REP_2_2_2_PUBLIC)
def test_put_http_get_http_curl(self, default_wallet: WalletInfo, container: str, test_file: TestFile):
"""
Test checks upload and download over HTTP using curl utility.
"""
with reporter.step("Put object using curl utility"):
object_id = upload_via_http_gate_curl(container, test_file.path, self.cluster.default_http_gate_endpoint)
with reporter.step("Get object and check hash"):
verify_object_hash(
object_id,
test_file.path,
default_wallet,
container,
self.shell,
self.cluster.storage_nodes,
self.cluster.cluster_nodes[0],
get_via_http_curl,
)