import allure
import pytest
from frostfs_testlib import reporter
from frostfs_testlib.resources.wellknown_acl import PUBLIC_ACL
from frostfs_testlib.steps.cli.container import create_container
from frostfs_testlib.steps.cli.object import put_object_to_random_node
from frostfs_testlib.steps.epoch import get_epoch
from frostfs_testlib.steps.http.http_gate import (
    attr_into_header,
    get_object_by_attr_and_verify_hashes,
    get_via_http_curl,
    get_via_http_gate,
    get_via_zip_http_gate,
    try_to_get_object_and_expect_error,
    upload_via_http_gate,
    upload_via_http_gate_curl,
    verify_object_hash,
)
from frostfs_testlib.storage.dataclasses.object_size import ObjectSize
from frostfs_testlib.testing.cluster_test_base import ClusterTestBase
from frostfs_testlib.utils.file_utils import generate_file, get_file_hash

from ....helpers.utility import wait_for_gc_pass_on_storage_nodes

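# Error substring expected from the HTTP gate when a missing (e.g. expired and GC-removed) object is requested.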
OBJECT_NOT_FOUND_ERROR = "not found"


@allure.link(
    "https://git.frostfs.info/TrueCloudLab/frostfs-http-gw#frostfs-http-gateway",
    name="frostfs-http-gateway",
)
@allure.link("https://git.frostfs.info/TrueCloudLab/frostfs-http-gw#uploading", name="uploading")
@allure.link("https://git.frostfs.info/TrueCloudLab/frostfs-http-gw#downloading", name="downloading")
@pytest.mark.nightly
@pytest.mark.sanity
@pytest.mark.http_gate
class TestHttpGate(ClusterTestBase):
    PLACEMENT_RULE_1 = "REP 1 IN X CBF 1 SELECT 1 FROM * AS X"
    PLACEMENT_RULE_2 = "REP 2 IN X CBF 2 SELECT 2 FROM * AS X"

    @pytest.fixture(scope="class", autouse=True)
    @allure.title("[Class/Autouse]: Prepare wallet and deposit")
    def prepare_wallet(self, default_wallet):
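        # Store the default wallet as a class attribute so every test in this class can use self.wallet.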
        TestHttpGate.wallet = default_wallet

@allure.title("Put over gRPC, Get over HTTP")
|
|
def test_put_grpc_get_http(self, complex_object_size: ObjectSize, simple_object_size: ObjectSize):
|
|
"""
|
|
Test that object can be put using gRPC interface and get using HTTP.
|
|
|
|
Steps:
|
|
1. Create simple and large objects.
|
|
2. Put objects using gRPC (frostfs-cli).
|
|
3. Download objects using HTTP gate (https://git.frostfs.info/TrueCloudLab/frostfs-http-gw#downloading).
|
|
4. Get objects using gRPC (frostfs-cli).
|
|
5. Compare hashes for got objects.
|
|
6. Compare hashes for got and original objects.
|
|
|
|
Expected result:
|
|
Hashes must be the same.
|
|
"""
|
|
        cid = create_container(
            self.wallet,
            shell=self.shell,
            endpoint=self.cluster.default_rpc_endpoint,
            rule=self.PLACEMENT_RULE_1,
            basic_acl=PUBLIC_ACL,
        )
        file_path_simple = generate_file(simple_object_size.value)
        file_path_large = generate_file(complex_object_size.value)

        with reporter.step("Put objects using gRPC"):
            oid_simple = put_object_to_random_node(
                wallet=self.wallet,
                path=file_path_simple,
                cid=cid,
                shell=self.shell,
                cluster=self.cluster,
            )
            oid_large = put_object_to_random_node(
                wallet=self.wallet,
                path=file_path_large,
                cid=cid,
                shell=self.shell,
                cluster=self.cluster,
            )

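        # verify_object_hash (testlib helper) is expected to download each object back and compare
        # its hash against the original file; here it is used with the default HTTP-gate getter.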
        for oid, file_path in ((oid_simple, file_path_simple), (oid_large, file_path_large)):
            verify_object_hash(
                oid=oid,
                file_name=file_path,
                wallet=self.wallet,
                cid=cid,
                shell=self.shell,
                nodes=self.cluster.storage_nodes,
                request_node=self.cluster.cluster_nodes[0],
            )


@allure.link(
    "https://git.frostfs.info/TrueCloudLab/frostfs-http-gw#frostfs-http-gateway",
    name="frostfs-http-gateway",
)
@allure.link("https://git.frostfs.info/TrueCloudLab/frostfs-http-gw#uploading", name="uploading")
@allure.link("https://git.frostfs.info/TrueCloudLab/frostfs-http-gw#downloading", name="downloading")
@pytest.mark.http_gate
@pytest.mark.http_put
class TestHttpPut(ClusterTestBase):
    @allure.link("https://git.frostfs.info/TrueCloudLab/frostfs-http-gw#uploading", name="uploading")
    @allure.link("https://git.frostfs.info/TrueCloudLab/frostfs-http-gw#downloading", name="downloading")
    @allure.title("Put over HTTP, Get over HTTP")
    @pytest.mark.smoke
    def test_put_http_get_http(self, complex_object_size: ObjectSize, simple_object_size: ObjectSize):
"""
|
|
Test that object can be put and get using HTTP interface.
|
|
|
|
Steps:
|
|
1. Create simple and large objects.
|
|
2. Upload objects using HTTP (https://git.frostfs.info/TrueCloudLab/frostfs-http-gw#uploading).
|
|
3. Download objects using HTTP gate (https://git.frostfs.info/TrueCloudLab/frostfs-http-gw#downloading).
|
|
4. Compare hashes for got and original objects.
|
|
|
|
Expected result:
|
|
Hashes must be the same.
|
|
"""
|
|
        cid = create_container(
            self.wallet,
            shell=self.shell,
            endpoint=self.cluster.default_rpc_endpoint,
            rule=self.PLACEMENT_RULE_2,
            basic_acl=PUBLIC_ACL,
        )
        file_path_simple = generate_file(simple_object_size.value)
        file_path_large = generate_file(complex_object_size.value)

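        # Assumption per the linked #uploading docs: upload_via_http_gate sends a multipart POST to
        # the gate's /upload/{cid} endpoint and returns the OID of the created object.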
with reporter.step("Put objects using HTTP"):
|
|
oid_simple = upload_via_http_gate(cid=cid, path=file_path_simple, endpoint=self.cluster.default_http_gate_endpoint)
|
|
oid_large = upload_via_http_gate(cid=cid, path=file_path_large, endpoint=self.cluster.default_http_gate_endpoint)
|
|
|
|
for oid, file_path in ((oid_simple, file_path_simple), (oid_large, file_path_large)):
|
|
verify_object_hash(
|
|
oid=oid,
|
|
file_name=file_path,
|
|
wallet=self.wallet,
|
|
cid=cid,
|
|
shell=self.shell,
|
|
nodes=self.cluster.storage_nodes,
|
|
request_node=self.cluster.cluster_nodes[0],
|
|
)
|
|
|
|
    @allure.link(
        "https://git.frostfs.info/TrueCloudLab/frostfs-http-gw#by-attributes",
        name="download by attributes",
    )
    @allure.title("Put over HTTP, Get over HTTP with {id} header")
    @pytest.mark.parametrize(
        "attributes,id",
        [
            ({"fileName": "simple_obj_filename"}, "simple"),
            ({"file-Name": "simple obj filename"}, "hyphen"),
            ({"cat%jpeg": "cat%jpeg"}, "percent"),
        ],
        ids=["simple", "hyphen", "percent"],
    )
    def test_put_http_get_http_with_headers(self, attributes: dict, simple_object_size: ObjectSize, id: str):
"""
|
|
Test that object can be downloaded using different attributes in HTTP header.
|
|
|
|
Steps:
|
|
1. Create simple and large objects.
|
|
2. Upload objects using HTTP with particular attributes in the header.
|
|
3. Download objects by attributes using HTTP gate (https://git.frostfs.info/TrueCloudLab/frostfs-http-gw#by-attributes).
|
|
4. Compare hashes for got and original objects.
|
|
|
|
Expected result:
|
|
Hashes must be the same.
|
|
"""
|
|
        cid = create_container(
            self.wallet,
            shell=self.shell,
            endpoint=self.cluster.default_rpc_endpoint,
            rule=self.PLACEMENT_RULE_2,
            basic_acl=PUBLIC_ACL,
        )
        file_path = generate_file(simple_object_size.value)

        with reporter.step("Put object using HTTP with attribute"):
            headers = attr_into_header(attributes)
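            # attr_into_header is assumed to turn each attribute into an X-Attribute-<key> HTTP
            # header, which the gate stores as a user attribute on the uploaded object.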
            oid = upload_via_http_gate(
                cid=cid,
                path=file_path,
                headers=headers,
                endpoint=self.cluster.default_http_gate_endpoint,
            )

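        # Download by attribute instead of OID: per the #by-attributes docs this presumably maps to
        # GET /get_by_attribute/{cid}/{key}/{value}; the helper then compares hashes with the original file.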
        get_object_by_attr_and_verify_hashes(
            oid=oid,
            file_name=file_path,
            cid=cid,
            attrs=attributes,
            node=self.cluster.cluster_nodes[0],
        )

@allure.title("Expiration-Epoch in HTTP header (epoch_gap={epoch_gap})")
|
|
@pytest.mark.parametrize("epoch_gap", [0, 1])
|
|
def test_expiration_epoch_in_http(self, simple_object_size: ObjectSize, epoch_gap: int):
|
|
endpoint = self.cluster.default_rpc_endpoint
|
|
http_endpoint = self.cluster.default_http_gate_endpoint
|
|
min_valid_epoch = get_epoch(self.shell, self.cluster) + epoch_gap
|
|
|
|
cid = create_container(
|
|
self.wallet,
|
|
shell=self.shell,
|
|
endpoint=endpoint,
|
|
rule=self.PLACEMENT_RULE_2,
|
|
basic_acl=PUBLIC_ACL,
|
|
)
|
|
file_path = generate_file(simple_object_size.value)
|
|
oids_to_be_expired = []
|
|
oids_to_be_valid = []
|
|
|
|
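        # Upload the same file several times with different System-Expiration-Epoch values; after a
        # single epoch tick below, objects whose expiration epoch is lower than the new current
        # epoch should expire, while the rest must remain available.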
        for gap_until in (0, 1, 2, 100):
            valid_until = min_valid_epoch + gap_until
            headers = {"X-Attribute-System-Expiration-Epoch": str(valid_until)}

            with reporter.step("Put object using HTTP with attribute Expiration-Epoch"):
                oid = upload_via_http_gate(
                    cid=cid,
                    path=file_path,
                    headers=headers,
                    endpoint=http_endpoint,
                )
                if get_epoch(self.shell, self.cluster) + 1 <= valid_until:
                    oids_to_be_valid.append(oid)
                else:
                    oids_to_be_expired.append(oid)
            with reporter.step("Check that the object is available right after upload"):
                get_via_http_gate(cid=cid, oid=oid, node=self.cluster.cluster_nodes[0])

        self.tick_epoch()

        # Wait for GC, because an object with an expiration epoch is counted as alive until GC removes it
        wait_for_gc_pass_on_storage_nodes()

        for oid in oids_to_be_expired:
            with reporter.step(f"Object {oid} shall be expired and cannot be retrieved"):
                try_to_get_object_and_expect_error(
                    cid=cid,
                    oid=oid,
                    node=self.cluster.cluster_nodes[0],
                    error_pattern=OBJECT_NOT_FOUND_ERROR,
                )
        for oid in oids_to_be_valid:
            with reporter.step(f"Object {oid} shall be valid and can be retrieved"):
                get_via_http_gate(cid=cid, oid=oid, node=self.cluster.cluster_nodes[0])

@allure.title("Zip in HTTP header")
|
|
def test_zip_in_http(self, complex_object_size: ObjectSize, simple_object_size: ObjectSize):
|
|
cid = create_container(
|
|
self.wallet,
|
|
shell=self.shell,
|
|
endpoint=self.cluster.default_rpc_endpoint,
|
|
rule=self.PLACEMENT_RULE_2,
|
|
basic_acl=PUBLIC_ACL,
|
|
)
|
|
file_path_simple = generate_file(simple_object_size.value)
|
|
file_path_large = generate_file(complex_object_size.value)
|
|
common_prefix = "my_files"
|
|
|
|
headers1 = {"X-Attribute-FilePath": f"{common_prefix}/file1"}
|
|
headers2 = {"X-Attribute-FilePath": f"{common_prefix}/file2"}
|
|
|
|
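        # Both objects share a common FilePath prefix; the gate's zip download (assumed to be
        # GET /zip/{cid}/{prefix}) should return them packed into one archive, which
        # get_via_zip_http_gate unpacks into a local directory.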
        upload_via_http_gate(
            cid=cid,
            path=file_path_simple,
            headers=headers1,
            endpoint=self.cluster.default_http_gate_endpoint,
        )
        upload_via_http_gate(
            cid=cid,
            path=file_path_large,
            headers=headers2,
            endpoint=self.cluster.default_http_gate_endpoint,
        )

        dir_path = get_via_zip_http_gate(cid=cid, prefix=common_prefix, node=self.cluster.cluster_nodes[0])

        with reporter.step("Verify hashes"):
            assert get_file_hash(f"{dir_path}/file1") == get_file_hash(file_path_simple)
            assert get_file_hash(f"{dir_path}/file2") == get_file_hash(file_path_large)

    @pytest.mark.long
    @allure.title("Put over HTTP/Curl, Get over HTTP/Curl for large object")
    def test_put_http_get_http_large_file(self, complex_object_size: ObjectSize):
        """
        This test checks upload and download using curl with a 'large' object.
        A large object here is an object up to 20 MB in size.
        """
        cid = create_container(
            self.wallet,
            shell=self.shell,
            endpoint=self.cluster.default_rpc_endpoint,
            rule=self.PLACEMENT_RULE_2,
            basic_acl=PUBLIC_ACL,
        )

        file_path = generate_file(complex_object_size.value)

        with reporter.step("Put objects using HTTP"):
            oid_gate = upload_via_http_gate(cid=cid, path=file_path, endpoint=self.cluster.default_http_gate_endpoint)
            oid_curl = upload_via_http_gate_curl(
                cid=cid,
                filepath=file_path,
                endpoint=self.cluster.default_http_gate_endpoint,
            )

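        # Verify both uploads: the first object is fetched via the default HTTP-gate getter, the
        # second via curl (object_getter=get_via_http_curl); both are hash-compared with the original file.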
        verify_object_hash(
            oid=oid_gate,
            file_name=file_path,
            wallet=self.wallet,
            cid=cid,
            shell=self.shell,
            nodes=self.cluster.storage_nodes,
            request_node=self.cluster.cluster_nodes[0],
        )
        verify_object_hash(
            oid=oid_curl,
            file_name=file_path,
            wallet=self.wallet,
            cid=cid,
            shell=self.shell,
            nodes=self.cluster.storage_nodes,
            request_node=self.cluster.cluster_nodes[0],
            object_getter=get_via_http_curl,
        )

@allure.title("Put/Get over HTTP using Curl utility")
|
|
def test_put_http_get_http_curl(self, complex_object_size: ObjectSize, simple_object_size: ObjectSize):
|
|
"""
|
|
Test checks upload and download over HTTP using curl utility.
|
|
"""
|
|
        cid = create_container(
            self.wallet,
            shell=self.shell,
            endpoint=self.cluster.default_rpc_endpoint,
            rule=self.PLACEMENT_RULE_2,
            basic_acl=PUBLIC_ACL,
        )
        file_path_simple = generate_file(simple_object_size.value)
        file_path_large = generate_file(complex_object_size.value)

        with reporter.step("Put objects using curl utility"):
            oid_simple = upload_via_http_gate_curl(cid=cid, filepath=file_path_simple, endpoint=self.cluster.default_http_gate_endpoint)
            oid_large = upload_via_http_gate_curl(
                cid=cid,
                filepath=file_path_large,
                endpoint=self.cluster.default_http_gate_endpoint,
            )

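        # Both uploads went through curl, so the downloads below are also done with curl
        # (object_getter=get_via_http_curl) before comparing hashes with the originals.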
        for oid, file_path in ((oid_simple, file_path_simple), (oid_large, file_path_large)):
            verify_object_hash(
                oid=oid,
                file_name=file_path,
                wallet=self.wallet,
                cid=cid,
                shell=self.shell,
                nodes=self.cluster.storage_nodes,
                request_node=self.cluster.cluster_nodes[0],
                object_getter=get_via_http_curl,
            )