Update test titles to conform to standard

master
Andrey Berezin 2023-09-08 13:35:34 +03:00 committed by Andrey Berezin
parent 4d2e27a317
commit 1cd077fdf3
31 changed files with 274 additions and 518 deletions

View File

@ -16,7 +16,6 @@ from frostfs_testlib.storage.dataclasses.frostfs_services import InnerRing, Stor
from frostfs_testlib.storage.dataclasses.object_size import ObjectSize
from frostfs_testlib.utils import wallet_utils
from frostfs_testlib.utils.file_utils import generate_file
from pytest_lazyfixture import lazy_fixture
OBJECT_COUNT = 5
@ -70,15 +69,9 @@ def wallets(default_wallet: str, temp_directory: str, cluster: Cluster) -> Walle
)
@pytest.fixture(params=[lazy_fixture("simple_object_size"), lazy_fixture("complex_object_size")])
def file_size(request: pytest.FixtureRequest) -> int:
size = request.param
return size
@pytest.fixture()
def file_path(file_size: ObjectSize) -> str:
yield generate_file(file_size.value)
def file_path(object_size: ObjectSize) -> str:
yield generate_file(object_size.value)
@pytest.fixture(scope="function")

View File

@ -68,7 +68,7 @@ class TestACLBasic(ClusterTestBase):
# with allure.step('Delete public readonly container'):
# delete_container(user_wallet.wallet_path, cid_read_only)
@allure.title("Operations with basic ACL on public container (obj_size={file_size})")
@allure.title("Operations with basic ACL on public container (obj_size={object_size})")
def test_basic_acl_public(self, wallets: Wallets, public_container: str, file_path: str):
"""
Test basic ACL set during public container creation.
@ -114,7 +114,7 @@ class TestACLBasic(ClusterTestBase):
cluster=self.cluster,
)
@allure.title("Operations with basic ACL on PRIVATE container (obj_size={file_size})")
@allure.title("Operations with basic ACL on PRIVATE container (obj_size={object_size})")
def test_basic_acl_private(self, wallets: Wallets, private_container: str, file_path: str):
"""
Test basic ACL set during private container creation.
@ -148,7 +148,7 @@ class TestACLBasic(ClusterTestBase):
cluster=self.cluster,
)
@allure.title("Operations with basic ACL on READONLY container (obj_size={file_size})")
@allure.title("Operations with basic ACL on READONLY container (obj_size={object_size})")
def test_basic_acl_readonly(
self, wallets: Wallets, client_shell: Shell, read_only_container: str, file_path: str
):

View File

@ -21,7 +21,7 @@ from pytest_tests.testsuites.acl.conftest import Wallets
@pytest.mark.acl
@pytest.mark.acl_bearer
class TestACLBearer(ClusterTestBase):
@allure.title("Operations with BearerToken (role={role.value}, obj_size={file_size})")
@allure.title("Operations with BearerToken (role={role.value}, obj_size={object_size})")
@pytest.mark.parametrize("role", [EACLRole.USER, EACLRole.OTHERS])
def test_bearer_token_operations(
self,
@ -113,7 +113,7 @@ class TestACLBearer(ClusterTestBase):
cluster=self.cluster,
)
@allure.title("BearerToken for compound operations (obj_size={file_size})")
@allure.title("BearerToken for compound operations (obj_size={object_size})")
def test_bearer_token_compound_operations(self, wallets, eacl_container_with_objects):
endpoint = self.cluster.default_rpc_endpoint
cid, objects_oids, file_path = eacl_container_with_objects

View File

@ -58,7 +58,7 @@ class TestEACLContainer(ClusterTestBase):
yield cid, oid, file_path
@allure.title("Deny operations (role={deny_role.value}, obj_size={file_size})")
@allure.title("Deny operations (role={deny_role.value}, obj_size={object_size})")
@pytest.mark.parametrize("deny_role", [EACLRole.USER, EACLRole.OTHERS])
def test_extended_acl_deny_all_operations(
self,
@ -145,7 +145,7 @@ class TestEACLContainer(ClusterTestBase):
cluster=self.cluster,
)
@allure.title("Operations for only one other pubkey (obj_size={file_size})")
@allure.title("Operations for only one other pubkey (obj_size={object_size})")
def test_extended_acl_deny_all_operations_exclude_pubkey(
self, wallets: Wallets, eacl_container_with_objects: tuple[str, list[str], str]
):
@ -206,7 +206,7 @@ class TestEACLContainer(ClusterTestBase):
cluster=self.cluster,
)
@allure.title("Replication with eACL deny rules (obj_size={file_size})")
@allure.title("Replication with eACL deny rules (obj_size={object_size})")
def test_extended_acl_deny_replication(
self,
wallets: Wallets,
@ -248,7 +248,7 @@ class TestEACLContainer(ClusterTestBase):
storage_nodes,
)
@allure.title("Operations with extended ACL for SYSTEM (obj_size={file_size})")
@allure.title("Operations with extended ACL for SYSTEM (obj_size={object_size})")
def test_extended_actions_system(
self, wallets: Wallets, eacl_container_with_objects: tuple[str, list[str], str]
):

View File

@ -128,7 +128,9 @@ class TestEACLFilters(ClusterTestBase):
endpoint=self.cluster.default_rpc_endpoint,
)
@allure.title("Operations with request filter (match_type={match_type}, obj_size={file_size})")
@allure.title(
"Operations with request filter (match_type={match_type}, obj_size={object_size})"
)
@pytest.mark.parametrize(
"match_type", [EACLMatchType.STRING_EQUAL, EACLMatchType.STRING_NOT_EQUAL]
)
@ -246,7 +248,7 @@ class TestEACLFilters(ClusterTestBase):
)
@allure.title(
"Operations with deny user headers filter (match_type={match_type}, obj_size={file_size})"
"Operations with deny user headers filter (match_type={match_type}, obj_size={object_size})"
)
@pytest.mark.parametrize(
"match_type", [EACLMatchType.STRING_EQUAL, EACLMatchType.STRING_NOT_EQUAL]
@ -431,7 +433,7 @@ class TestEACLFilters(ClusterTestBase):
)
@allure.title(
"Operations with allow eACL user headers filters (match_type={match_type}, obj_size={file_size})"
"Operations with allow eACL user headers filters (match_type={match_type}, obj_size={object_size})"
)
@pytest.mark.parametrize(
"match_type", [EACLMatchType.STRING_EQUAL, EACLMatchType.STRING_NOT_EQUAL]

View File

@ -115,17 +115,29 @@ def max_object_size(cluster: Cluster, client_shell: Shell) -> int:
@pytest.fixture(scope="session")
def simple_object_size(max_object_size: int) -> int:
def simple_object_size(max_object_size: int) -> ObjectSize:
size = min(int(SIMPLE_OBJECT_SIZE), max_object_size)
return ObjectSize("simple", size)
@pytest.fixture(scope="session")
def complex_object_size(max_object_size: int) -> int:
def complex_object_size(max_object_size: int) -> ObjectSize:
size = max_object_size * int(COMPLEX_OBJECT_CHUNKS_COUNT) + int(COMPLEX_OBJECT_TAIL_SIZE)
return ObjectSize("complex", size)
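For reference, a minimal sketch of the shape these fixtures assume for ObjectSize, inferred from the ObjectSize("simple", size) calls and the object_size.value accesses in this commit; the real class lives in frostfs_testlib.storage.dataclasses.object_size, and the name of the first field is an assumption.

from dataclasses import dataclass

@dataclass
class ObjectSize:
    name: str   # "simple" or "complex"; field name assumed
    value: int  # payload size in bytes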
# By default we want all tests to be executed with both object sizes
# This can be overridden in chosen tests if needed
@pytest.fixture(scope="session", params=["simple", "complex"])
def object_size(
simple_object_size: ObjectSize, complex_object_size: ObjectSize, request: pytest.FixtureRequest
) -> ObjectSize:
if request.param == "simple":
return simple_object_size
return complex_object_size
@pytest.fixture(scope="session")
def wallet_factory(temp_directory: str, client_shell: Shell, cluster: Cluster) -> WalletFactory:
return WalletFactory(temp_directory, client_shell, cluster)
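A minimal sketch (hypothetical test class, assuming the conftest above is on the fixture path) of how a test consumes the session-scoped object_size fixture, how its value feeds the {object_size} placeholder in allure titles, and how a test narrows the default parametrization, mirroring the parametrize(..., indirect=True) overrides used later in this commit.

import allure
import pytest

from frostfs_testlib.storage.dataclasses.object_size import ObjectSize
from frostfs_testlib.utils.file_utils import generate_file


class TestObjectSizeUsage:
    # Runs once per object_size param ("simple" and "complex") by default;
    # allure-pytest substitutes the parametrized value into the {object_size} placeholder.
    @allure.title("Example operation (obj_size={object_size})")
    def test_both_sizes(self, object_size: ObjectSize):
        file_path = generate_file(object_size.value)
        assert file_path

    # Override the default parametrization where only one size is meaningful.
    @allure.title("Example operation, simple objects only (obj_size={object_size})")
    @pytest.mark.parametrize("object_size", ["simple"], indirect=True)
    def test_simple_only(self, object_size: ObjectSize):
        assert object_size.value > 0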

View File

@ -22,8 +22,8 @@ class TestContainer(ClusterTestBase):
@pytest.mark.parametrize("name", ["", "test-container"], ids=["No name", "Set particular name"])
@pytest.mark.smoke
def test_container_creation(self, default_wallet: str, name: str):
scenario_title = f"with name {name}" if name else "without name"
allure.dynamic.title(f"User can create container {scenario_title}")
scenario_title = "with name" if name else "without name"
allure.dynamic.title(f"Create container {scenario_title}")
wallet = default_wallet
with open(wallet) as file:

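The container test above keeps allure.dynamic.title because its wording depends on runtime branching; elsewhere this commit switches to static titles with placeholders. A minimal sketch (hypothetical tests, assuming allure-pytest) contrasting the two styles:

import allure
import pytest


# Static title: allure fills {name} from the test's own parameter.
@allure.title("Create container (name={name})")
@pytest.mark.parametrize("name", ["test-container"])
def test_static_title(name: str):
    assert name


# Dynamic title: set at runtime when the wording needs conditional logic.
@pytest.mark.parametrize("name", ["", "test-container"], ids=["No name", "Set particular name"])
def test_dynamic_title(name: str):
    scenario_title = "with name" if name else "without name"
    allure.dynamic.title(f"Create container {scenario_title}")
    assert isinstance(name, str)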
View File

@ -97,7 +97,7 @@ def return_stopped_hosts(shell: Shell, cluster: Cluster) -> None:
@pytest.mark.failover
class TestFailoverStorage(ClusterTestBase):
@allure.title("Lose and return storage node's host ({stop_mode} stop)")
@allure.title("Shutdown and start node (stop_mode={stop_mode})")
@pytest.mark.parametrize("stop_mode", ["hard", "soft"])
@pytest.mark.failover_reboot
def test_lose_storage_node_host(
@ -157,7 +157,7 @@ class TestFailoverStorage(ClusterTestBase):
)
assert get_file_hash(source_file_path) == get_file_hash(got_file_path)
@allure.title("Panic storage node's host (sequenced_reboots={sequence})")
@allure.title("Panic reboot nodes (sequenced_reboots={sequence})")
@pytest.mark.parametrize("sequence", [True, False])
@pytest.mark.failover_panic
def test_panic_storage_node_host(
@ -229,7 +229,7 @@ class TestFailoverStorage(ClusterTestBase):
)
assert get_file_hash(source_file_path) == get_file_hash(got_file_path)
@allure.title("{s3_client}: Do not ignore unhealthy tree endpoints")
@allure.title("Do not ignore unhealthy tree endpoints (s3_client={s3_client})")
def test_unhealthy_tree(
self,
s3_client: S3ClientWrapper,
@ -294,7 +294,7 @@ class TestEmptyMap(ClusterTestBase):
@test_case.suite_name("failovers")
@test_case.suite_section("test_failover_storage")
@pytest.mark.failover_empty_map_offlne
@allure.title("{s3_client}: empty network map (offline all storage nodes)")
@allure.title("Empty network map via offline all storage nodes (s3_client={s3_client})")
def test_offline_all_storage_nodes(
self,
s3_client: S3ClientWrapper,
@ -364,7 +364,7 @@ class TestEmptyMap(ClusterTestBase):
@test_case.suite_name("failovers")
@test_case.suite_section("test_failover_storage")
@pytest.mark.failover_empty_map_stop_service
@allure.title("{s3_client}: empty network map (stop storage service on all nodes)")
@allure.title("Empty network map via stop all storage services (s3_client={s3_client})")
def test_stop_all_storage_nodes(
self,
s3_client: S3ClientWrapper,
@ -439,7 +439,7 @@ class TestEmptyMap(ClusterTestBase):
check_node_in_map(node, shell=self.shell, alive_node=node)
stopped_nodes.remove(node)
@allure.title("{s3_client}: Object loss from fstree/blobovnicza, versioning is enabled")
@allure.title("Object loss from fstree/blobovnicza (versioning=enabled, s3_client={s3_client})")
def test_s3_fstree_blobovnicza_loss_versioning_on(
self,
s3_client: S3ClientWrapper,
@ -484,7 +484,9 @@ class TestEmptyMap(ClusterTestBase):
with allure.step("Delete bucket"):
s3_client.delete_bucket(bucket)
@allure.title("{s3_client}: Object loss from fstree/blobovnicza, versioning is disabled")
@allure.title(
"Object loss from fstree/blobovnicza (versioning=disabled, s3_client={s3_client})"
)
def test_s3_fstree_blobovnicza_loss_versioning_off(
self,
s3_client: S3ClientWrapper,
@ -526,7 +528,7 @@ class TestEmptyMap(ClusterTestBase):
[VersioningStatus.ENABLED, VersioningStatus.UNDEFINED],
)
@allure.title(
"{s3_client}: After Pilorama.db loss on all nodes list objects should return nothing in second listing (versioning_status {versioning_status})"
"After Pilorama.db loss on all nodes list objects should return nothing in second listing (versioning_status={versioning_status}, s3_client={s3_client})"
)
def test_s3_pilorama_loss(
self,
@ -584,7 +586,7 @@ class TestStorageDataLoss(ClusterTestBase):
return piloramas
@allure.title(
"{s3_client}: After metabase loss on all nodes operations on objects and buckets should be still available via S3"
"After metabase loss on all nodes operations on objects and buckets should be still available via S3 (s3_client={s3_client})"
)
@pytest.mark.metabase_loss
def test_metabase_loss(
@ -737,7 +739,7 @@ class TestStorageDataLoss(ClusterTestBase):
assert not exception_messages, "\n".join(exception_messages)
@allure.title(
"{s3_client}: Loss of one node should trigger use of tree and storage service in another node"
"Loss of one node should trigger use of tree and storage service in another node (s3_client={s3_client})"
)
def test_s3_one_endpoint_loss(
self,
@ -763,7 +765,9 @@ class TestStorageDataLoss(ClusterTestBase):
put_object = s3_client.put_object(bucket, file_path)
s3_helper.check_objects_in_bucket(s3_client, bucket, expected_objects=[file_name])
@allure.title("{s3_client}: After Pilorama.db loss on one node object are retrievable")
@allure.title(
"After Pilorama.db loss on one node object is retrievable (s3_client={s3_client})"
)
def test_s3_one_pilorama_loss(
self,
s3_client: S3ClientWrapper,

View File

@ -225,7 +225,7 @@ class TestNodeManagement(ClusterTestBase):
],
)
@pytest.mark.node_mgmt
@allure.title("Object should have {expected_copies} copies with policy {placement_rule}")
@allure.title("Placement policy (copies={expected_copies}, policy={placement_rule})")
def test_placement_policy(
self, default_wallet, placement_rule, expected_copies, simple_object_size: ObjectSize
):
@ -286,9 +286,7 @@ class TestNodeManagement(ClusterTestBase):
],
)
@pytest.mark.node_mgmt
@allure.title(
"Object should have copies on nodes {expected_nodes_id} with policy {placement_rule}"
)
@allure.title("Placement policy (nodes_id={expected_nodes_id}, policy={placement_rule})")
def test_placement_policy_with_nodes(
self,
default_wallet,
@ -318,7 +316,7 @@ class TestNodeManagement(ClusterTestBase):
],
)
@pytest.mark.node_mgmt
@allure.title("[NEGATIVE] Placement policy: {placement_rule}")
@allure.title("[NEGATIVE] Placement policy (policy={placement_rule})")
def test_placement_policy_negative(
self, default_wallet, placement_rule, expected_copies, simple_object_size: ObjectSize
):
@ -331,7 +329,7 @@ class TestNodeManagement(ClusterTestBase):
self.validate_object_copies(wallet, placement_rule, file_path, expected_copies)
@pytest.mark.node_mgmt
@allure.title("FrostFS object could be dropped using control command")
@allure.title("Drop object using control command")
def test_drop_object(
self, default_wallet, complex_object_size: ObjectSize, simple_object_size: ObjectSize
):

View File

@ -29,7 +29,6 @@ from frostfs_testlib.storage.dataclasses.object_size import ObjectSize
from frostfs_testlib.storage.dataclasses.storage_object_info import StorageObjectInfo
from frostfs_testlib.testing.cluster_test_base import ClusterTestBase
from frostfs_testlib.utils.file_utils import generate_file, get_file_content, get_file_hash
from pytest import FixtureRequest
logger = logging.getLogger("NeoLogger")
@ -90,18 +89,15 @@ def generate_ranges(
@pytest.fixture(
params=[pytest.lazy_fixture("simple_object_size"), pytest.lazy_fixture("complex_object_size")],
ids=["simple object size", "complex object size"],
# Scope session to upload/delete each files set only once
scope="module",
scope="module"
)
def storage_objects(
default_wallet: str, client_shell: Shell, cluster: Cluster, request: FixtureRequest
default_wallet: str, client_shell: Shell, cluster: Cluster, object_size: ObjectSize
) -> list[StorageObjectInfo]:
wallet = default_wallet
# Separate containers for complex/simple objects to avoid side-effects
cid = create_container(wallet, shell=client_shell, endpoint=cluster.default_rpc_endpoint)
object_size: ObjectSize = request.param
file_path = generate_file(object_size.value)
file_hash = get_file_hash(file_path)
@ -138,19 +134,15 @@ def storage_objects(
@pytest.mark.sanity
@pytest.mark.grpc_api
class TestObjectApi(ClusterTestBase):
@allure.title("Validate object storage policy by native API for {storage_objects}")
@allure.title("Storage policy by native API (obj_size={object_size})")
def test_object_storage_policies(
self,
request: FixtureRequest,
storage_objects: list[StorageObjectInfo],
simple_object_size: ObjectSize,
):
"""
Validate object storage policy
"""
allure.dynamic.title(
f"Validate object storage policy by native API for {request.node.callspec.id}"
)
with allure.step("Validate storage policy for objects"):
for storage_object in storage_objects:
@ -172,14 +164,11 @@ class TestObjectApi(ClusterTestBase):
)
assert copies == 2, "Expected 2 copies"
@allure.title("Validate get object native API for {storage_objects}")
def test_get_object_api(
self, request: FixtureRequest, storage_objects: list[StorageObjectInfo]
):
@allure.title("Get object by native API (obj_size={object_size})")
def test_get_object_api(self, storage_objects: list[StorageObjectInfo]):
"""
Validate get object native API
"""
allure.dynamic.title(f"Validate get object native API for {request.node.callspec.id}")
with allure.step("Get objects and compare hashes"):
for storage_object in storage_objects:
@ -193,14 +182,11 @@ class TestObjectApi(ClusterTestBase):
file_hash = get_file_hash(file_path)
assert storage_object.file_hash == file_hash
@allure.title("Validate head object native API for {storage_objects}")
def test_head_object_api(
self, request: FixtureRequest, storage_objects: list[StorageObjectInfo]
):
@allure.title("Head object by native API (obj_size={object_size})")
def test_head_object_api(self, storage_objects: list[StorageObjectInfo]):
"""
Validate head object native API
"""
allure.dynamic.title(f"Validate head object by native API for {request.node.callspec.id}")
storage_object_1 = storage_objects[0]
storage_object_2 = storage_objects[1]
@ -222,14 +208,11 @@ class TestObjectApi(ClusterTestBase):
)
self.check_header_is_presented(head_info, storage_object_2.attributes)
@allure.title("Validate object search by native API for {storage_objects}")
def test_search_object_api(
self, request: FixtureRequest, storage_objects: list[StorageObjectInfo]
):
@allure.title("Search objects by native API (obj_size={object_size})")
def test_search_object_api(self, storage_objects: list[StorageObjectInfo]):
"""
Validate object search by native API
"""
allure.dynamic.title(f"Validate object search by native API for {request.node.callspec.id}")
oids = [storage_object.oid for storage_object in storage_objects]
wallet = storage_objects[0].wallet_file_path
@ -266,12 +249,7 @@ class TestObjectApi(ClusterTestBase):
)
assert sorted(expected_oids) == sorted(result)
@allure.title("Validate object search with removed items for {object_size}")
@pytest.mark.parametrize(
"object_size",
[pytest.lazy_fixture("simple_object_size"), pytest.lazy_fixture("complex_object_size")],
ids=["simple object size", "complex object size"],
)
@allure.title("Search objects with removed items (obj_size={object_size})")
def test_object_search_should_return_tombstone_items(
self, default_wallet: str, object_size: ObjectSize
):
@ -336,18 +314,13 @@ class TestObjectApi(ClusterTestBase):
object_type == "TOMBSTONE"
), f"Object wasn't deleted properly. Found object {tombstone_oid} with type {object_type}"
@allure.title("Validate native get_range_hash object API for {storage_objects}")
@allure.title("Get range hash by native API (obj_size={object_size})")
@pytest.mark.sanity
@pytest.mark.grpc_api
def test_object_get_range_hash(
self, request: FixtureRequest, storage_objects: list[StorageObjectInfo], max_object_size
):
def test_object_get_range_hash(self, storage_objects: list[StorageObjectInfo], max_object_size):
"""
Validate get_range_hash for object by native gRPC API
"""
allure.dynamic.title(
f"Validate native get_range_hash object API for {request.node.callspec.id}"
)
wallet = storage_objects[0].wallet_file_path
cid = storage_objects[0].cid
@ -375,16 +348,13 @@ class TestObjectApi(ClusterTestBase):
get_file_hash(file_path, range_len, range_start) == range_hash
), f"Expected range hash to match {range_cut} slice of file payload"
@allure.title("Validate native get_range object API for {storage_objects}")
@allure.title("Get range by native API (obj_size={object_size})")
@pytest.mark.sanity
@pytest.mark.grpc_api
def test_object_get_range(
self, request: FixtureRequest, storage_objects: list[StorageObjectInfo], max_object_size
):
def test_object_get_range(self, storage_objects: list[StorageObjectInfo], max_object_size):
"""
Validate get_range for object by native gRPC API
"""
allure.dynamic.title(f"Validate native get_range object API for {request.node.callspec.id}")
wallet = storage_objects[0].wallet_file_path
cid = storage_objects[0].cid
@ -415,22 +385,16 @@ class TestObjectApi(ClusterTestBase):
== range_content
), f"Expected range content to match {range_cut} slice of file payload"
@allure.title(
"[NEGATIVE] Invalid range in get_range native object API should return error for {storage_objects}"
)
@allure.title("[NEGATIVE] Get invalid range by native API (obj_size={object_size})")
@pytest.mark.sanity
@pytest.mark.grpc_api
def test_object_get_range_negatives(
self,
request: FixtureRequest,
storage_objects: list[StorageObjectInfo],
):
"""
Validate get_range negative for object by native gRPC API
"""
allure.dynamic.title(
f"[NEGATIVE] Invalid range in get_range native object API should return error for {request.node.callspec.id}"
)
wallet = storage_objects[0].wallet_file_path
cid = storage_objects[0].cid
@ -474,20 +438,14 @@ class TestObjectApi(ClusterTestBase):
range_cut=range_cut,
)
@allure.title(
"[NEGATIVE] Invalid range in get_range_hash native object API should return error for {storage_objects}"
)
@allure.title("[NEGATIVE] Get invalid range hash by native API (obj_size={object_size})")
def test_object_get_range_hash_negatives(
self,
request: FixtureRequest,
storage_objects: list[StorageObjectInfo],
):
"""
Validate get_range_hash negative for object by native gRPC API
"""
allure.dynamic.title(
f"[NEGATIVE] Invalid range in get_range_hash native object API should return error for {request.node.callspec.id}"
)
wallet = storage_objects[0].wallet_file_path
cid = storage_objects[0].cid

View File

@ -64,13 +64,12 @@ def user_container(
def storage_objects(
user_container: StorageContainer,
bearer_token_file_all_allow: str,
request: FixtureRequest,
object_size: ObjectSize,
client_shell: Shell,
cluster: Cluster,
) -> list[StorageObjectInfo]:
epoch = get_epoch(client_shell, cluster)
storage_objects: list[StorageObjectInfo] = []
object_size: ObjectSize = request.param
for node in cluster.storage_nodes:
storage_objects.append(
user_container.generate_object(
@ -87,27 +86,18 @@ def storage_objects(
@pytest.mark.bearer
class TestObjectApiWithBearerToken(ClusterTestBase):
@allure.title(
"Object can be deleted from any node using s3gate wallet with bearer token for {storage_objects}"
"Object can be deleted from any node using s3gate wallet with bearer token (obj_size={object_size})"
)
@pytest.mark.parametrize(
"storage_objects,user_container",
[
(pytest.lazy_fixture("simple_object_size"), SINGLE_PLACEMENT_RULE),
(pytest.lazy_fixture("complex_object_size"), SINGLE_PLACEMENT_RULE),
],
ids=["simple object size", "complex object size"],
"user_container",
[SINGLE_PLACEMENT_RULE],
indirect=True,
)
def test_delete_object_with_s3_wallet_bearer(
self,
storage_objects: list[StorageObjectInfo],
bearer_token_file_all_allow: str,
request: FixtureRequest,
):
allure.dynamic.title(
f"Object can be deleted from any node using s3gate wallet with bearer token for {request.node.callspec.id}"
)
s3_gate_wallet = self.cluster.s3_gates[0]
with allure.step("Try to delete each object from first storage node"):
for storage_object in storage_objects:
@ -123,16 +113,12 @@ class TestObjectApiWithBearerToken(ClusterTestBase):
)
@allure.title(
"Object can be fetched from any node using s3gate wallet with bearer token for {object_size}"
"Object can be fetched from any node using s3gate wallet with bearer token (obj_size={object_size})"
)
@pytest.mark.parametrize(
"object_size, user_container",
[
(pytest.lazy_fixture("simple_object_size"), REP_2_FOR_3_NODES_PLACEMENT_RULE),
(pytest.lazy_fixture("complex_object_size"), REP_2_FOR_3_NODES_PLACEMENT_RULE),
],
ids=["simple object size", "complex object size"],
indirect=["user_container"],
"user_container",
[REP_2_FOR_3_NODES_PLACEMENT_RULE],
indirect=True,
)
def test_get_object_with_s3_wallet_bearer_from_all_nodes(
self,

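A minimal, self-contained sketch of the indirect parametrization mechanics used above; the fixture body and the placement-rule string are hypothetical stand-ins for the suite's real user_container fixture and rule constants.

import pytest


@pytest.fixture
def user_container(request: pytest.FixtureRequest) -> str:
    # With indirect=True, the value from @pytest.mark.parametrize arrives here as
    # request.param, so one fixture can build containers for different placement rules.
    placement_rule: str = request.param
    return f"container({placement_rule})"  # stand-in for real container creation


@pytest.mark.parametrize("user_container", ["EXAMPLE_PLACEMENT_RULE"], indirect=True)
def test_with_custom_placement_rule(user_container: str):
    assert "EXAMPLE_PLACEMENT_RULE" in user_container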
View File

@ -22,12 +22,7 @@ logger = logging.getLogger("NeoLogger")
@pytest.mark.sanity
@pytest.mark.grpc_api
class TestObjectApiLifetime(ClusterTestBase):
@allure.title("Object should be removed when lifetime expired for {object_size}")
@pytest.mark.parametrize(
"object_size",
[pytest.lazy_fixture("simple_object_size"), pytest.lazy_fixture("complex_object_size")],
ids=["simple object size", "complex object size"],
)
@allure.title("Object is removed when lifetime expired (obj_size={object_size})")
def test_object_api_lifetime(self, default_wallet: str, object_size: ObjectSize):
"""
Test object deleted after expiration epoch.

View File

@ -35,7 +35,6 @@ from frostfs_testlib.storage.dataclasses.wallet import WalletFactory, WalletInfo
from frostfs_testlib.testing.cluster_test_base import ClusterTestBase
from frostfs_testlib.testing.test_control import expect_not_raises, wait_for_success
from frostfs_testlib.utils import datetime_utils
from pytest import FixtureRequest
from pytest_tests.helpers.utility import wait_for_gc_pass_on_storage_nodes
@ -71,12 +70,11 @@ def locked_storage_object(
user_container: StorageContainer,
client_shell: Shell,
cluster: Cluster,
request: FixtureRequest,
object_size: ObjectSize,
):
"""
Intention of this fixture is to provide storage object which is NOT expected to be deleted during test act phase
"""
object_size: ObjectSize = request.param
with allure.step("Creating locked object"):
current_epoch = ensure_fresh_epoch(client_shell, cluster)
expiration_epoch = current_epoch + FIXTURE_LOCK_LIFETIME
@ -127,7 +125,9 @@ def locked_storage_object(
@wait_for_success(datetime_utils.parse_time(STORAGE_GC_TIME))
def check_object_not_found(wallet_file_path: str, cid: str, oid: str, shell: Shell, rpc_endpoint: str):
def check_object_not_found(
wallet_file_path: str, cid: str, oid: str, shell: Shell, rpc_endpoint: str
):
with pytest.raises(Exception, match=OBJECT_NOT_FOUND):
head_object(
wallet_file_path,
@ -137,7 +137,10 @@ def check_object_not_found(wallet_file_path: str, cid: str, oid: str, shell: She
rpc_endpoint,
)
def verify_object_available(wallet_file_path: str, cid: str, oid: str, shell: Shell, rpc_endpoint: str):
def verify_object_available(
wallet_file_path: str, cid: str, oid: str, shell: Shell, rpc_endpoint: str
):
with expect_not_raises():
head_object(
wallet_file_path,
@ -147,18 +150,18 @@ def verify_object_available(wallet_file_path: str, cid: str, oid: str, shell: Sh
rpc_endpoint,
)
@pytest.mark.sanity
@pytest.mark.grpc_object_lock
class TestObjectLockWithGrpc(ClusterTestBase):
@pytest.fixture()
def new_locked_storage_object(
self, user_container: StorageContainer, request: FixtureRequest
self, user_container: StorageContainer, object_size: ObjectSize
) -> StorageObjectInfo:
"""
Intention of this fixture is to provide new storage object for tests which may delete or corrupt the object or its complementary objects
So we need a new one each time we ask for it
"""
object_size: ObjectSize = request.param
with allure.step("Creating locked object"):
current_epoch = self.get_epoch()
@ -176,25 +179,14 @@ class TestObjectLockWithGrpc(ClusterTestBase):
return storage_object
@allure.title("Locked object should be protected from deletion for {locked_storage_object}")
@pytest.mark.parametrize(
"locked_storage_object",
[pytest.lazy_fixture("simple_object_size"), pytest.lazy_fixture("complex_object_size")],
ids=["simple object size", "complex object size"],
indirect=True,
)
@allure.title("Locked object is protected from deletion (obj_size={object_size})")
def test_locked_object_cannot_be_deleted(
self,
request: FixtureRequest,
locked_storage_object: StorageObjectInfo,
):
"""
Locked object should be protected from deletion
"""
allure.dynamic.title(
f"Locked object should be protected from deletion for {request.node.callspec.id}"
)
with pytest.raises(Exception, match=OBJECT_IS_LOCKED):
delete_object(
locked_storage_object.wallet_file_path,
@ -204,11 +196,9 @@ class TestObjectLockWithGrpc(ClusterTestBase):
self.cluster.default_rpc_endpoint,
)
@allure.title("Lock object itself should be protected from deletion")
@allure.title("Lock object itself is protected from deletion")
# We operate with only lock object here so no complex object needed in this test
@pytest.mark.parametrize(
"locked_storage_object", [pytest.lazy_fixture("simple_object_size")], indirect=True
)
@pytest.mark.parametrize("object_size", ["simple"], indirect=True)
def test_lock_object_itself_cannot_be_deleted(
self,
locked_storage_object: StorageObjectInfo,
@ -231,9 +221,7 @@ class TestObjectLockWithGrpc(ClusterTestBase):
@allure.title("Lock object itself cannot be locked")
# We operate with only lock object here so no complex object needed in this test
@pytest.mark.parametrize(
"locked_storage_object", [pytest.lazy_fixture("simple_object_size")], indirect=True
)
@pytest.mark.parametrize("object_size", ["simple"], indirect=True)
def test_lock_object_cannot_be_locked(
self,
locked_storage_object: StorageObjectInfo,
@ -259,9 +247,7 @@ class TestObjectLockWithGrpc(ClusterTestBase):
"Lock must contain valid lifetime or expire_at field: (lifetime={wrong_lifetime}, expire-at={wrong_expire_at})"
)
# We operate with only lock object here so no complex object needed in this test
@pytest.mark.parametrize(
"locked_storage_object", [pytest.lazy_fixture("simple_object_size")], indirect=True
)
@pytest.mark.parametrize("object_size", ["simple"], indirect=True)
@pytest.mark.parametrize(
"wrong_lifetime,wrong_expire_at,expected_error",
[
@ -298,12 +284,7 @@ class TestObjectLockWithGrpc(ClusterTestBase):
expire_at=wrong_expire_at,
)
@allure.title("Expired object should be deleted after locks are expired for {object_size}")
@pytest.mark.parametrize(
"object_size",
[pytest.lazy_fixture("simple_object_size"), pytest.lazy_fixture("complex_object_size")],
ids=["simple object size", "complex object size"],
)
@allure.title("Expired object is deleted when locks are expired (obj_size={object_size})")
def test_expired_object_should_be_deleted_after_locks_are_expired(
self,
user_container: StorageContainer,
@ -351,18 +332,15 @@ class TestObjectLockWithGrpc(ClusterTestBase):
with allure.step("Wait for object to be deleted after third epoch"):
self.tick_epoch()
check_object_not_found(storage_object.wallet_file_path,
storage_object.cid,
storage_object.oid,
self.shell,
self.cluster.default_rpc_endpoint)
check_object_not_found(
storage_object.wallet_file_path,
storage_object.cid,
storage_object.oid,
self.shell,
self.cluster.default_rpc_endpoint,
)
@allure.title("Should be possible to lock multiple objects at once for {object_size}")
@pytest.mark.parametrize(
"object_size",
[pytest.lazy_fixture("simple_object_size"), pytest.lazy_fixture("complex_object_size")],
ids=["simple object size", "complex object size"],
)
@allure.title("Lock multiple objects at once (obj_size={object_size})")
def test_should_be_possible_to_lock_multiple_objects_at_once(
self,
user_container: StorageContainer,
@ -408,12 +386,7 @@ class TestObjectLockWithGrpc(ClusterTestBase):
with expect_not_raises():
delete_objects(storage_objects, self.shell, self.cluster)
@allure.title("Already outdated lock should not be applied for {object_size}")
@pytest.mark.parametrize(
"object_size",
[pytest.lazy_fixture("simple_object_size"), pytest.lazy_fixture("complex_object_size")],
ids=["simple object size", "complex object size"],
)
@allure.title("Outdated lock cannot be applied (obj_size={object_size})")
def test_already_outdated_lock_should_not_be_applied(
self,
user_container: StorageContainer,
@ -445,14 +418,7 @@ class TestObjectLockWithGrpc(ClusterTestBase):
expire_at=expiration_epoch,
)
@allure.title(
"After lock expiration with lifetime user should be able to delete object for {object_size}"
)
@pytest.mark.parametrize(
"object_size",
[pytest.lazy_fixture("simple_object_size"), pytest.lazy_fixture("complex_object_size")],
ids=["simple object size", "complex object size"],
)
@allure.title("Delete object when lock is expired by lifetime (obj_size={object_size})")
@expect_not_raises()
def test_after_lock_expiration_with_lifetime_user_should_be_able_to_delete_object(
self,
@ -487,14 +453,7 @@ class TestObjectLockWithGrpc(ClusterTestBase):
self.cluster.default_rpc_endpoint,
)
@allure.title(
"After lock expiration with expire_at user should be able to delete object for {object_size}"
)
@pytest.mark.parametrize(
"object_size",
[pytest.lazy_fixture("simple_object_size"), pytest.lazy_fixture("complex_object_size")],
ids=["simple object size", "complex object size"],
)
@allure.title("Delete object when lock is expired by expire_at (obj_size={object_size})")
@expect_not_raises()
def test_after_lock_expiration_with_expire_at_user_should_be_able_to_delete_object(
self,
@ -531,11 +490,11 @@ class TestObjectLockWithGrpc(ClusterTestBase):
self.cluster.default_rpc_endpoint,
)
@allure.title("Complex object chunks should also be protected from deletion")
@allure.title("Complex object chunks are protected from deletion")
@pytest.mark.parametrize(
# Only complex objects are required for this test
"locked_storage_object",
[pytest.lazy_fixture("complex_object_size")],
"object_size",
["complex"],
indirect=True,
)
def test_complex_object_chunks_should_also_be_protected_from_deletion(
@ -560,12 +519,12 @@ class TestObjectLockWithGrpc(ClusterTestBase):
self.cluster.default_rpc_endpoint,
)
@allure.title("Link object of locked complex object can be dropped")
@allure.title("Drop link object of locked complex object")
@pytest.mark.grpc_control
@pytest.mark.parametrize(
"new_locked_storage_object",
"object_size",
# Only complex object is required
[pytest.lazy_fixture("complex_object_size")],
["complex"],
indirect=True,
)
def test_link_object_of_locked_complex_object_can_be_dropped(
@ -590,12 +549,12 @@ class TestObjectLockWithGrpc(ClusterTestBase):
with expect_not_raises():
drop_object(node, new_locked_storage_object.cid, link_object_id)
@allure.title("Chunks of locked complex object can be dropped")
@allure.title("Drop chunks of locked complex object")
@pytest.mark.grpc_control
@pytest.mark.parametrize(
"new_locked_storage_object",
"object_size",
# Only complex object is required
[pytest.lazy_fixture("complex_object_size")],
["complex"],
indirect=True,
)
def test_chunks_of_locked_complex_object_can_be_dropped(
@ -617,18 +576,9 @@ class TestObjectLockWithGrpc(ClusterTestBase):
with expect_not_raises():
drop_object(node, new_locked_storage_object.cid, chunk_object_id)
@allure.title("Locked object with {new_locked_storage_object} can be dropped")
@allure.title("Drop locked object (obj_size={object_size})")
@pytest.mark.grpc_control
@pytest.mark.parametrize(
"new_locked_storage_object",
[pytest.lazy_fixture("simple_object_size"), pytest.lazy_fixture("complex_object_size")],
ids=["simple object size", "complex object size"],
indirect=True,
)
def test_locked_object_can_be_dropped(
self, new_locked_storage_object: StorageObjectInfo, request: pytest.FixtureRequest
):
allure.dynamic.title(f"Locked {request.node.callspec.id} can be dropped")
def test_locked_object_can_be_dropped(self, new_locked_storage_object: StorageObjectInfo):
nodes_with_object = get_nodes_with_object(
new_locked_storage_object.cid,
new_locked_storage_object.oid,
@ -640,11 +590,11 @@ class TestObjectLockWithGrpc(ClusterTestBase):
with expect_not_raises():
drop_object(node, new_locked_storage_object.cid, new_locked_storage_object.oid)
@allure.title("Link object of complex object should also be protected from deletion")
@allure.title("Link object of complex object is protected from deletion")
@pytest.mark.parametrize(
# Only complex objects are required for this test
"locked_storage_object",
[pytest.lazy_fixture("complex_object_size")],
"object_size",
["complex"],
indirect=True,
)
def test_link_object_of_complex_object_should_also_be_protected_from_deletion(
@ -673,26 +623,17 @@ class TestObjectLockWithGrpc(ClusterTestBase):
self.cluster.default_rpc_endpoint,
)
@allure.title("Expired object should be removed after all locks were expired for {object_size}")
@pytest.mark.parametrize(
"object_size",
[pytest.lazy_fixture("simple_object_size"), pytest.lazy_fixture("complex_object_size")],
ids=["simple object size", "complex object size"],
)
@allure.title("Expired object is removed after all locks are expired (obj_size={object_size})")
def test_expired_object_should_be_removed_after_relocks_expare_at(
self,
request: FixtureRequest,
user_container: StorageContainer,
object_size: ObjectSize,
):
allure.dynamic.title(
f"Expired object should be removed after all locks were expired for {request.node.callspec.id}"
)
current_epoch = self.ensure_fresh_epoch()
storage_object = user_container.generate_object(
object_size.value, expire_at=current_epoch + 1
)
current_epoch = self.ensure_fresh_epoch()
storage_object = user_container.generate_object(object_size.value, expire_at=current_epoch + 1)
with allure.step("Apply first lock to object for 3 epochs"):
lock_object_id_0 = lock_object(
storage_object.wallet_file_path,
@ -702,16 +643,17 @@ class TestObjectLockWithGrpc(ClusterTestBase):
self.cluster.default_rpc_endpoint,
expire_at=current_epoch + 3,
)
self.tick_epochs(2)
with allure.step("Check first lock is still available"):
verify_object_available(
storage_object.wallet_file_path,
storage_object.cid,
lock_object_id_0,
self.shell,
self.cluster.default_rpc_endpoint)
self.cluster.default_rpc_endpoint,
)
with allure.step("Apply second lock to object for 3 more epochs"):
lock_object_id_1 = lock_object(
@ -731,17 +673,17 @@ class TestObjectLockWithGrpc(ClusterTestBase):
storage_object.cid,
lock_object_id_0,
self.shell,
self.cluster.default_rpc_endpoint
)
self.cluster.default_rpc_endpoint,
)
with allure.step("Verify second lock is still available"):
verify_object_available(
storage_object.wallet_file_path,
storage_object.cid,
lock_object_id_1,
self.shell,
self.cluster.default_rpc_endpoint
)
self.cluster.default_rpc_endpoint,
)
with allure.step("Apply third lock to object for 3 more epochs"):
lock_object(
@ -760,37 +702,31 @@ class TestObjectLockWithGrpc(ClusterTestBase):
storage_object.cid,
storage_object.oid,
self.shell,
self.cluster.default_rpc_endpoint
)
self.cluster.default_rpc_endpoint,
)
@allure.title("Two expired objects with one lock should be deleted after lock expiration for {object_size}")
@pytest.mark.parametrize(
"object_size",
[pytest.lazy_fixture("simple_object_size"), pytest.lazy_fixture("complex_object_size")],
ids=["simple object size", "complex object size"],
@allure.title(
"Two expired objects with one lock are deleted after lock expiration (obj_size={object_size})"
)
def test_two_objects_expiration_with_one_lock(
self,
request: FixtureRequest,
user_container: StorageContainer,
object_size: ObjectSize,
):
allure.dynamic.title(
f"Two expired objects with one lock should be deleted after lock expiration for {request.node.callspec.id}"
)
current_epoch = self.ensure_fresh_epoch()
storage_objects: list[StorageObjectInfo] = []
with allure.step("Generate two objects"):
for epoch_i in range(2):
storage_objects.append(
user_container.generate_object(object_size.value, expire_at=current_epoch + epoch_i + 3)
user_container.generate_object(
object_size.value, expire_at=current_epoch + epoch_i + 3
)
)
self.tick_epoch()
with allure.step("Lock objects for 4 epochs"):
lock_object(
storage_objects[0].wallet_file_path,
@ -803,16 +739,16 @@ class TestObjectLockWithGrpc(ClusterTestBase):
with allure.step("Verify objects are available during next three epochs"):
for epoch_i in range(3):
self.tick_epoch()
with allure.step(f"Check objects at epoch {current_epoch + epoch_i + 2}"):
for storage_object in storage_objects:
verify_object_available(
storage_object.wallet_file_path,
storage_object.cid,
storage_object.oid,
self.shell,
self.cluster.default_rpc_endpoint
)
self.tick_epoch()
with allure.step(f"Check objects at epoch {current_epoch + epoch_i + 2}"):
for storage_object in storage_objects:
verify_object_available(
storage_object.wallet_file_path,
storage_object.cid,
storage_object.oid,
self.shell,
self.cluster.default_rpc_endpoint,
)
with allure.step("Verify objects are deleted after lock was expired"):
self.tick_epoch()
@ -822,6 +758,5 @@ class TestObjectLockWithGrpc(ClusterTestBase):
storage_object.cid,
storage_object.oid,
self.shell,
self.cluster.default_rpc_endpoint
)
self.cluster.default_rpc_endpoint,
)

View File

@ -27,12 +27,7 @@ class TestReplication(ClusterTestBase):
yield
cluster_state_controller.start_stopped_hosts()
@pytest.mark.parametrize(
"object_size",
[pytest.lazy_fixture("simple_object_size"), pytest.lazy_fixture("complex_object_size")],
ids=["simple object size", "complex object size"],
)
@allure.title("Test replication for {object_size}")
@allure.title("Replication (obj_size={object_size})")
def test_replication(
self,
default_wallet: str,

View File

@ -95,11 +95,6 @@ class Test_http_bearer(ClusterTestBase):
)
@pytest.mark.skip("Temp disable for v0.37")
@pytest.mark.parametrize(
"object_size",
[pytest.lazy_fixture("simple_object_size"), pytest.lazy_fixture("complex_object_size")],
ids=["simple object size", "complex object size"],
)
def test_put_with_bearer_when_eacl_restrict(
self,
object_size: ObjectSize,

View File

@ -218,7 +218,7 @@ class TestHttpPut(ClusterTestBase):
http_hostname=self.cluster.default_http_hostname[0],
)
@allure.title("Test Expiration-Epoch in HTTP header with epoch_gap={epoch_gap}")
@allure.title("Expiration-Epoch in HTTP header (epoch_gap={epoch_gap})")
@pytest.mark.parametrize("epoch_gap", [0, 1])
def test_expiration_epoch_in_http(self, simple_object_size: ObjectSize, epoch_gap: int):
endpoint = self.cluster.default_rpc_endpoint
@ -283,7 +283,7 @@ class TestHttpPut(ClusterTestBase):
http_hostname=self.cluster.default_http_hostname[0],
)
@allure.title("Test Zip in HTTP header")
@allure.title("Zip in HTTP header")
def test_zip_in_http(self, complex_object_size: ObjectSize, simple_object_size: ObjectSize):
cid = create_container(
self.wallet,

View File

@ -22,7 +22,6 @@ from frostfs_testlib.storage.dataclasses.object_size import ObjectSize
from frostfs_testlib.storage.dataclasses.storage_object_info import StorageObjectInfo
from frostfs_testlib.testing.cluster_test_base import ClusterTestBase
from frostfs_testlib.utils.file_utils import generate_file
from pytest import FixtureRequest
OBJECT_ALREADY_REMOVED_ERROR = "object already removed"
logger = logging.getLogger("NeoLogger")
@ -47,19 +46,11 @@ class Test_http_headers(ClusterTestBase):
def prepare_wallet(self, default_wallet):
Test_http_headers.wallet = default_wallet
@pytest.fixture(
params=[
pytest.lazy_fixture("simple_object_size"),
# TODO: Temp disable for v0.37
# pytest.lazy_fixture("complex_object_size"),
],
# TODO: Temp disable for v0.37
# ids=["simple object size", "complex object size"],
ids=["simple object size"],
scope="class",
)
def storage_objects_with_attributes(self, request: FixtureRequest) -> list[StorageObjectInfo]:
object_size: ObjectSize = request.param
def storage_objects_with_attributes(self, object_size: ObjectSize) -> list[StorageObjectInfo]:
# TODO: Deal with http tests
if object_size.value > 1000:
pytest.skip("Complex objects for HTTP temporarily disabled for v0.37")
storage_objects = []
wallet = self.wallet
cid = create_container(

View File

@ -27,12 +27,7 @@ class Test_http_object(ClusterTestBase):
def prepare_wallet(self, default_wallet):
Test_http_object.wallet = default_wallet
@allure.title("Put over gRPC, Get over HTTP for {object_size}")
@pytest.mark.parametrize(
"object_size",
[pytest.lazy_fixture("simple_object_size"), pytest.lazy_fixture("complex_object_size")],
ids=["simple object size", "complex object size"],
)
@allure.title("Put over gRPC, Get over HTTP with attributes (obj_size={object_size})")
def test_object_put_get_attributes(self, object_size: ObjectSize):
"""
Test that object can be put using gRPC interface and get using HTTP.

View File

@ -24,13 +24,8 @@ class Test_http_streaming(ClusterTestBase):
def prepare_wallet(self, default_wallet):
Test_http_streaming.wallet = default_wallet
@allure.title("Test Put via pipe (steaming), Get over HTTP and verify hashes")
@pytest.mark.parametrize(
"object_size",
[pytest.lazy_fixture("complex_object_size")],
ids=["complex object size"],
)
def test_object_can_be_put_get_by_streaming(self, object_size: ObjectSize):
@allure.title("Put via pipe (streaming), Get over HTTP and verify hashes")
def test_object_can_be_put_get_by_streaming(self, complex_object_size: ObjectSize):
"""
Test that object can be put using gRPC interface and get using HTTP.
@ -53,7 +48,7 @@ class Test_http_streaming(ClusterTestBase):
)
with allure.step("Allocate big object"):
# Generate file
file_path = generate_file(object_size.value)
file_path = generate_file(complex_object_size.value)
with allure.step(
"Put objects using curl utility and Get object and verify hashes [ get/$CID/$OID ]"

View File

@ -209,12 +209,7 @@ class Test_http_system_header(ClusterTestBase):
error_pattern=f"{EXPIRATION_EXPIRATION_RFC} must be in the future",
)
@allure.title("Priority of attributes epoch>duration for {object_size}")
@pytest.mark.parametrize(
"object_size",
[pytest.lazy_fixture("simple_object_size"), pytest.lazy_fixture("complex_object_size")],
ids=["simple object size", "complex object size"],
)
@allure.title("Priority of attributes epoch>duration (obj_size={object_size})")
@pytest.mark.skip("Temp disable for v0.37")
def test_http_attr_priority_epoch_duration(
self, user_container: str, object_size: ObjectSize, epoch_duration: int
@ -256,12 +251,7 @@ class Test_http_system_header(ClusterTestBase):
self.wallet, user_container, oid, self.shell, self.cluster
)
@allure.title("Priority of attributes duration>timestamp for {object_size}")
@pytest.mark.parametrize(
"object_size",
[pytest.lazy_fixture("simple_object_size"), pytest.lazy_fixture("complex_object_size")],
ids=["simple object size", "complex object size"],
)
@allure.title("Priority of attributes duration>timestamp (obj_size={object_size})")
@pytest.mark.skip("Temp disable for v0.37")
def test_http_attr_priority_dur_timestamp(
self, user_container: str, object_size: ObjectSize, epoch_duration: int
@ -310,12 +300,7 @@ class Test_http_system_header(ClusterTestBase):
self.wallet, user_container, oid, self.shell, self.cluster
)
@allure.title("Priority of attributes timestamp>Expiration-RFC for {object_size}")
@pytest.mark.parametrize(
"object_size",
[pytest.lazy_fixture("simple_object_size"), pytest.lazy_fixture("complex_object_size")],
ids=["simple object size", "complex object size"],
)
@allure.title("Priority of attributes timestamp>Expiration-RFC (obj_size={object_size})")
@pytest.mark.skip("Temp disable for v0.37")
def test_http_attr_priority_timestamp_rfc(
self, user_container: str, object_size: ObjectSize, epoch_duration: int
@ -364,14 +349,12 @@ class Test_http_system_header(ClusterTestBase):
self.wallet, user_container, oid, self.shell, self.cluster
)
@allure.title("Object should be deleted when expiration passed for {object_size}")
@allure.title("Object should be deleted when expiration passed (obj_size={object_size})")
@pytest.mark.parametrize(
"object_size",
# TODO: Temp disabled for v0.37
# [pytest.lazy_fixture("simple_object_size"), pytest.lazy_fixture("complex_object_size")],
[pytest.lazy_fixture("simple_object_size")],
# ids=["simple object size", "complex object size"],
ids=["simple object size"],
# TODO: "complex" temporarly disabled for v0.37
["simple"],
indirect=True,
)
def test_http_rfc_object_unavailable_after_expir(
self, user_container: str, object_size: ObjectSize, epoch_duration: int

View File

@ -10,7 +10,7 @@ from frostfs_testlib.utils.file_utils import generate_file
@pytest.mark.acl
@pytest.mark.s3_gate
class TestS3GateACL:
@allure.title("{s3_client}: Object ACL")
@allure.title("Object ACL (s3_client={s3_client})")
@pytest.mark.parametrize("s3_client", [AwsCliClient], indirect=True)
def test_s3_object_ACL(
self, s3_client: S3ClientWrapper, bucket: str, simple_object_size: ObjectSize
@ -44,7 +44,7 @@ class TestS3GateACL:
obj_acl = s3_client.get_object_acl(bucket, file_name)
s3_helper.assert_s3_acl(acl_grants=obj_acl, permitted_users="AllUsers")
@allure.title("{s3_client}: Bucket ACL")
@allure.title("Bucket ACL (s3_client={s3_client})")
@pytest.mark.parametrize("s3_client", [AwsCliClient, Boto3ClientWrapper], indirect=True)
def test_s3_bucket_ACL(self, s3_client: S3ClientWrapper):
with allure.step("Create bucket with ACL = public-read-write"):

View File

@ -17,7 +17,7 @@ def pytest_generate_tests(metafunc: pytest.Metafunc):
@pytest.mark.s3_gate
@pytest.mark.s3_gate_bucket
class TestS3GateBucket:
@allure.title("{s3_client}: Create Bucket with different ACL")
@allure.title("Create Bucket with different ACL (s3_client={s3_client})")
def test_s3_create_bucket_with_ACL(self, s3_client: S3ClientWrapper):
with allure.step("Create bucket with ACL private"):
@ -46,7 +46,7 @@ class TestS3GateBucket:
bucket_acl_3 = s3_client.get_bucket_acl(bucket_3)
s3_helper.assert_s3_acl(acl_grants=bucket_acl_3, permitted_users="AllUsers")
@allure.title("{s3_client}: Create Bucket with different ACL by grant")
@allure.title("Create Bucket with different ACL by grant (s3_client={s3_client})")
def test_s3_create_bucket_with_grands(self, s3_client: S3ClientWrapper):
with allure.step("Create bucket with --grant-read"):
@ -73,7 +73,7 @@ class TestS3GateBucket:
bucket_acl_2 = s3_client.get_bucket_acl(bucket_2)
s3_helper.assert_s3_acl(acl_grants=bucket_acl_2, permitted_users="AllUsers")
@allure.title("{s3_client}: create bucket with object lock")
@allure.title("Create bucket with object lock (s3_client={s3_client})")
def test_s3_bucket_object_lock(
self, s3_client: S3ClientWrapper, simple_object_size: ObjectSize
):
@ -108,7 +108,7 @@ class TestS3GateBucket:
s3_client, bucket_1, file_name, "COMPLIANCE", date_obj_1, "ON"
)
@allure.title("{s3_client}: delete bucket")
@allure.title("Delete bucket (s3_client={s3_client})")
def test_s3_delete_bucket(self, s3_client: S3ClientWrapper, simple_object_size: ObjectSize):
file_path_1 = generate_file(simple_object_size.value)
file_name_1 = s3_helper.object_key_from_file_path(file_path_1)

View File

@ -34,7 +34,7 @@ def pytest_generate_tests(metafunc: pytest.Metafunc):
@pytest.mark.s3_gate
@pytest.mark.s3_gate_base
class TestS3Gate:
@allure.title("{s3_client}: Bucket API")
@allure.title("Bucket API (s3_client={s3_client})")
def test_s3_buckets(
self,
s3_client: S3ClientWrapper,
@ -107,25 +107,22 @@ class TestS3Gate:
with pytest.raises(Exception, match=r".*Not Found.*"):
s3_client.head_bucket(bucket_1)
@allure.title("{s3_client}: Object API for {object_size}")
@allure.title("Object API (obj_size={object_size}, s3_client={s3_client})")
@pytest.mark.parametrize(
"object_size",
["simple object size", "complex object size"],
ids=["simple object size", "complex object size"],
["simple", "complex"],
indirect=True,
)
def test_s3_api_object(
self,
s3_client: S3ClientWrapper,
object_size: str,
object_size: ObjectSize,
two_buckets: tuple[str, str],
simple_object_size: ObjectSize,
complex_object_size: ObjectSize,
):
"""
Test base S3 Object API (Put/Head/List) for simple and complex objects.
"""
size = simple_object_size if object_size == "simple object size" else complex_object_size
file_path = generate_file(size.value)
file_path = generate_file(object_size.value)
file_name = s3_helper.object_key_from_file_path(file_path)
bucket_1, bucket_2 = two_buckets
@ -147,7 +144,7 @@ class TestS3Gate:
for attrs in (["ETag"], ["ObjectSize", "StorageClass"]):
s3_client.get_object_attributes(bucket, file_name, attrs)
@allure.title("{s3_client}: Sync directory")
@allure.title("Sync directory (s3_client={s3_client})")
def test_s3_sync_dir(
self, s3_client: S3ClientWrapper, bucket: str, simple_object_size: ObjectSize
):
@ -179,7 +176,7 @@ class TestS3Gate:
key_to_path.get(obj_key)
), "Expected hashes are the same"
@allure.title("{s3_client}: Object versioning")
@allure.title("Object versioning (s3_client={s3_client})")
def test_s3_api_versioning(
self, s3_client: S3ClientWrapper, bucket: str, simple_object_size: ObjectSize
):
@ -259,7 +256,7 @@ class TestS3Gate:
), f"Expected object content is\n{version_2_content}\nGot\n{got_content}"
@pytest.mark.s3_gate_multipart
@allure.title("{s3_client}: Object Multipart API")
@allure.title("Object Multipart API (s3_client={s3_client})")
def test_s3_api_multipart(
self, s3_client: S3ClientWrapper, bucket: str, simple_object_size: ObjectSize
):
@ -316,7 +313,7 @@ class TestS3Gate:
self.check_object_attributes(s3_client, bucket, object_key, parts_count)
@allure.title("{s3_client}: Bucket tagging API")
@allure.title("Bucket tagging API (s3_client={s3_client})")
def test_s3_api_bucket_tagging(self, s3_client: S3ClientWrapper, bucket: str):
"""
Test checks S3 Bucket tagging API (Put tag/Get tag).
@ -329,7 +326,7 @@ class TestS3Gate:
s3_client.delete_bucket_tagging(bucket)
s3_helper.check_tags_by_bucket(s3_client, bucket, [])
@allure.title("{s3_client}: Object tagging API")
@allure.title("Object tagging API (s3_client={s3_client})")
def test_s3_api_object_tagging(
self, s3_client: S3ClientWrapper, bucket: str, simple_object_size: ObjectSize
):
@ -361,7 +358,7 @@ class TestS3Gate:
s3_client.delete_object_tagging(bucket, obj_key)
s3_helper.check_tags_by_object(s3_client, bucket, obj_key, [])
@allure.title("{s3_client}: Delete object & delete objects")
@allure.title("Delete object & delete objects (s3_client={s3_client})")
def test_s3_api_delete(
self,
s3_client: S3ClientWrapper,
@ -427,7 +424,7 @@ class TestS3Gate:
with pytest.raises(Exception, match="The specified key does not exist"):
s3_client.get_object(bucket_2, object_key)
@allure.title("{s3_client}: Copy object to the same bucket")
@allure.title("Copy object to the same bucket (s3_client={s3_client})")
def test_s3_copy_same_bucket(
self,
s3_client: S3ClientWrapper,
@ -476,7 +473,7 @@ class TestS3Gate:
unexpected_objects=[file_name_simple],
)
@allure.title("{s3_client}: Copy object to another bucket")
@allure.title("Copy object to another bucket (s3_client={s3_client})")
def test_s3_copy_to_another_bucket(
self,
s3_client: S3ClientWrapper,

View File

@ -20,7 +20,7 @@ def pytest_generate_tests(metafunc: pytest.Metafunc):
@pytest.mark.parametrize("version_id", [None, "second"])
class TestS3GateLocking:
@allure.title(
"{s3_client}: Retention period & legal lock on the object with version_id={version_id}"
"Retention period and legal lock on object (version_id={version_id}, s3_client={s3_client})"
)
def test_s3_object_locking(
self, s3_client: S3ClientWrapper, version_id: str, simple_object_size: ObjectSize
@ -77,7 +77,7 @@ class TestS3GateLocking:
s3_client.delete_object(bucket, file_name, version_id)
@allure.title(
"{s3_client}: Impossible to change the retention mode COMPLIANCE with version_id={version_id}"
"Impossible to change retention mode COMPLIANCE (version_id={version_id}, s3_client={s3_client})"
)
def test_s3_mode_compliance(
self, s3_client: S3ClientWrapper, version_id: str, simple_object_size: ObjectSize
@ -117,7 +117,9 @@ class TestS3GateLocking:
with pytest.raises(Exception):
s3_client.put_object_retention(bucket, file_name, retention, version_id)
@allure.title("{s3_client}: Change retention mode GOVERNANCE with version_id={version_id}")
@allure.title(
"Change retention mode GOVERNANCE (version_id={version_id}, s3_client={s3_client})"
)
def test_s3_mode_governance(
self, s3_client: S3ClientWrapper, version_id: str, simple_object_size: ObjectSize
):
@ -179,7 +181,9 @@ class TestS3GateLocking:
s3_client, bucket, file_name, "GOVERNANCE", date_obj, "OFF"
)
@allure.title("{s3_client}: Object Cannot Be Locked with version_id={version_id}")
@allure.title(
"[NEGATIVE] Lock object in bucket with disabled locking (version_id={version_id}, s3_client={s3_client})"
)
def test_s3_legal_hold(
self, s3_client: S3ClientWrapper, version_id: str, simple_object_size: ObjectSize
):
@ -201,7 +205,7 @@ class TestS3GateLocking:
@pytest.mark.s3_gate
class TestS3GateLockingBucket:
@allure.title("{s3_client}: Bucket Lock")
@allure.title("Bucket Lock (s3_client={s3_client})")
def test_s3_bucket_lock(self, s3_client: S3ClientWrapper, simple_object_size: ObjectSize):
file_path = generate_file(simple_object_size.value)
file_name = s3_helper.object_key_from_file_path(file_path)

View File

@ -23,7 +23,7 @@ class TestS3GateMultipart(ClusterTestBase):
"The upload ID may be invalid, or the upload may have been aborted or completed."
)
@allure.title("{s3_client}: Object Multipart API")
@allure.title("Object Multipart API (s3_client={s3_client})")
@pytest.mark.parametrize("versioning_status", [VersioningStatus.ENABLED], indirect=True)
def test_s3_object_multipart(self, s3_client: S3ClientWrapper, bucket: str):
parts_count = 5
@ -58,7 +58,7 @@ class TestS3GateMultipart(ClusterTestBase):
got_object = s3_client.get_object(bucket, object_key)
assert get_file_hash(got_object) == get_file_hash(file_name_large)
@allure.title("{s3_client}: Abort Multipart Upload")
@allure.title("Abort Multipart Upload (s3_client={s3_client})")
@pytest.mark.parametrize("versioning_status", [VersioningStatus.ENABLED], indirect=True)
def test_s3_abort_multipart(
self,
@ -113,7 +113,7 @@ class TestS3GateMultipart(ClusterTestBase):
)
assert len(objects) == 0, f"Expected no objects in container, got\n{objects}"
@allure.title("{s3_client}: Upload Part Copy")
@allure.title("Upload Part Copy (s3_client={s3_client})")
@pytest.mark.parametrize("versioning_status", [VersioningStatus.ENABLED], indirect=True)
def test_s3_multipart_copy(self, s3_client: S3ClientWrapper, bucket: str):
parts_count = 3


@ -31,35 +31,6 @@ def pytest_generate_tests(metafunc: pytest.Metafunc):
@pytest.mark.s3_gate
@pytest.mark.s3_gate_object
class TestS3GateObject:
@allure.title("Set object size for current test")
@pytest.fixture
def object_size(self, request: pytest.FixtureRequest) -> int:
object_size = request.param
return object_size
@allure.title("Put objects in a bucket")
@pytest.fixture
def objects_in_bucket(
self,
s3_client: S3ClientWrapper,
bucket: str,
object_size: int,
request: pytest.FixtureRequest,
) -> list[str]:
objects: list[str] = []
objects_count = int(request.param)
with allure.step(
f"Put {objects_count} objects of size '{object_size}' bytes into bucket '{bucket}'"
):
for _ in range(objects_count):
file_path = generate_file(object_size)
file_name = s3_helper.object_key_from_file_path(file_path)
objects.append(file_name)
s3_client.put_object(bucket, file_path)
return objects
@pytest.fixture
def second_wallet_public_key(self):
second_wallet = os.path.join(os.getcwd(), ASSETS_DIR, f"{str(uuid.uuid4())}.json")
@ -67,7 +38,7 @@ class TestS3GateObject:
public_key = wallet_utils.get_wallet_public_key(second_wallet, DEFAULT_WALLET_PASS)
yield public_key
@allure.title("{s3_client}: Copy object")
@allure.title("Copy object (s3_client={s3_client})")
def test_s3_copy_object(
self,
s3_client: S3ClientWrapper,
@ -123,7 +94,7 @@ class TestS3GateObject:
with pytest.raises(Exception):
s3_client.copy_object(bucket_1, file_name)
@allure.title("{s3_client}: Copy version of object")
@allure.title("Copy version of object (s3_client={s3_client})")
def test_s3_copy_version_object(
self,
s3_client: S3ClientWrapper,
@ -170,7 +141,7 @@ class TestS3GateObject:
with pytest.raises(Exception):
s3_client.copy_object(bucket_1, obj_key)
@allure.title("{s3_client}: Checking copy with acl")
@allure.title("Copy with acl (s3_client={s3_client})")
def test_s3_copy_acl(
self, s3_client: S3ClientWrapper, bucket: str, simple_object_size: ObjectSize
):
@ -191,7 +162,7 @@ class TestS3GateObject:
obj_acl = s3_client.get_object_acl(bucket, copy_obj_path)
s3_helper.assert_s3_acl(acl_grants=obj_acl, permitted_users="CanonicalUser")
@allure.title("{s3_client}: Copy object with metadata")
@allure.title("Copy object with metadata (s3_client={s3_client})")
def test_s3_copy_metadate(
self, s3_client: S3ClientWrapper, bucket: str, simple_object_size: ObjectSize
):
@ -238,7 +209,7 @@ class TestS3GateObject:
obj_head.get("Metadata") == object_metadata_1
), f"Metadata must be {object_metadata_1}"
@allure.title("{s3_client}: Copy object with tagging")
@allure.title("Copy object with tagging (s3_client={s3_client})")
def test_s3_copy_tagging(
self, s3_client: S3ClientWrapper, bucket: str, simple_object_size: ObjectSize
):
@ -289,7 +260,7 @@ class TestS3GateObject:
for tag in expected_tags:
assert tag in got_tags, f"Expected tag {tag} in {got_tags}"
@allure.title("{s3_client}: Delete version of object")
@allure.title("Delete version of object (s3_client={s3_client})")
def test_s3_delete_versioning(
self,
s3_client: S3ClientWrapper,
@ -352,7 +323,7 @@ class TestS3GateObject:
assert versions.get("DeleteMarkers", None), "Expected delete Marker"
assert "DeleteMarker" in delete_obj.keys(), "Expected delete Marker"
@allure.title("{s3_client}: bulk delete version of object")
@allure.title("Bulk delete version of object (s3_client={s3_client})")
def test_s3_bulk_delete_versioning(
self, s3_client: S3ClientWrapper, bucket: str, simple_object_size: ObjectSize
):
@ -407,7 +378,7 @@ class TestS3GateObject:
obj_versions.sort() == version_to_save.sort()
), f"Object should have versions: {version_to_save}"
@allure.title("{s3_client}: Get versions of object")
@allure.title("Get versions of object (s3_client={s3_client})")
def test_s3_get_versioning(
self, s3_client: S3ClientWrapper, bucket: str, simple_object_size: ObjectSize
):
@ -444,7 +415,7 @@ class TestS3GateObject:
object_3.get("VersionId") == version_id_2
), f"Get object with version {version_id_2}"
@allure.title("{s3_client}: Get range")
@allure.title("Get range (s3_client={s3_client})")
def test_s3_get_range(
self,
s3_client: S3ClientWrapper,
@ -546,26 +517,34 @@ class TestS3GateObject:
return result_list
@allure.title("{s3_client}: Bulk deletion should be limited to 1000 objects")
@pytest.mark.parametrize(
"objects_in_bucket, object_size",
[(3, 10)],
indirect=True,
)
@allure.title("Bulk deletion is limited to 1000 objects (s3_client={s3_client})")
def test_s3_bulk_deletion_limit(
self, s3_client: S3ClientWrapper, bucket: str, objects_in_bucket: list[str]
self,
s3_client: S3ClientWrapper,
bucket: str,
simple_object_size: ObjectSize,
):
objects_in_bucket = []
objects_count = 3
with allure.step(f"Put {objects_count} into bucket"):
for _ in range(objects_count):
file_path = generate_file(simple_object_size.value)
file_name = s3_helper.object_key_from_file_path(file_path)
objects_in_bucket.append(file_name)
s3_client.put_object(bucket, file_path)
# Extend deletion list to 1001 elements with same keys for test speed
objects_to_delete = self.copy_extend_list(objects_in_bucket, 1001)
with allure.step("Delete 1001 objects and expect error"):
with allure.step("Send delete request with 1001 objects and expect error"):
with pytest.raises(Exception, match=S3_MALFORMED_XML_REQUEST):
s3_client.delete_objects(bucket, objects_to_delete)
with allure.step("Delete 1000 objects without error"):
with allure.step("Send delete request with 1000 objects without error"):
with expect_not_raises():
s3_client.delete_objects(bucket, objects_to_delete[:1000])
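DeleteObjects accepts at most 1000 keys per request, which is exactly what the 1001-key request above trips over. The copy_extend_list helper is only partially visible in this diff; a stand-in with the behaviour the test relies on (repeating existing keys until a target length is reached, duplicates being acceptable in a delete request) could look like this:

from itertools import cycle, islice

def copy_extend_list(source: list[str], target_length: int) -> list[str]:
    # Repeat the source keys until the requested length is reached.
    return list(islice(cycle(source), target_length))

assert len(copy_extend_list(["a", "b", "c"], 1001)) == 1001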
@allure.title("{s3_client}: Copy object with metadata")
@allure.title("Copy object with metadata (s3_client={s3_client})")
@pytest.mark.smoke
def test_s3_head_object(
self,
@ -606,7 +585,9 @@ class TestS3GateObject:
), f"Expected VersionId is {version_id_1}"
assert response.get("ContentLength") != 0, "Expected ContentLength is not zero"
@allure.title("{s3_client}: list of objects with version {list_type}")
@allure.title(
"List of objects with version (method_version={list_type}, s3_client={s3_client})"
)
@pytest.mark.parametrize("list_type", ["v1", "v2"])
def test_s3_list_object(
self,
@ -648,7 +629,7 @@ class TestS3GateObject:
), f"bucket should have object key {file_name_2}"
assert "DeleteMarker" in delete_obj.keys(), "Expected delete Marker"
@allure.title("{s3_client}: put object")
@allure.title("Put object (s3_client={s3_client})")
def test_s3_put_object(
self,
s3_client: S3ClientWrapper,
@ -754,7 +735,7 @@ class TestS3GateObject:
{"Key": tag_key_3, "Value": str(tag_value_3)}
], "Tags must be the same"
@allure.title("{s3_client}: put object with ACL and versioning is {bucket_versioning}")
@allure.title("Put object with ACL (versioning={bucket_versioning}, s3_client={s3_client})")
@pytest.mark.parametrize("bucket_versioning", ["ENABLED", "SUSPENDED"])
def test_s3_put_object_acl(
self,
@ -839,7 +820,7 @@ class TestS3GateObject:
object_6 = s3_client.get_object(bucket, file_name_5)
assert get_file_hash(file_path_5) == get_file_hash(object_6), "Hashes must be the same"
@allure.title("{s3_client}: put object with lock-mode")
@allure.title("Put object with lock-mode (s3_client={s3_client})")
def test_s3_put_object_lock_mode(
self,
s3_client: S3ClientWrapper,
@ -920,7 +901,7 @@ class TestS3GateObject:
object_lock_retain_until_date=date_obj,
)
@allure.title("{s3_client}: Sync directory with sync type {sync_type}")
@allure.title("Sync directory (sync_type={sync_type}, s3_client={s3_client})")
@pytest.mark.parametrize("sync_type", ["sync", "cp"])
def test_s3_sync_dir(
self,
@ -976,7 +957,7 @@ class TestS3GateObject:
# obj_acl = s3_client.get_object_acl(bucket, obj_key)
# s3_helper.assert_s3_acl(acl_grants = obj_acl, permitted_users = "AllUsers")
@allure.title("{s3_client}: Put 10 nested level object")
@allure.title("Put 10 nested level object (s3_client={s3_client})")
def test_s3_put_10_folder(
self,
s3_client: S3ClientWrapper,
@ -995,7 +976,7 @@ class TestS3GateObject:
s3_client.put_object(bucket, file_path_1)
s3_helper.check_objects_in_bucket(s3_client, bucket, [file_name])
@allure.title("{s3_client}: Delete non-existing object from empty bucket")
@allure.title("Delete non-existing object from empty bucket (s3_client={s3_client})")
def test_s3_delete_non_existing_object(self, s3_client: S3ClientWrapper, bucket: str):
s3_helper.set_bucket_versioning(s3_client, bucket, VersioningStatus.ENABLED)
@ -1012,7 +993,7 @@ class TestS3GateObject:
objects_list = s3_client.list_objects_versions(bucket)
assert not objects_list, f"Expected empty bucket, got {objects_list}"
@allure.title("{s3_client}: Delete the same object twice")
@allure.title("Delete the same object twice (s3_client={s3_client})")
def test_s3_delete_twice(
self, s3_client: S3ClientWrapper, bucket: str, simple_object_size: ObjectSize
):
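Several deletion tests above assert on DeleteMarker / DeleteMarkers fields; the behaviour under test is standard S3 versioning semantics, sketched below with a boto3-style client (bucket and key names are hypothetical).

# In a versioning-enabled bucket, DELETE without a VersionId hides the key behind a
# delete marker instead of removing any data.
import boto3

s3 = boto3.client("s3")  # assumption: endpoint and credentials are configured elsewhere
bucket, key = "versioned-demo-bucket", "doomed-object"  # hypothetical names

s3.create_bucket(Bucket=bucket)
s3.put_bucket_versioning(Bucket=bucket, VersioningConfiguration={"Status": "Enabled"})
s3.put_object(Bucket=bucket, Key=key, Body=b"payload")

response = s3.delete_object(Bucket=bucket, Key=key)
assert response["DeleteMarker"] is True  # a marker was created, the data is still stored

versions = s3.list_object_versions(Bucket=bucket)
assert versions["DeleteMarkers"]  # the marker is listed...
assert versions["Versions"]       # ...alongside the original version

# Deleting the marker by its VersionId "undeletes" the key; deleting a concrete object
# VersionId removes that version permanently.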


@ -25,7 +25,7 @@ def pytest_generate_tests(metafunc: pytest.Metafunc):
@pytest.mark.s3_gate
class TestS3GatePolicy(ClusterTestBase):
@allure.title("{s3_client}: bucket creation with retention policy applied")
@allure.title("Bucket creation with retention policy applied (s3_client={s3_client})")
def test_s3_bucket_location(
self, default_wallet: str, s3_client: S3ClientWrapper, simple_object_size: ObjectSize
):
@ -91,13 +91,13 @@ class TestS3GatePolicy(ClusterTestBase):
)
assert copies_2 == 3
@allure.title("{s3_client}: bucket with unexisting location constraint")
@allure.title("Bucket with unexisting location constraint (s3_client={s3_client})")
def test_s3_bucket_wrong_location(self, s3_client: S3ClientWrapper):
with allure.step("Create bucket with unenxisting location constraint policy"):
with pytest.raises(Exception):
s3_client.create_bucket(location_constraint="UNEXISTING LOCATION CONSTRAINT")
@allure.title("{s3_client}: bucket policy")
@allure.title("Bucket policy (s3_client={s3_client})")
def test_s3_bucket_policy(self, s3_client: S3ClientWrapper):
with allure.step("Create bucket with default policy"):
bucket = s3_client.create_bucket()
@ -127,7 +127,7 @@ class TestS3GatePolicy(ClusterTestBase):
policy_1 = s3_client.get_bucket_policy(bucket)
print(policy_1)
@allure.title("{s3_client}: bucket CORS")
@allure.title("Bucket CORS (s3_client={s3_client})")
def test_s3_cors(self, s3_client: S3ClientWrapper):
with allure.step("Create bucket without cors"):
bucket = s3_client.create_bucket()
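The policy and CORS tests above operate on standard S3 configuration documents. The sketch below shows the general shape of those documents with boto3-style calls; the statement content, allowed origin, and bucket name are assumptions rather than the suite's actual fixtures.

import json

import boto3

s3 = boto3.client("s3")  # assumption: endpoint and credentials are configured elsewhere
bucket = "policy-demo-bucket"  # hypothetical name

# A minimal public-read bucket policy document.
policy = {
    "Version": "2012-10-17",
    "Statement": [
        {
            "Effect": "Allow",
            "Principal": "*",
            "Action": "s3:GetObject",
            "Resource": f"arn:aws:s3:::{bucket}/*",
        }
    ],
}
s3.put_bucket_policy(Bucket=bucket, Policy=json.dumps(policy))

# A minimal CORS configuration allowing browser GETs from a single origin.
cors = {
    "CORSRules": [
        {"AllowedMethods": ["GET"], "AllowedOrigins": ["https://example.com"], "MaxAgeSeconds": 300}
    ]
}
s3.put_bucket_cors(Bucket=bucket, CORSConfiguration=cors)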


@ -28,7 +28,7 @@ class TestS3GateTagging:
tags.append((tag_key, tag_value))
return tags
@allure.title("{s3_client}: Object tagging")
@allure.title("Object tagging (s3_client={s3_client})")
def test_s3_object_tagging(
self, s3_client: S3ClientWrapper, bucket: str, simple_object_size: ObjectSize
):
@ -78,7 +78,7 @@ class TestS3GateTagging:
s3_client.delete_object_tagging(bucket, file_name)
s3_helper.check_tags_by_object(s3_client, bucket, file_name, [])
@allure.title("{s3_client}: bucket tagging")
@allure.title("Bucket tagging (s3_client={s3_client})")
def test_s3_bucket_tagging(self, s3_client: S3ClientWrapper, bucket: str):
with allure.step("Put 10 bucket tags"):


@ -17,13 +17,13 @@ def pytest_generate_tests(metafunc: pytest.Metafunc):
@pytest.mark.s3_gate
@pytest.mark.s3_gate_versioning
class TestS3GateVersioning:
@allure.title("{s3_client}: Impossible to disable versioning with object_lock")
@allure.title("Impossible to disable versioning with object_lock (s3_client={s3_client})")
def test_s3_version_off(self, s3_client: S3ClientWrapper):
bucket = s3_client.create_bucket(object_lock_enabled_for_bucket=True)
with pytest.raises(Exception):
s3_helper.set_bucket_versioning(s3_client, bucket, VersioningStatus.SUSPENDED)
@allure.title("{s3_client}: Enable and disable versioning without object_lock")
@allure.title("Enable and disable versioning without object_lock (s3_client={s3_client})")
def test_s3_version(self, s3_client: S3ClientWrapper, simple_object_size: ObjectSize):
file_path = generate_file(simple_object_size.value)
file_name = s3_helper.object_key_from_file_path(file_path)
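The two versioning tests rely on standard S3 behaviour: a bucket created with object lock has versioning permanently enabled, so an attempt to suspend it is rejected, while an ordinary bucket may toggle between Enabled and Suspended. A boto3-style sketch with hypothetical names:

import boto3
from botocore.exceptions import ClientError

s3 = boto3.client("s3")  # assumption: endpoint and credentials are configured elsewhere

plain_bucket = "versioning-demo-bucket"     # hypothetical names
locked_bucket = "object-lock-demo-bucket"

s3.create_bucket(Bucket=plain_bucket)
s3.put_bucket_versioning(Bucket=plain_bucket, VersioningConfiguration={"Status": "Enabled"})
s3.put_bucket_versioning(Bucket=plain_bucket, VersioningConfiguration={"Status": "Suspended"})

s3.create_bucket(Bucket=locked_bucket, ObjectLockEnabledForBucket=True)
try:
    s3.put_bucket_versioning(Bucket=locked_bucket, VersioningConfiguration={"Status": "Suspended"})
except ClientError:
    pass  # rejected: object lock requires versioning to stay enabled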


@ -16,12 +16,7 @@ from frostfs_testlib.utils.file_utils import generate_file
@pytest.mark.sanity
@pytest.mark.session_token
class TestDynamicObjectSession(ClusterTestBase):
@allure.title("Object Operations with Session Token for {object_size}")
@pytest.mark.parametrize(
"object_size",
[pytest.lazy_fixture("simple_object_size"), pytest.lazy_fixture("complex_object_size")],
ids=["simple object size", "complex object size"],
)
@allure.title("Object Operations with Session Token (obj_size={object_size})")
def test_object_session_token(self, default_wallet: str, object_size: ObjectSize):
"""
Test how operations over objects are executed with a session token
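The title convention used throughout these changes relies on allure-pytest filling {placeholder} fields in @allure.title from the test's arguments, which is what makes the allure.dynamic.title calls and per-test lazy_fixture parametrization unnecessary. A self-contained sketch with literal parameter values standing in for the suite's fixtures:

import allure
import pytest

@allure.title("Object Operations with Session Token (obj_size={object_size})")
@pytest.mark.parametrize("object_size", ["simple", "complex"])
def test_title_template(object_size: str):
    # allure-pytest substitutes {object_size} from the test argument, yielding
    # "Object Operations with Session Token (obj_size=simple)" and "... (obj_size=complex)".
    assert object_size in ("simple", "complex")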


@ -41,7 +41,6 @@ from frostfs_testlib.storage.dataclasses.wallet import WalletInfo
from frostfs_testlib.testing.cluster_test_base import ClusterTestBase
from frostfs_testlib.testing.test_control import expect_not_raises
from frostfs_testlib.utils.file_utils import generate_file
from pytest import FixtureRequest
logger = logging.getLogger("NeoLogger")
@ -62,8 +61,6 @@ def storage_containers(
@pytest.fixture(
params=[pytest.lazy_fixture("simple_object_size"), pytest.lazy_fixture("complex_object_size")],
ids=["simple object size", "complex object size"],
# Scope module to upload/delete each files set only once
scope="module",
)
@ -72,10 +69,9 @@ def storage_objects(
client_shell: Shell,
storage_containers: list[str],
cluster: Cluster,
request: FixtureRequest,
object_size: ObjectSize,
) -> list[StorageObjectInfo]:
object_size: ObjectSize = request.param
file_path = generate_file(object_size.value)
storage_objects = []
@ -152,14 +148,15 @@ def static_sessions(
@pytest.mark.static_session
class TestObjectStaticSession(ClusterTestBase):
@allure.title("Read operations with static session: {storage_objects} {verb.value}")
@allure.title(
"Read operations with static session (method={method_under_test.__name__}, obj_size={object_size})"
)
@pytest.mark.parametrize(
"method_under_test,verb",
[
(head_object, ObjectVerb.HEAD),
(get_object, ObjectVerb.GET),
],
ids=["head", "get"],
)
def test_static_session_read(
self,
@ -168,14 +165,10 @@ class TestObjectStaticSession(ClusterTestBase):
static_sessions: dict[ObjectVerb, str],
method_under_test,
verb: ObjectVerb,
request: FixtureRequest,
):
"""
Validate static session with read operations
"""
allure.dynamic.title(
f"Read operation with static session: {request.node.callspec.id.replace('-', ' ')}"
)
for node in self.cluster.storage_nodes:
for storage_object in storage_objects[0:2]:
@ -188,11 +181,12 @@ class TestObjectStaticSession(ClusterTestBase):
session=static_sessions[verb],
)
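Parametrizing over callables, as the read and range tests do, lets a static title reference the function's own name through {method_under_test.__name__}, so the explicit ids lists and allure.dynamic.title calls drop out. A minimal sketch with stub callables in place of the real object helpers:

import allure
import pytest

def head_object(*args, **kwargs):  # stub standing in for the real helper
    return "head"

def get_object(*args, **kwargs):   # stub standing in for the real helper
    return "get"

@allure.title("Read operations with static session (method={method_under_test.__name__})")
@pytest.mark.parametrize("method_under_test", [head_object, get_object])
def test_method_parametrization(method_under_test):
    # Each run calls a different helper with the same argument set; the title resolves
    # to the callable's __name__ ("head_object" / "get_object").
    assert method_under_test() in ("head", "get")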
@allure.title("Range operations with static session for: {storage_objects} {verb.value}")
@allure.title(
"Range operations with static session (method={method_under_test.__name__}, obj_size={object_size})"
)
@pytest.mark.parametrize(
"method_under_test,verb",
[(get_range, ObjectVerb.RANGE), (get_range_hash, ObjectVerb.RANGEHASH)],
ids=["range", "rangehash"],
)
def test_static_session_range(
self,
@ -201,15 +195,11 @@ class TestObjectStaticSession(ClusterTestBase):
static_sessions: dict[ObjectVerb, str],
method_under_test,
verb: ObjectVerb,
request: FixtureRequest,
max_object_size,
):
"""
Validate static session with range operations
"""
allure.dynamic.title(
f"Range operation with static session: {request.node.callspec.id.replace('-', ' ')}"
)
storage_object = storage_objects[0]
ranges_to_test = get_ranges(
storage_object, max_object_size, self.shell, self.cluster.default_rpc_endpoint
@ -228,18 +218,16 @@ class TestObjectStaticSession(ClusterTestBase):
range_cut=range_to_test,
)
@allure.title("Search operation with static session for {storage_objects}")
@allure.title("Search operation with static session (obj_size={object_size})")
def test_static_session_search(
self,
user_wallet: WalletInfo,
storage_objects: list[StorageObjectInfo],
static_sessions: dict[ObjectVerb, str],
request: FixtureRequest,
):
"""
Validate static session with search operations
"""
allure.dynamic.title(f"Search operation with static session for {request.node.callspec.id}")
cid = storage_objects[0].cid
expected_object_ids = [storage_object.oid for storage_object in storage_objects[0:2]]
@ -253,20 +241,18 @@ class TestObjectStaticSession(ClusterTestBase):
)
assert sorted(expected_object_ids) == sorted(actual_object_ids)
@allure.title("Static session with object id not in session for {storage_objects}")
@allure.title(
"[NEGATIVE] Static session with object id not in session (obj_size={object_size})"
)
def test_static_session_unrelated_object(
self,
user_wallet: WalletInfo,
storage_objects: list[StorageObjectInfo],
static_sessions: dict[ObjectVerb, str],
request: FixtureRequest,
):
"""
Validate static session with object id not in session
"""
allure.dynamic.title(
f"Static session with object id not in session for {request.node.callspec.id}"
)
with pytest.raises(Exception, match=UNRELATED_OBJECT):
head_object(
user_wallet.path,
@ -277,20 +263,16 @@ class TestObjectStaticSession(ClusterTestBase):
session=static_sessions[ObjectVerb.HEAD],
)
@allure.title("Static session with user id not in session for {storage_objects}")
@allure.title("[NEGATIVE] Static session with user id not in session (obj_size={object_size})")
def test_static_session_head_unrelated_user(
self,
stranger_wallet: WalletInfo,
storage_objects: list[StorageObjectInfo],
static_sessions: dict[ObjectVerb, str],
request: FixtureRequest,
):
"""
Validate static session with user id not in session
"""
allure.dynamic.title(
f"Static session with user id not in session for {request.node.callspec.id}"
)
storage_object = storage_objects[0]
with pytest.raises(Exception, match=UNRELATED_KEY):
@ -303,20 +285,16 @@ class TestObjectStaticSession(ClusterTestBase):
session=static_sessions[ObjectVerb.HEAD],
)
@allure.title("Static session with wrong verb in session for {storage_objects}")
@allure.title("[NEGATIVE] Static session with wrong verb in session (obj_size={object_size})")
def test_static_session_head_wrong_verb(
self,
user_wallet: WalletInfo,
storage_objects: list[StorageObjectInfo],
static_sessions: dict[ObjectVerb, str],
request: FixtureRequest,
):
"""
Validate static session with wrong verb in session
"""
allure.dynamic.title(
f"Static session with wrong verb in session for {request.node.callspec.id}"
)
storage_object = storage_objects[0]
with pytest.raises(Exception, match=WRONG_VERB):
@ -329,21 +307,19 @@ class TestObjectStaticSession(ClusterTestBase):
session=static_sessions[ObjectVerb.HEAD],
)
@allure.title("Static session with container id not in session for {storage_objects}")
@allure.title(
"[NEGATIVE] Static session with container id not in session (obj_size={object_size})"
)
def test_static_session_unrelated_container(
self,
user_wallet: WalletInfo,
storage_objects: list[StorageObjectInfo],
storage_containers: list[str],
static_sessions: dict[ObjectVerb, str],
request: FixtureRequest,
):
"""
Validate static session with container id not in session
"""
allure.dynamic.title(
f"Static session with container id not in session for {request.node.callspec.id}"
)
storage_object = storage_objects[0]
with pytest.raises(Exception, match=UNRELATED_CONTAINER):
@ -356,7 +332,7 @@ class TestObjectStaticSession(ClusterTestBase):
session=static_sessions[ObjectVerb.GET],
)
@allure.title("Static session which signed by another wallet for {storage_objects}")
@allure.title("[NEGATIVE] Static session signed by another wallet (obj_size={object_size})")
def test_static_session_signed_by_other(
self,
owner_wallet: WalletInfo,
@ -365,14 +341,10 @@ class TestObjectStaticSession(ClusterTestBase):
storage_containers: list[str],
storage_objects: list[StorageObjectInfo],
temp_directory: str,
request: FixtureRequest,
):
"""
Validate static session which signed by another wallet
"""
allure.dynamic.title(
f"Static session which signed by another wallet for {request.node.callspec.id}"
)
storage_object = storage_objects[0]
session_token_file = generate_object_session_token(
@ -394,7 +366,7 @@ class TestObjectStaticSession(ClusterTestBase):
session=signed_token_file,
)
@allure.title("Static session which signed for another container for {storage_objects}")
@allure.title("[NEGATIVE] Static session for another container (obj_size={object_size})")
def test_static_session_signed_for_other_container(
self,
owner_wallet: WalletInfo,
@ -402,14 +374,10 @@ class TestObjectStaticSession(ClusterTestBase):
storage_containers: list[str],
storage_objects: list[StorageObjectInfo],
temp_directory: str,
request: FixtureRequest,
):
"""
Validate static session which signed for another container
"""
allure.dynamic.title(
f"Static session which signed for another container for {request.node.callspec.id}"
)
storage_object = storage_objects[0]
container = storage_containers[1]
@ -432,7 +400,7 @@ class TestObjectStaticSession(ClusterTestBase):
session=signed_token_file,
)
@allure.title("Static session which wasn't signed for {storage_objects}")
@allure.title("[NEGATIVE] Static session without sign (obj_size={object_size})")
def test_static_session_without_sign(
self,
owner_wallet: WalletInfo,
@ -440,12 +408,10 @@ class TestObjectStaticSession(ClusterTestBase):
storage_containers: list[str],
storage_objects: list[StorageObjectInfo],
temp_directory: str,
request: FixtureRequest,
):
"""
Validate static session which wasn't signed
"""
allure.dynamic.title(f"Static session which wasn't signed for {request.node.callspec.id}")
storage_object = storage_objects[0]
session_token_file = generate_object_session_token(
@ -466,7 +432,7 @@ class TestObjectStaticSession(ClusterTestBase):
session=session_token_file,
)
@allure.title("Static session which expires at next epoch for {storage_objects}")
@allure.title("Static session which expires at next epoch (obj_size={object_size})")
def test_static_session_expiration_at_next(
self,
owner_wallet: WalletInfo,
@ -474,14 +440,10 @@ class TestObjectStaticSession(ClusterTestBase):
storage_containers: list[str],
storage_objects: list[StorageObjectInfo],
temp_directory: str,
request: FixtureRequest,
):
"""
Validate static session which expires at next epoch
"""
allure.dynamic.title(
f"Static session which expires at next epoch for {request.node.callspec.id}"
)
epoch = ensure_fresh_epoch(self.shell, self.cluster)
container = storage_containers[0]
@ -537,7 +499,7 @@ class TestObjectStaticSession(ClusterTestBase):
session=token_expire_at_next_epoch,
)
@allure.title("Static session which is valid starting from next epoch for {storage_objects}")
@allure.title("Static session which is valid since next epoch (obj_size={object_size})")
def test_static_session_start_at_next(
self,
owner_wallet: WalletInfo,
@ -545,14 +507,10 @@ class TestObjectStaticSession(ClusterTestBase):
storage_containers: list[str],
storage_objects: list[StorageObjectInfo],
temp_directory: str,
request: FixtureRequest,
):
"""
Validate static session which is valid starting from next epoch
"""
allure.dynamic.title(
f"Static session which is valid starting from next epoch for {request.node.callspec.id}"
)
epoch = ensure_fresh_epoch(self.shell, self.cluster)
container = storage_containers[0]
@ -622,7 +580,7 @@ class TestObjectStaticSession(ClusterTestBase):
session=token_start_at_next_epoch,
)
@allure.title("Static session which is already expired for {storage_objects}")
@allure.title("[NEGATIVE] Expired static session (obj_size={object_size})")
def test_static_session_already_expired(
self,
owner_wallet: WalletInfo,
@ -630,14 +588,10 @@ class TestObjectStaticSession(ClusterTestBase):
storage_containers: list[str],
storage_objects: list[StorageObjectInfo],
temp_directory: str,
request: FixtureRequest,
):
"""
Validate static session which is already expired
"""
allure.dynamic.title(
f"Static session which is already expired for {request.node.callspec.id}"
)
epoch = ensure_fresh_epoch(self.shell, self.cluster)
container = storage_containers[0]
@ -665,20 +619,16 @@ class TestObjectStaticSession(ClusterTestBase):
session=token_already_expired,
)
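The lifetime tests above (expires at next epoch, valid only from the next epoch, already expired) all reduce to the same window check on the token's not-before and expiration epochs. Below is a small, library-free sketch of that logic; the field names are the conventional ones, since the concrete token structure is not shown in this diff.

def token_is_active(current_epoch: int, nbf: int, exp: int) -> bool:
    # A token is usable only while nbf <= current_epoch <= exp.
    return nbf <= current_epoch <= exp

epoch = 10  # hypothetical current epoch
assert token_is_active(epoch, nbf=epoch, exp=epoch + 1)          # expires at next epoch: valid now
assert not token_is_active(epoch, nbf=epoch + 1, exp=epoch + 2)  # valid only starting next epoch
assert not token_is_active(epoch, nbf=epoch - 2, exp=epoch - 1)  # already expired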
@allure.title("Delete verb should be restricted for static session for {storage_objects}")
@allure.title("Delete verb is restricted for static session (obj_size={object_size})")
def test_static_session_delete_verb(
self,
user_wallet: WalletInfo,
storage_objects: list[StorageObjectInfo],
static_sessions: dict[ObjectVerb, str],
request: FixtureRequest,
):
"""
Delete verb should be restricted for static session
"""
allure.dynamic.title(
f"Delete verb should be restricted for static session for {request.node.callspec.id}"
)
storage_object = storage_objects[0]
with pytest.raises(Exception, match=OBJECT_ACCESS_DENIED):
delete_object(
@ -690,20 +640,16 @@ class TestObjectStaticSession(ClusterTestBase):
session=static_sessions[ObjectVerb.DELETE],
)
@allure.title("Put verb should be restricted for static session for {storage_objects}")
@allure.title("Put verb is restricted for static session (obj_size={object_size})")
def test_static_session_put_verb(
self,
user_wallet: WalletInfo,
storage_objects: list[StorageObjectInfo],
static_sessions: dict[ObjectVerb, str],
request: FixtureRequest,
):
"""
Put verb should be restricted for static session
"""
allure.dynamic.title(
f"Put verb should be restricted for static session for {request.node.callspec.id}"
)
storage_object = storage_objects[0]
with pytest.raises(Exception, match=OBJECT_ACCESS_DENIED):
put_object_to_random_node(
@ -715,7 +661,7 @@ class TestObjectStaticSession(ClusterTestBase):
session=static_sessions[ObjectVerb.PUT],
)
@allure.title("Static session which is issued in future epoch for {storage_objects}")
@allure.title("[NEGATIVE] Static session is issued in future epoch (obj_size={object_size})")
def test_static_session_invalid_issued_epoch(
self,
owner_wallet: WalletInfo,
@ -723,14 +669,10 @@ class TestObjectStaticSession(ClusterTestBase):
storage_containers: list[str],
storage_objects: list[StorageObjectInfo],
temp_directory: str,
request: FixtureRequest,
):
"""
Validate static session which is issued in future epoch
"""
allure.dynamic.title(
f"Static session which is issued in future epoch for {request.node.callspec.id}"
)
epoch = ensure_fresh_epoch(self.shell, self.cluster)
container = storage_containers[0]