import logging
import os
from datetime import datetime, timedelta
from typing import Optional

from dateutil.parser import parse

from frostfs_testlib import reporter
from frostfs_testlib.s3 import S3ClientWrapper, VersioningStatus
from frostfs_testlib.s3.interfaces import BucketContainerResolver
from frostfs_testlib.shell import Shell
from frostfs_testlib.steps.cli.container import search_nodes_with_container
from frostfs_testlib.storage.cluster import Cluster, ClusterNode
from frostfs_testlib.storage.dataclasses.wallet import WalletInfo
from frostfs_testlib.utils.file_utils import TestFile, get_file_hash

logger = logging.getLogger("NeoLogger")


@reporter.step("Expected all objects are presented in the bucket")
|
|
def check_objects_in_bucket(
|
|
s3_client: S3ClientWrapper,
|
|
bucket: str,
|
|
expected_objects: list,
|
|
unexpected_objects: Optional[list] = None,
|
|
) -> None:
|
|
unexpected_objects = unexpected_objects or []
|
|
bucket_objects = s3_client.list_objects(bucket)
|
|
assert len(bucket_objects) == len(expected_objects), f"Expected {len(expected_objects)} objects in the bucket"
|
|
for bucket_object in expected_objects:
|
|
assert bucket_object in bucket_objects, f"Expected object {bucket_object} in objects list {bucket_objects}"
|
|
|
|
for bucket_object in unexpected_objects:
|
|
assert bucket_object not in bucket_objects, f"Expected object {bucket_object} not in objects list {bucket_objects}"
|
|
|
|
|
|
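# Illustrative usage only (object names below are hypothetical); check_objects_in_bucket
# compares plain object keys against the list_objects() output:
#
#   check_objects_in_bucket(
#       s3_client,
#       bucket,
#       expected_objects=["file-1.bin", "file-2.bin"],
#       unexpected_objects=["deleted.bin"],
#   )

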
@reporter.step("Try to get object and got error")
|
|
def try_to_get_objects_and_expect_error(s3_client: S3ClientWrapper, bucket: str, object_keys: list) -> None:
|
|
for obj in object_keys:
|
|
try:
|
|
s3_client.get_object(bucket, obj)
|
|
raise AssertionError(f"Object {obj} found in bucket {bucket}")
|
|
except Exception as err:
|
|
assert "The specified key does not exist" in str(err), f"Expected error in exception {err}"
|
|
|
|
|
|
@reporter.step("Set versioning status to '{status}' for bucket '{bucket}'")
|
|
def set_bucket_versioning(s3_client: S3ClientWrapper, bucket: str, status: VersioningStatus):
|
|
if status == VersioningStatus.UNDEFINED:
|
|
return
|
|
|
|
s3_client.put_bucket_versioning(bucket, status=status)
|
|
bucket_status = s3_client.get_bucket_versioning_status(bucket)
|
|
assert bucket_status == status.value, f"Expected {bucket_status} status. Got {status.value}"
|
|
|
|
|
|
def object_key_from_file_path(full_path: str) -> str:
    return os.path.basename(full_path)


def assert_tags(actual_tags: list, expected_tags: Optional[list] = None, unexpected_tags: Optional[list] = None) -> None:
    expected_tags = [{"Key": key, "Value": value} for key, value in expected_tags] if expected_tags else []
    unexpected_tags = [{"Key": key, "Value": value} for key, value in unexpected_tags] if unexpected_tags else []
    if expected_tags == []:
        assert not actual_tags, f"Expected no tags, got {actual_tags}"
    assert len(expected_tags) == len(actual_tags), f"Expected {len(expected_tags)} tags, got {len(actual_tags)}"
    for tag in expected_tags:
        assert tag in actual_tags, f"Tag {tag} must be in {actual_tags}"
    for tag in unexpected_tags:
        assert tag not in actual_tags, f"Tag {tag} should not be in {actual_tags}"


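# Illustrative usage only (tag names below are hypothetical); assert_tags takes expected/unexpected
# tags as (key, value) pairs and actual_tags in the S3 form [{"Key": ..., "Value": ...}, ...]:
#
#   assert_tags(
#       actual_tags=[{"Key": "env", "Value": "test"}],
#       expected_tags=[("env", "test")],
#       unexpected_tags=[("tier", "cold")],
#   )

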
@reporter.step("Expected all tags are presented in object")
|
|
def check_tags_by_object(
|
|
s3_client: S3ClientWrapper,
|
|
bucket: str,
|
|
key: str,
|
|
expected_tags: list,
|
|
unexpected_tags: Optional[list] = None,
|
|
) -> None:
|
|
actual_tags = s3_client.get_object_tagging(bucket, key)
|
|
assert_tags(expected_tags=expected_tags, unexpected_tags=unexpected_tags, actual_tags=actual_tags)
|
|
|
|
|
|
@reporter.step("Expected all tags are presented in bucket")
|
|
def check_tags_by_bucket(
|
|
s3_client: S3ClientWrapper,
|
|
bucket: str,
|
|
expected_tags: list,
|
|
unexpected_tags: Optional[list] = None,
|
|
) -> None:
|
|
actual_tags = s3_client.get_bucket_tagging(bucket)
|
|
assert_tags(expected_tags=expected_tags, unexpected_tags=unexpected_tags, actual_tags=actual_tags)
|
|
|
|
|
|
def assert_object_lock_mode(
    s3_client: S3ClientWrapper,
    bucket: str,
    file_name: str,
    object_lock_mode: str,
    retain_until_date: datetime,
    legal_hold_status: str = "OFF",
    retain_period: Optional[int] = None,
):
    object_dict = s3_client.get_object(bucket, file_name, full_output=True)
    assert object_dict.get("ObjectLockMode") == object_lock_mode, f"Expected Object Lock Mode is {object_lock_mode}"
    assert (
        object_dict.get("ObjectLockLegalHoldStatus") == legal_hold_status
    ), f"Expected Object Lock Legal Hold Status is {legal_hold_status}"
    object_retain_date = object_dict.get("ObjectLockRetainUntilDate")
    retain_date = parse(object_retain_date) if isinstance(object_retain_date, str) else object_retain_date
    if retain_until_date:
        assert retain_date.strftime("%Y-%m-%dT%H:%M:%S") == retain_until_date.strftime(
            "%Y-%m-%dT%H:%M:%S"
        ), f'Expected Object Lock Retain Until Date is {retain_until_date.strftime("%Y-%m-%dT%H:%M:%S")}'
    elif retain_period:
        last_modify_date = object_dict.get("LastModified")
        last_modify = parse(last_modify_date) if isinstance(last_modify_date, str) else last_modify_date
        assert (
            retain_date - last_modify + timedelta(seconds=1)
        ).days == retain_period, f"Expected retention period is {retain_period} days"


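# Illustrative usage only (values below are hypothetical); pass either an absolute retain_until_date
# or a retain_period in days, which is checked against the object's LastModified timestamp:
#
#   assert_object_lock_mode(s3_client, bucket, "file-1.bin", "COMPLIANCE", retain_until_date=None, retain_period=1)

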
def _format_grants_as_strings(grants: list[dict]) -> set[str]:
    grantee_format = "{g_type}::{uri}:{permission}"
    return {
        grantee_format.format(
            g_type=grant.get("Grantee", {}).get("Type", ""),
            uri=grant.get("Grantee", {}).get("URI", ""),
            permission=grant.get("Permission", ""),
        )
        for grant in grants
    }


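# Illustrative example (grant values are hypothetical); an ACL grant such as
#   {"Grantee": {"Type": "Group", "URI": "http://acs.amazonaws.com/groups/global/AllUsers"}, "Permission": "READ"}
# is formatted as "Group::http://acs.amazonaws.com/groups/global/AllUsers:READ".

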
@reporter.step("Verify ACL permissions")
|
|
def verify_acl_permissions(actual_acl_grants: list[dict], expected_acl_grants: list[dict], strict: bool = True):
|
|
actual_grants = _format_grants_as_strings(actual_acl_grants)
|
|
expected_grants = _format_grants_as_strings(expected_acl_grants)
|
|
|
|
assert expected_grants <= actual_grants, "Permissions mismatch"
|
|
if strict:
|
|
assert expected_grants == actual_grants, "Extra permissions found, must not be there"
|
|
|
|
|
|
@reporter.step("Delete bucket with all objects")
|
|
def delete_bucket_with_objects(s3_client: S3ClientWrapper, bucket: str):
|
|
versioning_status = s3_client.get_bucket_versioning_status(bucket)
|
|
if versioning_status == VersioningStatus.ENABLED.value:
|
|
# From versioned bucket we should delete all versions and delete markers of all objects
|
|
objects_versions = s3_client.list_objects_versions(bucket)
|
|
if objects_versions:
|
|
s3_client.delete_object_versions_without_dm(bucket, objects_versions)
|
|
objects_delete_markers = s3_client.list_delete_markers(bucket)
|
|
if objects_delete_markers:
|
|
s3_client.delete_object_versions_without_dm(bucket, objects_delete_markers)
|
|
|
|
else:
|
|
# From non-versioned bucket it's sufficient to delete objects by key
|
|
objects = s3_client.list_objects(bucket)
|
|
if objects:
|
|
s3_client.delete_objects(bucket, objects)
|
|
objects_delete_markers = s3_client.list_delete_markers(bucket)
|
|
if objects_delete_markers:
|
|
s3_client.delete_object_versions_without_dm(bucket, objects_delete_markers)
|
|
|
|
# Delete the bucket itself
|
|
s3_client.delete_bucket(bucket)
|
|
|
|
|
|
@reporter.step("Search nodes bucket")
|
|
def search_nodes_with_bucket(
|
|
cluster: Cluster,
|
|
bucket_name: str,
|
|
wallet: WalletInfo,
|
|
shell: Shell,
|
|
endpoint: str,
|
|
bucket_container_resolver: BucketContainerResolver,
|
|
) -> list[ClusterNode]:
|
|
cid = None
|
|
for cluster_node in cluster.cluster_nodes:
|
|
cid = bucket_container_resolver.resolve(cluster_node, bucket_name)
|
|
if cid:
|
|
break
|
|
nodes_list = search_nodes_with_container(wallet=wallet, cid=cid, shell=shell, endpoint=endpoint, cluster=cluster)
|
|
return nodes_list
|
|
|
|
|
|
def get_bytes_relative_to_object(value: int | str, object_size: Optional[int] = None, part_size: Optional[int] = None) -> int:
    if isinstance(value, int):
        return value

    if "part" not in value and "object" not in value:
        return int(value)

    if object_size is not None:
        value = value.replace("object", str(object_size))

    if part_size is not None:
        value = value.replace("part", str(part_size))

    # After substitution the value is a simple arithmetic expression, e.g. "1024-1", which is evaluated here
    return int(eval(value))


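# Illustrative only (sizes below are hypothetical): with object_size=1024 and part_size=256,
# "object-1" resolves to 1023 and "part+100" resolves to 356.

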
def get_range_relative_to_object(rng: str, object_size: Optional[int] = None, part_size: Optional[int] = None, int_values: bool = False) -> str | tuple[int, int]:
    start, end = rng.split(":")
    start = get_bytes_relative_to_object(start, object_size, part_size)
    end = get_bytes_relative_to_object(end, object_size, part_size)
    return (start, end) if int_values else f"bytes {start}-{end}/*"
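

# Illustrative only (sizes below are hypothetical): with object_size=1024, rng="0:object-1"
# yields "bytes 0-1023/*", or (0, 1023) when int_values=True.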