forked from TrueCloudLab/frostfs-testlib
Compare commits
4 commits
c9e4c2c7bb...779775994e
| Author | SHA1 | Date |
|---|---|---|
| | 779775994e | |
| | 376499a7e8 | |
| | f4460194bc | |
| | 3a4204f2e4 | |
12 changed files with 51 additions and 16 deletions
```diff
@@ -110,7 +110,7 @@ class FrostfsAdmMorph(CliCommand):
             **{param: param_value for param, param_value in locals().items() if param not in ["self"]},
         )
 
-    def dump_hashes(self, rpc_endpoint: str) -> CommandResult:
+    def dump_hashes(self, rpc_endpoint: str, domain: Optional[str] = None) -> CommandResult:
         """Dump deployed contract hashes.
 
         Args:
```
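The new optional `domain` argument narrows the dump to a single NNS domain. A minimal usage sketch, assuming `FrostfsAdm` is importable from `frostfs_testlib.cli` as elsewhere in this library; the binary path and endpoint are placeholders, not values from this diff:

```python
from frostfs_testlib.cli import FrostfsAdm
from frostfs_testlib.shell.local_shell import LocalShell

# Hypothetical wiring: the binary path and endpoint depend on the environment.
frostfs_adm = FrostfsAdm(shell=LocalShell(), frostfs_adm_exec_path="/usr/bin/frostfs-adm")

# Old call still works: dump every deployed contract hash.
all_hashes = frostfs_adm.morph.dump_hashes("http://morph-chain.example:30333")

# New call: restrict the dump to a single NNS domain.
container = frostfs_adm.morph.dump_hashes("http://morph-chain.example:30333", domain="container")
```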
```diff
@@ -15,6 +15,8 @@ class NetmapParser:
             "epoch_duration": r"Epoch duration: (?P<epoch_duration>\d+)",
             "inner_ring_candidate_fee": r"Inner Ring candidate fee: (?P<inner_ring_candidate_fee>\d+)",
             "maximum_object_size": r"Maximum object size: (?P<maximum_object_size>\d+)",
+            "maximum_count_of_data_shards": r"Maximum count of data shards: (?P<maximum_count_of_data_shards>\d+)",
+            "maximum_count_of_parity_shards": r"Maximum count of parity shards: (?P<maximum_count_of_parity_shards>\d+)",
             "withdrawal_fee": r"Withdrawal fee: (?P<withdrawal_fee>\d+)",
             "homomorphic_hashing_disabled": r"Homomorphic hashing disabled: (?P<homomorphic_hashing_disabled>true|false)",
             "maintenance_mode_allowed": r"Maintenance mode allowed: (?P<maintenance_mode_allowed>true|false)",
```
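Each pattern captures one line of textual netinfo output into a named group whose name matches the corresponding `NodeNetInfo` field (the two new fields appear in the dataclass hunk further down). A self-contained sketch of how one of the new patterns matches; the sample output values are illustrative:

```python
import re

# Illustrative fragment of netinfo output; the values are made up.
output = "Maximum count of data shards: 12\nMaximum count of parity shards: 4"

pattern = r"Maximum count of data shards: (?P<maximum_count_of_data_shards>\d+)"
match = re.search(pattern, output)
assert match is not None
print(match.group("maximum_count_of_data_shards"))  # "12"
```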
```diff
@@ -240,6 +240,7 @@ class DockerHost(Host):
         until: Optional[datetime] = None,
         unit: Optional[str] = None,
         exclude_filter: Optional[str] = None,
+        priority: Optional[str] = None
     ) -> str:
         client = self._get_docker_client()
         filtered_logs = ""
```
```diff
@@ -297,6 +297,7 @@ class Host(ABC):
         until: Optional[datetime] = None,
         unit: Optional[str] = None,
         exclude_filter: Optional[str] = None,
+        priority: Optional[str] = None
     ) -> str:
         """Get logs from host filtered by regex.
 
@@ -305,6 +306,8 @@ class Host(ABC):
             since: If set, limits the time from which logs should be collected. Must be in UTC.
             until: If set, limits the time until which logs should be collected. Must be in UTC.
             unit: required unit.
+            priority: logs level, 0 - emergency, 7 - debug. All messages with that code and higher.
+                For example, if we specify the -p 2 option, journalctl will show all messages with levels 2, 1 and 0.
 
         Returns:
             Found entries as str if any found.
```
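The `priority` value follows journalctl semantics: numerically lower levels are more severe, and `-p N` includes everything at level N and more severe. The flag assembly below is an assumption based on the docstring, not code shown in this diff:

```python
# Sketch of the journalctl invocation the docstring describes;
# the unit name and priority value are hypothetical.
unit = "frostfs-storage"
priority = "2"  # includes levels 2 (critical), 1 (alert) and 0 (emergency)
cmd = f"journalctl --no-pager -u {unit} -p {priority}"
print(cmd)  # journalctl --no-pager -u frostfs-storage -p 2
```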
```diff
@@ -4,6 +4,6 @@ ALL_USERS_GROUP_READ_GRANT = {"Grantee": {"Type": "Group", "URI": ALL_USERS_GROU
 CANONICAL_USER_FULL_CONTROL_GRANT = {"Grantee": {"Type": "CanonicalUser"}, "Permission": "FULL_CONTROL"}
 
 # https://docs.aws.amazon.com/AmazonS3/latest/userguide/acl-overview.html#canned-acl
-PRIVATE_GRANTS = [CANONICAL_USER_FULL_CONTROL_GRANT]
-PUBLIC_READ_GRANTS = [CANONICAL_USER_FULL_CONTROL_GRANT, ALL_USERS_GROUP_READ_GRANT]
-PUBLIC_READ_WRITE_GRANTS = [CANONICAL_USER_FULL_CONTROL_GRANT, ALL_USERS_GROUP_WRITE_GRANT, ALL_USERS_GROUP_READ_GRANT]
+PRIVATE_GRANTS = []
+PUBLIC_READ_GRANTS = [ALL_USERS_GROUP_READ_GRANT]
+PUBLIC_READ_WRITE_GRANTS = [ALL_USERS_GROUP_WRITE_GRANT, ALL_USERS_GROUP_READ_GRANT]
```
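After this change the expected lists describe only the grants a canned ACL adds, without the owner's implicit FULL_CONTROL entry. A sketch (not from this diff) of how such lists might be asserted against a bucket, assuming the wrapper exposes a `get_bucket_acl` helper that returns the grants list:

```python
from frostfs_testlib.s3.interfaces import S3ClientWrapper

def assert_canned_acl_grants(s3_client: S3ClientWrapper, bucket: str, expected_grants: list[dict]) -> None:
    # Compare only the expected grants; the bucket owner's implicit
    # FULL_CONTROL entry is intentionally no longer part of the lists.
    actual_grants = s3_client.get_bucket_acl(bucket)
    for grant in expected_grants:
        assert grant in actual_grants, f"grant {grant} not found in {actual_grants}"
```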
```diff
@@ -1,7 +1,6 @@
 import json
 import logging
 import os
-import uuid
 from datetime import datetime
 from time import sleep
 from typing import Literal, Optional, Union
@@ -11,6 +10,7 @@ from frostfs_testlib.resources.common import ASSETS_DIR, MAX_REQUEST_ATTEMPTS, R
 from frostfs_testlib.s3.interfaces import S3ClientWrapper, VersioningStatus, _make_objs_dict
 from frostfs_testlib.shell import CommandOptions
 from frostfs_testlib.shell.local_shell import LocalShell
+from frostfs_testlib.utils import string_utils
 
 # TODO: Refactor this code to use shell instead of _cmd_run
 from frostfs_testlib.utils.cli_utils import _configure_aws_cli
```
```diff
@@ -68,7 +68,7 @@ class AwsCliClient(S3ClientWrapper):
         location_constraint: Optional[str] = None,
     ) -> str:
         if bucket is None:
-            bucket = str(uuid.uuid4())
+            bucket = string_utils.unique_name("bucket-")
 
         if object_lock_enabled_for_bucket is None:
             object_lock = ""
```
```diff
@@ -229,7 +229,7 @@ class AwsCliClient(S3ClientWrapper):
         if bucket is None:
             bucket = source_bucket
         if key is None:
-            key = os.path.join(os.getcwd(), str(uuid.uuid4()))
+            key = string_utils.unique_name("copy-object-")
         copy_source = f"{source_bucket}/{source_key}"
 
         cmd = (
```
```diff
@@ -315,7 +315,7 @@ class AwsCliClient(S3ClientWrapper):
         object_range: Optional[tuple[int, int]] = None,
         full_output: bool = False,
     ) -> dict | TestFile:
-        test_file = TestFile(os.path.join(os.getcwd(), ASSETS_DIR, str(uuid.uuid4())))
+        test_file = TestFile(os.path.join(os.getcwd(), ASSETS_DIR, string_utils.unique_name("dl-object-")))
         version = f" --version-id {version_id}" if version_id else ""
         cmd = (
             f"aws {self.common_flags} s3api get-object --bucket {bucket} --key {key} "
```
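Across both S3 clients the pattern is the same: `uuid4`-based names give way to `string_utils.unique_name` with a purpose-revealing prefix (`bucket-`, `copy-object-`, `dl-object-`). A sketch of the calling pattern these defaults enable; the helper function is hypothetical:

```python
from frostfs_testlib.s3.interfaces import S3ClientWrapper

def make_test_bucket(s3_client: S3ClientWrapper) -> str:
    # No explicit name: the wrapper now generates "bucket-<hex timestamp>".
    return s3_client.create_bucket()
```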
```diff
@@ -1,7 +1,6 @@
 import json
 import logging
 import os
-import uuid
 from datetime import datetime
 from functools import wraps
 from time import sleep
@@ -16,6 +15,7 @@ from mypy_boto3_s3 import S3Client
 from frostfs_testlib import reporter
 from frostfs_testlib.resources.common import ASSETS_DIR, MAX_REQUEST_ATTEMPTS, RETRY_MODE, S3_SYNC_WAIT_TIME
 from frostfs_testlib.s3.interfaces import S3ClientWrapper, VersioningStatus, _make_objs_dict
+from frostfs_testlib.utils import string_utils
 
 # TODO: Refactor this code to use shell instead of _cmd_run
 from frostfs_testlib.utils.cli_utils import _configure_aws_cli, log_command_execution
```
```diff
@@ -115,7 +115,7 @@ class Boto3ClientWrapper(S3ClientWrapper):
         location_constraint: Optional[str] = None,
     ) -> str:
         if bucket is None:
-            bucket = str(uuid.uuid4())
+            bucket = string_utils.unique_name("bucket-")
 
         params = {"Bucket": bucket}
         if object_lock_enabled_for_bucket is not None:
```
```diff
@@ -439,7 +439,7 @@ class Boto3ClientWrapper(S3ClientWrapper):
         if bucket is None:
             bucket = source_bucket
         if key is None:
-            key = os.path.join(os.getcwd(), str(uuid.uuid4()))
+            key = string_utils.unique_name("copy-object-")
         copy_source = f"{source_bucket}/{source_key}"
 
         params = {
```
```diff
@@ -476,7 +476,7 @@ class Boto3ClientWrapper(S3ClientWrapper):
         if full_output:
             return response
 
-        test_file = TestFile(os.path.join(os.getcwd(), ASSETS_DIR, str(uuid.uuid4())))
+        test_file = TestFile(os.path.join(os.getcwd(), ASSETS_DIR, string_utils.unique_name("dl-object-")))
         with open(test_file, "wb") as file:
             chunk = response["Body"].read(1024)
             while chunk:
```
```diff
@@ -19,7 +19,7 @@ def check_metrics_counter(
         counter_act += get_metrics_value(cluster_node, parse_from_command, **metrics_greps)
     assert eval(
         f"{counter_act} {operator} {counter_exp}"
-    ), f"Expected: {counter_exp}, Actual: {counter_act} in node: {cluster_node}"
+    ), f"Expected: {counter_exp} {operator} Actual: {counter_act} in node: {cluster_node}"
 
 
 @reporter.step("Get metrics value from node: {node}")
```
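Building the comparison as text and passing it to `eval` lets the operator arrive as a string parameter; the improved message now echoes that operator instead of implying equality. A self-contained illustration of the mechanism:

```python
# How the textual assertion composes: the operator is a string,
# so the whole expression is assembled and then evaluated.
counter_act, operator, counter_exp = 7, ">=", 5
expression = f"{counter_act} {operator} {counter_exp}"  # "7 >= 5"
assert eval(expression), f"Expected: {counter_exp} {operator} Actual: {counter_act}"
```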
```diff
@@ -531,3 +531,11 @@ class ClusterStateController:
         except Exception as err:
             logger.warning(f"Host ping fails with error {err}")
             return HostStatus.ONLINE
+
+    @reporter.step("Get contract by domain - {domain_name}")
+    def get_domain_contracts(self, cluster_node: ClusterNode, domain_name: str):
+        frostfs_adm = FrostfsAdm(
+            shell=cluster_node.host.get_shell(),
+            frostfs_adm_exec_path=FROSTFS_ADM_EXEC,
+        )
+        return frostfs_adm.morph.dump_hashes(cluster_node.morph_chain.get_endpoint(), domain_name).stdout
```
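The new helper wires `dump_hashes`, with the `domain` argument added earlier in this same change, into the cluster controller. A usage sketch, with `controller` and `node` as hypothetical test fixtures:

```python
# controller: a ClusterStateController; node: one of its ClusterNode objects.
hashes = controller.get_domain_contracts(node, "container")
print(hashes)  # raw dump-hashes stdout for the "container" domain
```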
```diff
@@ -70,6 +70,8 @@ class NodeNetInfo:
     epoch_duration: str = None
     inner_ring_candidate_fee: str = None
     maximum_object_size: str = None
+    maximum_count_of_data_shards: str = None
+    maximum_count_of_parity_shards: str = None
     withdrawal_fee: str = None
     homomorphic_hashing_disabled: str = None
     maintenance_mode_allowed: str = None
```
```diff
@@ -6,6 +6,7 @@ from typing import Any, Optional
 
 from frostfs_testlib import reporter
 from frostfs_testlib.resources.common import ASSETS_DIR
+from frostfs_testlib.utils import string_utils
 
 logger = logging.getLogger("NeoLogger")
 
```
```diff
@@ -41,7 +42,9 @@ def ensure_directory_opener(path, flags):
     return os.open(path, flags)
 
 
-@reporter.step("Generate file with size {size}")
+# TODO: Do not add {size} to title yet, since it produces dynamic info in top level steps
+# Use object_size dt in future as argument
+@reporter.step("Generate file")
 def generate_file(size: int) -> TestFile:
     """Generates a binary file with the specified size in bytes.
 
```
```diff
@@ -51,7 +54,7 @@ def generate_file(size: int) -> TestFile:
     Returns:
         The path to the generated file.
     """
-    test_file = TestFile(os.path.join(ASSETS_DIR, str(uuid.uuid4())))
+    test_file = TestFile(os.path.join(ASSETS_DIR, string_utils.unique_name("object-")))
     with open(test_file, "wb", opener=ensure_directory_opener) as file:
         file.write(os.urandom(size))
     logger.info(f"File with size {size} bytes has been generated: {test_file}")
```
```diff
@@ -59,7 +62,9 @@ def generate_file(size: int) -> TestFile:
     return test_file
 
 
-@reporter.step("Generate file with content of size {size}")
+# TODO: Do not add {size} to title yet, since it produces dynamic info in top level steps
+# Use object_size dt in future as argument
+@reporter.step("Generate file with content")
 def generate_file_with_content(
     size: int,
     file_path: Optional[str | TestFile] = None,
```
```diff
@@ -1,12 +1,26 @@
 import random
 import re
 import string
+from datetime import datetime
 
 ONLY_ASCII_LETTERS = string.ascii_letters
 DIGITS_AND_ASCII_LETTERS = string.ascii_letters + string.digits
 NON_DIGITS_AND_LETTERS = string.punctuation
 
 
+def unique_name(prefix: str = ""):
+    """
+    Generate unique short name of anything with prefix.
+    This should be unique in scope of multiple runs
+
+    Args:
+        prefix: prefix for unique name generation
+    Returns:
+        unique name string
+    """
+    return f"{prefix}{hex(int(datetime.now().timestamp() * 1000000))}"
+
+
 def random_string(length: int = 5, source: str = ONLY_ASCII_LETTERS):
     """
     Generate random string from source letters list
```
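`unique_name` encodes the current microsecond timestamp in hex, so the generated names are short, unique across test runs, and sort by creation time, which the `uuid4` names they replace did not. A quick behavioural sketch; the printed values are illustrative:

```python
from frostfs_testlib.utils import string_utils

first = string_utils.unique_name("bucket-")
second = string_utils.unique_name("object-")
print(first)   # e.g. "bucket-0x60a24181d4000", hex of the microsecond timestamp
print(second)  # e.g. "object-0x60a24181d4007", a later call yields a larger suffix
```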