Add repr and str for most classes used in parametrize
Signed-off-by: Andrey Berezin <a.berezin@yadro.com>
parent 38742badf2
commit 2240be09d2

14 changed files with 187 additions and 48 deletions
.gitignore (vendored): 1 addition

@@ -11,3 +11,4 @@ venv.*
 /dist
 /build
 *.egg-info
+wallet_config.yml
Makefile: 1 addition, 1 deletion

@@ -19,7 +19,7 @@ paths:
 	@echo Virtual environment: ${current_dir}/${VENV_DIR}
 	@rm -rf ${VENV_DIR}/lib/python${PYTHON_VERSION}/site-packages/_paths.pth
 	@touch ${VENV_DIR}/lib/python${PYTHON_VERSION}/site-packages/_paths.pth
-	@echo ${current_dir}/src/frostfs_testlib | tee ${VENV_DIR}/lib/python${PYTHON_VERSION}/site-packages/_paths.pth
+	@echo ${current_dir}/src | tee ${VENV_DIR}/lib/python${PYTHON_VERSION}/site-packages/_paths.pth
 
 create: ${VENV_DIR}
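Note on the Makefile change: a _paths.pth file in site-packages simply appends the listed directory to sys.path, so pointing it at ${current_dir}/src instead of ${current_dir}/src/frostfs_testlib makes the development venv resolve the local sources as the frostfs_testlib package, which lines up with the absolute frostfs_testlib.* imports used by the new modules in this commit.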
Project configuration file: 1 addition

@@ -69,3 +69,4 @@ push = false
 filterwarnings = [
     "ignore:Blowfish has been deprecated:cryptography.utils.CryptographyDeprecationWarning",
 ]
+testpaths = ["tests"]
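The new testpaths = ["tests"] entry limits pytest's default collection to the tests directory, so the self-tests added in this commit are picked up without scanning the whole source tree.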
src/frostfs_testlib/load/load_config.py:

@@ -4,6 +4,8 @@ from enum import Enum
 from types import MappingProxyType
 from typing import Any, Optional, get_args
 
+from frostfs_testlib.utils.converting_utils import calc_unit
+
 
 class LoadType(Enum):
     gRPC = "grpc"

@@ -45,6 +47,7 @@ s3_preset_scenarios = [LoadScenario.S3, LoadScenario.S3_CAR]
 
 @dataclass
 class MetaField:
+    name: str
     metadata: MappingProxyType
     value: Any
 

@@ -53,6 +56,7 @@ def metadata_field(
     applicable_scenarios: list[LoadScenario],
     preset_param: Optional[str] = None,
     scenario_variable: Optional[str] = None,
+    string_repr: Optional[bool] = True,
     distributed: Optional[bool] = False,
 ):
     return field(

@@ -61,6 +65,7 @@ def metadata_field(
             "applicable_scenarios": applicable_scenarios,
             "preset_argument": preset_param,
             "env_variable": scenario_variable,
+            "string_repr": string_repr,
             "distributed": distributed,
         },
     )
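The string_repr flag added above is only stored in the dataclass field metadata; nothing else about metadata_field changes. Below is a minimal, self-contained sketch of that mechanism; the helper is a simplified stand-in (the real signature also takes applicable_scenarios, preset_param and distributed), and the demo class and field values are hypothetical.

from dataclasses import dataclass, field, fields
from typing import Optional


# Simplified stand-in for metadata_field: the new string_repr flag is just
# another key in the read-only metadata mapping attached to the field.
def metadata_field(scenario_variable: Optional[str] = None, string_repr: bool = True):
    return field(default=None, metadata={"env_variable": scenario_variable, "string_repr": string_repr})


@dataclass
class DemoParams:
    writers: Optional[int] = metadata_field("WRITERS", string_repr=True)
    registry_file: Optional[str] = metadata_field("REGISTRY_FILE", string_repr=False)


demo = DemoParams(writers=7, registry_file="registry.json")
for f in fields(demo):
    # dataclasses exposes metadata as a read-only MappingProxyType
    print(f.name, f.metadata["string_repr"], getattr(demo, f.name))
# writers True 7
# registry_file False registry.json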
@@ -100,25 +105,27 @@ class K6ProcessAllocationStrategy(Enum):
 class Preset:
     # ------ COMMON ------
     # Amount of objects which should be created
-    objects_count: Optional[int] = metadata_field(all_load_scenarios, "preload_obj", None)
+    objects_count: Optional[int] = metadata_field(all_load_scenarios, "preload_obj", None, False)
     # Preset json. Filled automatically.
-    pregen_json: Optional[str] = metadata_field(all_load_scenarios, "out", "PREGEN_JSON")
+    pregen_json: Optional[str] = metadata_field(all_load_scenarios, "out", "PREGEN_JSON", False)
     # Workers count for preset
-    workers: Optional[int] = metadata_field(all_load_scenarios, "workers", None)
+    workers: Optional[int] = metadata_field(all_load_scenarios, "workers", None, False)
 
     # ------ GRPC ------
     # Amount of containers which should be created
-    containers_count: Optional[int] = metadata_field(grpc_preset_scenarios, "containers", None)
+    containers_count: Optional[int] = metadata_field(
+        grpc_preset_scenarios, "containers", None, False
+    )
     # Container placement policy for containers for gRPC
     container_placement_policy: Optional[str] = metadata_field(
-        grpc_preset_scenarios, "policy", None
+        grpc_preset_scenarios, "policy", None, False
     )
 
     # ------ S3 ------
     # Amount of buckets which should be created
-    buckets_count: Optional[int] = metadata_field(s3_preset_scenarios, "buckets", None)
+    buckets_count: Optional[int] = metadata_field(s3_preset_scenarios, "buckets", None, False)
     # S3 region (AKA placement policy for S3 buckets)
-    s3_location: Optional[str] = metadata_field(s3_preset_scenarios, "location", None)
+    s3_location: Optional[str] = metadata_field(s3_preset_scenarios, "location", None, False)
 
 
 @dataclass
@@ -155,88 +162,93 @@ class LoadParams:
         [LoadScenario.S3, LoadScenario.S3_CAR, LoadScenario.VERIFY, LoadScenario.HTTP],
         "no-verify-ssl",
         "NO_VERIFY_SSL",
+        False,
     )
 
     # ------- COMMON SCENARIO PARAMS -------
     # Load time is the maximum duration for k6 to give load. Default is the BACKGROUND_LOAD_DEFAULT_TIME value.
-    load_time: Optional[int] = metadata_field(all_load_scenarios, None, "DURATION")
+    load_time: Optional[int] = metadata_field(all_load_scenarios, None, "DURATION", False)
     # Object size in KB for load and preset.
-    object_size: Optional[int] = metadata_field(all_load_scenarios, "size", "WRITE_OBJ_SIZE")
+    object_size: Optional[int] = metadata_field(all_load_scenarios, "size", "WRITE_OBJ_SIZE", False)
     # Output registry K6 file. Filled automatically.
-    registry_file: Optional[str] = metadata_field(all_scenarios, None, "REGISTRY_FILE")
+    registry_file: Optional[str] = metadata_field(all_scenarios, None, "REGISTRY_FILE", False)
     # Specifies the minimum duration of every single execution (i.e. iteration).
     # Any iterations that are shorter than this value will cause that VU to
     # sleep for the remainder of the time until the specified minimum duration is reached.
     min_iteration_duration: Optional[str] = metadata_field(
-        all_load_scenarios, None, "K6_MIN_ITERATION_DURATION"
+        all_load_scenarios, None, "K6_MIN_ITERATION_DURATION", False
     )
     # Specifies K6 setupTimeout time. Currently hardcoded in xk6 as 5 seconds for all scenarios
     # https://k6.io/docs/using-k6/k6-options/reference/#setup-timeout
-    setup_timeout: Optional[str] = metadata_field(all_scenarios, None, "K6_SETUP_TIMEOUT")
+    setup_timeout: Optional[str] = metadata_field(all_scenarios, None, "K6_SETUP_TIMEOUT", False)
 
     # ------- CONSTANT VUS SCENARIO PARAMS -------
     # Amount of Writers VU.
-    writers: Optional[int] = metadata_field(constant_vus_scenarios, None, "WRITERS", True)
+    writers: Optional[int] = metadata_field(constant_vus_scenarios, None, "WRITERS", True, True)
     # Amount of Readers VU.
-    readers: Optional[int] = metadata_field(constant_vus_scenarios, None, "READERS", True)
+    readers: Optional[int] = metadata_field(constant_vus_scenarios, None, "READERS", True, True)
     # Amount of Deleters VU.
-    deleters: Optional[int] = metadata_field(constant_vus_scenarios, None, "DELETERS", True)
+    deleters: Optional[int] = metadata_field(constant_vus_scenarios, None, "DELETERS", True, True)
 
     # ------- CONSTANT ARRIVAL RATE SCENARIO PARAMS -------
     # Number of iterations to start during each timeUnit period for write.
     write_rate: Optional[int] = metadata_field(
-        constant_arrival_rate_scenarios, None, "WRITE_RATE", True
+        constant_arrival_rate_scenarios, None, "WRITE_RATE", True, True
     )
 
     # Number of iterations to start during each timeUnit period for read.
     read_rate: Optional[int] = metadata_field(
-        constant_arrival_rate_scenarios, None, "READ_RATE", True
+        constant_arrival_rate_scenarios, None, "READ_RATE", True, True
     )
 
     # Number of iterations to start during each timeUnit period for delete.
     delete_rate: Optional[int] = metadata_field(
-        constant_arrival_rate_scenarios, None, "DELETE_RATE", True
+        constant_arrival_rate_scenarios, None, "DELETE_RATE", True, True
     )
 
     # Amount of preAllocatedVUs for write operations.
     preallocated_writers: Optional[int] = metadata_field(
-        constant_arrival_rate_scenarios, None, "PRE_ALLOC_WRITERS", True
+        constant_arrival_rate_scenarios, None, "PRE_ALLOC_WRITERS", True, True
     )
     # Amount of maxVUs for write operations.
     max_writers: Optional[int] = metadata_field(
-        constant_arrival_rate_scenarios, None, "MAX_WRITERS", True
+        constant_arrival_rate_scenarios, None, "MAX_WRITERS", False, True
     )
 
     # Amount of preAllocatedVUs for read operations.
     preallocated_readers: Optional[int] = metadata_field(
-        constant_arrival_rate_scenarios, None, "PRE_ALLOC_READERS", True
+        constant_arrival_rate_scenarios, None, "PRE_ALLOC_READERS", True, True
     )
     # Amount of maxVUs for read operations.
     max_readers: Optional[int] = metadata_field(
-        constant_arrival_rate_scenarios, None, "MAX_READERS", True
+        constant_arrival_rate_scenarios, None, "MAX_READERS", False, True
     )
 
     # Amount of preAllocatedVUs for read operations.
     preallocated_deleters: Optional[int] = metadata_field(
-        constant_arrival_rate_scenarios, None, "PRE_ALLOC_DELETERS", True
+        constant_arrival_rate_scenarios, None, "PRE_ALLOC_DELETERS", True, True
     )
     # Amount of maxVUs for delete operations.
     max_deleters: Optional[int] = metadata_field(
-        constant_arrival_rate_scenarios, None, "MAX_DELETERS", True
+        constant_arrival_rate_scenarios, None, "MAX_DELETERS", False, True
     )
 
     # Period of time to apply the rate value.
-    time_unit: Optional[str] = metadata_field(constant_arrival_rate_scenarios, None, "TIME_UNIT")
+    time_unit: Optional[str] = metadata_field(
+        constant_arrival_rate_scenarios, None, "TIME_UNIT", False
+    )
 
     # ------- VERIFY SCENARIO PARAMS -------
     # Maximum verification time for k6 to verify objects. Default is BACKGROUND_LOAD_MAX_VERIFY_TIME (3600).
-    verify_time: Optional[int] = metadata_field([LoadScenario.VERIFY], None, "TIME_LIMIT")
+    verify_time: Optional[int] = metadata_field([LoadScenario.VERIFY], None, "TIME_LIMIT", False)
     # Amount of Verification VU.
-    verify_clients: Optional[int] = metadata_field([LoadScenario.VERIFY], None, "CLIENTS", True)
+    verify_clients: Optional[int] = metadata_field(
+        [LoadScenario.VERIFY], None, "CLIENTS", True, False
+    )
 
     # ------- LOCAL SCENARIO PARAMS -------
     # Config file location (filled automatically)
-    config_file: Optional[str] = metadata_field([LoadScenario.LOCAL], None, "CONFIG_FILE")
+    config_file: Optional[str] = metadata_field([LoadScenario.LOCAL], None, "CONFIG_FILE", False)
 
     def set_id(self, load_id):
         self.load_id = load_id
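Reading the trailing booleans in the calls above against the updated metadata_field signature (applicable_scenarios, preset_param, scenario_variable, string_repr, distributed): writers/readers/deleters, the *_rate fields and the preallocated_* fields pass string_repr=True, so they appear in the new string representation, while purely technical fields such as registry_file, setup_timeout, time_unit and the max_* limits pass False and stay out of it.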
@@ -267,6 +279,15 @@ class LoadParams:
 
         return command_args
 
+    def _get_applicable_fields(self):
+        applicable_fields = [
+            meta_field
+            for meta_field in self._get_meta_fields(self)
+            if self.scenario in meta_field.metadata["applicable_scenarios"] and meta_field.value
+        ]
+
+        return applicable_fields
+
     @staticmethod
     def _get_preset_argument(meta_field: MetaField) -> str:
         if isinstance(meta_field.value, bool):
@@ -280,7 +301,7 @@ class LoadParams:
         data_fields = fields(instance)
 
         fields_with_data = [
-            MetaField(field.metadata, getattr(instance, field.name))
+            MetaField(field.name, field.metadata, getattr(instance, field.name))
             for field in data_fields
             if field.metadata and getattr(instance, field.name) is not None
         ]
@@ -293,3 +314,18 @@ class LoadParams:
                 fields_with_data += LoadParams._get_meta_fields(getattr(instance, field.name))
 
         return fields_with_data or []
+
+    def __str__(self) -> str:
+        size, unit = calc_unit(self.object_size, 1)
+        static_params = [f"{self.scenario.value} ({size:.4g} {unit})"]
+        dynamic_params = [
+            f"{meta_field.name}={meta_field.value}"
+            for meta_field in self._get_applicable_fields()
+            if meta_field.metadata["string_repr"]
+        ]
+        params = ", ".join(static_params + dynamic_params)
+
+        return f"load: {params}"
+
+    def __repr__(self) -> str:
+        return self.__str__()
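Taken together, __str__ renders a static "<scenario> (<size> <unit>)" prefix from calc_unit (object_size is in KB, hence skip_units=1) plus name=value pairs for every applicable field whose metadata carries string_repr=True. A tiny self-contained illustration of that assembly follows; it uses a local copy of calc_unit and field values mirroring the gRPC expectation asserted in the load-config tests later in this commit, not the real LoadParams class.

def calc_unit(value: float, skip_units: int = 0) -> tuple[float, str]:
    # Local copy of the helper added to converting_utils below.
    units = ["B", "KiB", "MiB", "GiB", "TiB"]
    for unit in units[skip_units:]:
        if value < 1024:
            return value, unit
        value = value / 1024.0
    return value, unit


def render(scenario: str, object_size_kb: int, string_repr_fields: dict[str, int]) -> str:
    size, unit = calc_unit(object_size_kb, 1)  # start at KiB: object_size is given in KB
    static_params = [f"{scenario} ({size:.4g} {unit})"]
    dynamic_params = [f"{name}={value}" for name, value in string_repr_fields.items()]
    return "load: " + ", ".join(static_params + dynamic_params)


print(render("grpc", 512, {"writers": 7, "readers": 7, "deleters": 8}))
# load: grpc (512 KiB), writers=7, readers=7, deleters=8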
Load report module (class LoadReport):

@@ -1,10 +1,11 @@
 from datetime import datetime
-from typing import Optional, Tuple
+from typing import Optional
 
 import yaml
 
 from frostfs_testlib.load.load_config import K6ProcessAllocationStrategy, LoadParams, LoadScenario
 from frostfs_testlib.load.load_metrics import get_metrics_object
+from frostfs_testlib.utils.converting_utils import calc_unit
 
 
 class LoadReport:

@@ -62,17 +63,6 @@ class LoadReport:
 
         return html
 
-    def _calc_unit(self, value: float, skip_units: int = 0) -> Tuple[float, str]:
-        units = ["B", "KiB", "MiB", "GiB", "TiB"]
-
-        for unit in units[skip_units:]:
-            if value < 1024:
-                return value, unit
-
-            value = value / 1024.0
-
-        return value, unit
-
     def _seconds_to_formatted_duration(self, seconds: int) -> str:
         """Converts N number of seconds to formatted output ignoring zeroes.
         Examples:

@@ -122,7 +112,7 @@ class LoadReport:
     ):
         throughput_html = ""
         if throughput > 0:
-            throughput, unit = self._calc_unit(throughput)
+            throughput, unit = calc_unit(throughput)
             throughput_html = self._row("Throughput", f"{throughput:.2f} {unit}/sec")
 
         per_node_errors_html = ""

@@ -137,7 +127,7 @@ class LoadReport:
         ):
             per_node_errors_html += self._row(f"At {node_key}", errors)
 
-        object_size, object_size_unit = self._calc_unit(self.load_params.object_size, 1)
+        object_size, object_size_unit = calc_unit(self.load_params.object_size, 1)
         duration = self._seconds_to_formatted_duration(self.load_params.load_time)
         model = self._get_model_string()
         # write 8KB 15h49m 50op/sec 50th open model/closed model/min_iteration duration=1s - 1.636MB/s 199.57451/s
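The private LoadReport._calc_unit helper removed here is the same logic that now lives in frostfs_testlib.utils.converting_utils.calc_unit (added below), so the HTML report and the new LoadParams.__str__ share a single implementation.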
@@ -24,6 +24,8 @@ LONG_TIMEOUT = 240
 
 
 class AwsCliClient(S3ClientWrapper):
+    __repr_name__: str = "AWS CLI"
+
     # Flags that we use for all S3 commands: disable SSL verification (as we use self-signed
     # certificate in devenv) and disable automatic pagination in CLI output
     common_flags = "--no-verify-ssl --no-paginate"
@@ -44,6 +44,8 @@ def report_error(func):
 
 
 class Boto3ClientWrapper(S3ClientWrapper):
+    __repr_name__: str = "Boto3 client"
+
     @reporter.step_deco("Configure S3 client (boto3)")
     @report_error
     def __init__(self, access_key_id: str, secret_access_key: str, s3gate_endpoint: str) -> None:
@@ -1,8 +1,10 @@
-from abc import ABC, abstractmethod
+from abc import abstractmethod
 from datetime import datetime
 from enum import Enum
 from typing import Literal, Optional, Union
 
+from frostfs_testlib.testing.readable import HumanReadableABC
+
 
 def _make_objs_dict(key_names):
     objs_list = []

@@ -29,7 +31,7 @@ ACL_COPY = [
 ]
 
 
-class S3ClientWrapper(ABC):
+class S3ClientWrapper(HumanReadableABC):
     @abstractmethod
     def __init__(self, access_key_id: str, secret_access_key: str, s3gate_endpoint: str) -> None:
         pass
@@ -7,11 +7,12 @@ import yaml
 from frostfs_testlib.hosting.config import ServiceConfig
 from frostfs_testlib.hosting.interfaces import Host
 from frostfs_testlib.storage.constants import ConfigAttributes
+from frostfs_testlib.testing.readable import HumanReadableABC
 from frostfs_testlib.utils import wallet_utils
 
 
 @dataclass
-class NodeBase(ABC):
+class NodeBase(HumanReadableABC):
     """
     Represents a node of some underlying service
     """
src/frostfs_testlib/storage/dataclasses/object_size.py (new file, 13 lines):

@@ -0,0 +1,13 @@
+from dataclasses import dataclass
+
+
+@dataclass
+class ObjectSize:
+    name: str
+    value: int
+
+    def __str__(self) -> str:
+        return f"{self.name} object size"
+
+    def __repr__(self) -> str:
+        return self.__str__()
src/frostfs_testlib/testing/readable.py (new file, 27 lines):

@@ -0,0 +1,27 @@
+from abc import ABCMeta
+
+
+class HumanReadableABCMeta(ABCMeta):
+    def __str__(cls):
+        if "__repr_name__" in cls.__dict__:
+            return cls.__dict__["__repr_name__"]
+        return cls.__name__
+
+    def __repr__(cls):
+        if "__repr_name__" in cls.__dict__:
+            return cls.__dict__["__repr_name__"]
+        return cls.__name__
+
+
+class HumanReadableABC(metaclass=HumanReadableABCMeta):
+    @classmethod
+    def __str__(cls):
+        if "__repr_name__" in cls.__dict__:
+            return cls.__dict__["__repr_name__"]
+        return type(cls).__name__
+
+    @classmethod
+    def __repr__(cls):
+        if "__repr_name__" in cls.__dict__:
+            return cls.__dict__["__repr_name__"]
+        return type(cls).__name__
src/frostfs_testlib/utils/converting_utils.py:

@@ -1,10 +1,23 @@
 import base64
 import binascii
 import json
+from typing import Tuple
 
 import base58
 
 
+def calc_unit(value: float, skip_units: int = 0) -> Tuple[float, str]:
+    units = ["B", "KiB", "MiB", "GiB", "TiB"]
+
+    for unit in units[skip_units:]:
+        if value < 1024:
+            return value, unit
+
+        value = value / 1024.0
+
+    return value, unit
+
+
 def str_to_ascii_hex(input: str) -> str:
     b = binascii.hexlify(input.encode())
     return str(b)[2:-1]
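calc_unit walks up the binary unit ladder until the value drops below 1024; skip_units shifts the starting unit, which is how callers that already hold a value in KB (object_size in LoadParams and LoadReport) get "KiB"/"MiB" directly. A small usage sketch, assuming the package from this commit is installed:

from frostfs_testlib.utils.converting_utils import calc_unit

print(calc_unit(2048))       # (2.0, 'KiB')   -- value interpreted as bytes
print(calc_unit(512, 1))     # (512, 'KiB')   -- skip "B": value is already in KB
print(calc_unit(524288, 1))  # (512.0, 'MiB') -- 524288 KB == 512 MiB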
tests/test_dataclasses.py (new file, 37 lines):

@@ -0,0 +1,37 @@
+from typing import Any
+
+import pytest
+
+from frostfs_testlib.s3 import AwsCliClient, Boto3ClientWrapper
+from frostfs_testlib.storage.dataclasses.frostfs_services import (
+    HTTPGate,
+    InnerRing,
+    MorphChain,
+    S3Gate,
+    StorageNode,
+)
+from frostfs_testlib.storage.dataclasses.object_size import ObjectSize
+
+
+class TestDataclassesStr:
+    """Here we are testing important classes string representation."""
+
+    @pytest.mark.parametrize(
+        "obj, expected",
+        [
+            (Boto3ClientWrapper, "Boto3 client"),
+            (AwsCliClient, "AWS CLI"),
+            (ObjectSize("simple", 1), "simple object size"),
+            (ObjectSize("simple", 10), "simple object size"),
+            (ObjectSize("complex", 5000), "complex object size"),
+            (ObjectSize("complex", 5555), "complex object size"),
+            (StorageNode, "StorageNode"),
+            (MorphChain, "MorphChain"),
+            (S3Gate, "S3Gate"),
+            (HTTPGate, "HTTPGate"),
+            (InnerRing, "InnerRing"),
+        ],
+    )
+    def test_classes_string_representation(self, obj: Any, expected: str):
+        assert f"{obj}" == expected
+        assert repr(obj) == expected
@@ -46,6 +46,20 @@ class TestLoadConfig:
         preset = Preset()
         self._check_all_values_none(preset)
 
+    @pytest.mark.parametrize("load_params", [LoadScenario.S3_CAR], indirect=True)
+    def test_string_representation_s3_car(self, load_params: LoadParams):
+        load_params.object_size = 524288
+        expected = "load: s3_car (512 MiB), write_rate=10, read_rate=9, delete_rate=11, preallocated_writers=20, preallocated_readers=20, preallocated_deleters=21"
+        assert f"{load_params}" == expected
+        assert repr(load_params) == expected
+
+    @pytest.mark.parametrize("load_params", [LoadScenario.gRPC], indirect=True)
+    def test_string_representation_grpc(self, load_params: LoadParams):
+        load_params.object_size = 512
+        expected = "load: grpc (512 KiB), writers=7, readers=7, deleters=8"
+        assert f"{load_params}" == expected
+        assert repr(load_params) == expected
+
     def test_load_set_id_changes_fields(self):
         load_params = LoadParams(load_type=LoadType.S3)
         load_params.preset = Preset()