diff --git a/.gitignore b/.gitignore
index e2967ea..4691fe4 100644
--- a/.gitignore
+++ b/.gitignore
@@ -11,3 +11,4 @@ venv.*
 /dist
 /build
 *.egg-info
+wallet_config.yml
\ No newline at end of file
diff --git a/Makefile b/Makefile
index 365e2fc..644eab0 100644
--- a/Makefile
+++ b/Makefile
@@ -19,7 +19,7 @@ paths:
 	@echo Virtual environment: ${current_dir}/${VENV_DIR}
 	@rm -rf ${VENV_DIR}/lib/python${PYTHON_VERSION}/site-packages/_paths.pth
 	@touch ${VENV_DIR}/lib/python${PYTHON_VERSION}/site-packages/_paths.pth
-	@echo ${current_dir}/src/frostfs_testlib | tee ${VENV_DIR}/lib/python${PYTHON_VERSION}/site-packages/_paths.pth
+	@echo ${current_dir}/src | tee ${VENV_DIR}/lib/python${PYTHON_VERSION}/site-packages/_paths.pth
 
 create: ${VENV_DIR}
 
diff --git a/pyproject.toml b/pyproject.toml
index 8fca533..f85b883 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -68,4 +68,5 @@ push = false
 [tool.pytest.ini_options]
 filterwarnings = [
     "ignore:Blowfish has been deprecated:cryptography.utils.CryptographyDeprecationWarning",
-]
\ No newline at end of file
+]
+testpaths = ["tests"]
\ No newline at end of file
diff --git a/src/frostfs_testlib/load/load_config.py b/src/frostfs_testlib/load/load_config.py
index 9a7e49c..4e0b71f 100644
--- a/src/frostfs_testlib/load/load_config.py
+++ b/src/frostfs_testlib/load/load_config.py
@@ -4,6 +4,8 @@ from enum import Enum
 from types import MappingProxyType
 from typing import Any, Optional, get_args
 
+from frostfs_testlib.utils.converting_utils import calc_unit
+
 
 class LoadType(Enum):
     gRPC = "grpc"
@@ -45,6 +47,7 @@ s3_preset_scenarios = [LoadScenario.S3, LoadScenario.S3_CAR]
 
 @dataclass
 class MetaField:
+    name: str
     metadata: MappingProxyType
     value: Any
 
@@ -53,6 +56,7 @@ def metadata_field(
     applicable_scenarios: list[LoadScenario],
     preset_param: Optional[str] = None,
     scenario_variable: Optional[str] = None,
+    string_repr: Optional[bool] = True,
     distributed: Optional[bool] = False,
 ):
     return field(
@@ -61,6 +65,7 @@ def metadata_field(
             "applicable_scenarios": applicable_scenarios,
             "preset_argument": preset_param,
             "env_variable": scenario_variable,
+            "string_repr": string_repr,
             "distributed": distributed,
         },
     )
@@ -100,25 +105,27 @@ class K6ProcessAllocationStrategy(Enum):
 class Preset:
     # ------ COMMON ------
     # Amount of objects which should be created
-    objects_count: Optional[int] = metadata_field(all_load_scenarios, "preload_obj", None)
+    objects_count: Optional[int] = metadata_field(all_load_scenarios, "preload_obj", None, False)
     # Preset json. Filled automatically.
-    pregen_json: Optional[str] = metadata_field(all_load_scenarios, "out", "PREGEN_JSON")
+    pregen_json: Optional[str] = metadata_field(all_load_scenarios, "out", "PREGEN_JSON", False)
     # Workers count for preset
-    workers: Optional[int] = metadata_field(all_load_scenarios, "workers", None)
+    workers: Optional[int] = metadata_field(all_load_scenarios, "workers", None, False)
 
     # ------ GRPC ------
     # Amount of containers which should be created
-    containers_count: Optional[int] = metadata_field(grpc_preset_scenarios, "containers", None)
+    containers_count: Optional[int] = metadata_field(
+        grpc_preset_scenarios, "containers", None, False
+    )
     # Container placement policy for containers for gRPC
     container_placement_policy: Optional[str] = metadata_field(
-        grpc_preset_scenarios, "policy", None
+        grpc_preset_scenarios, "policy", None, False
     )
 
     # ------ S3 ------
     # Amount of buckets which should be created
-    buckets_count: Optional[int] = metadata_field(s3_preset_scenarios, "buckets", None)
+    buckets_count: Optional[int] = metadata_field(s3_preset_scenarios, "buckets", None, False)
     # S3 region (AKA placement policy for S3 buckets)
-    s3_location: Optional[str] = metadata_field(s3_preset_scenarios, "location", None)
+    s3_location: Optional[str] = metadata_field(s3_preset_scenarios, "location", None, False)
 
 
 @dataclass
@@ -155,88 +162,93 @@ class LoadParams:
         [LoadScenario.S3, LoadScenario.S3_CAR, LoadScenario.VERIFY, LoadScenario.HTTP],
         "no-verify-ssl",
         "NO_VERIFY_SSL",
+        False,
     )
 
     # ------- COMMON SCENARIO PARAMS -------
     # Load time is the maximum duration for k6 to give load. Default is the BACKGROUND_LOAD_DEFAULT_TIME value.
-    load_time: Optional[int] = metadata_field(all_load_scenarios, None, "DURATION")
+    load_time: Optional[int] = metadata_field(all_load_scenarios, None, "DURATION", False)
    # Object size in KB for load and preset.
-    object_size: Optional[int] = metadata_field(all_load_scenarios, "size", "WRITE_OBJ_SIZE")
+    object_size: Optional[int] = metadata_field(all_load_scenarios, "size", "WRITE_OBJ_SIZE", False)
     # Output registry K6 file. Filled automatically.
-    registry_file: Optional[str] = metadata_field(all_scenarios, None, "REGISTRY_FILE")
+    registry_file: Optional[str] = metadata_field(all_scenarios, None, "REGISTRY_FILE", False)
     # Specifies the minimum duration of every single execution (i.e. iteration).
     # Any iterations that are shorter than this value will cause that VU to
     # sleep for the remainder of the time until the specified minimum duration is reached.
     min_iteration_duration: Optional[str] = metadata_field(
-        all_load_scenarios, None, "K6_MIN_ITERATION_DURATION"
+        all_load_scenarios, None, "K6_MIN_ITERATION_DURATION", False
     )
     # Specifies K6 setupTimeout time. Currently hardcoded in xk6 as 5 seconds for all scenarios
     # https://k6.io/docs/using-k6/k6-options/reference/#setup-timeout
-    setup_timeout: Optional[str] = metadata_field(all_scenarios, None, "K6_SETUP_TIMEOUT")
+    setup_timeout: Optional[str] = metadata_field(all_scenarios, None, "K6_SETUP_TIMEOUT", False)
 
     # ------- CONSTANT VUS SCENARIO PARAMS -------
     # Amount of Writers VU.
-    writers: Optional[int] = metadata_field(constant_vus_scenarios, None, "WRITERS", True)
+    writers: Optional[int] = metadata_field(constant_vus_scenarios, None, "WRITERS", True, True)
     # Amount of Readers VU.
-    readers: Optional[int] = metadata_field(constant_vus_scenarios, None, "READERS", True)
+    readers: Optional[int] = metadata_field(constant_vus_scenarios, None, "READERS", True, True)
     # Amount of Deleters VU.
-    deleters: Optional[int] = metadata_field(constant_vus_scenarios, None, "DELETERS", True)
+    deleters: Optional[int] = metadata_field(constant_vus_scenarios, None, "DELETERS", True, True)
 
     # ------- CONSTANT ARRIVAL RATE SCENARIO PARAMS -------
     # Number of iterations to start during each timeUnit period for write.
     write_rate: Optional[int] = metadata_field(
-        constant_arrival_rate_scenarios, None, "WRITE_RATE", True
+        constant_arrival_rate_scenarios, None, "WRITE_RATE", True, True
     )
     # Number of iterations to start during each timeUnit period for read.
     read_rate: Optional[int] = metadata_field(
-        constant_arrival_rate_scenarios, None, "READ_RATE", True
+        constant_arrival_rate_scenarios, None, "READ_RATE", True, True
     )
     # Number of iterations to start during each timeUnit period for delete.
     delete_rate: Optional[int] = metadata_field(
-        constant_arrival_rate_scenarios, None, "DELETE_RATE", True
+        constant_arrival_rate_scenarios, None, "DELETE_RATE", True, True
     )
     # Amount of preAllocatedVUs for write operations.
     preallocated_writers: Optional[int] = metadata_field(
-        constant_arrival_rate_scenarios, None, "PRE_ALLOC_WRITERS", True
+        constant_arrival_rate_scenarios, None, "PRE_ALLOC_WRITERS", True, True
     )
     # Amount of maxVUs for write operations.
     max_writers: Optional[int] = metadata_field(
-        constant_arrival_rate_scenarios, None, "MAX_WRITERS", True
+        constant_arrival_rate_scenarios, None, "MAX_WRITERS", False, True
     )
     # Amount of preAllocatedVUs for read operations.
     preallocated_readers: Optional[int] = metadata_field(
-        constant_arrival_rate_scenarios, None, "PRE_ALLOC_READERS", True
+        constant_arrival_rate_scenarios, None, "PRE_ALLOC_READERS", True, True
     )
     # Amount of maxVUs for read operations.
     max_readers: Optional[int] = metadata_field(
-        constant_arrival_rate_scenarios, None, "MAX_READERS", True
+        constant_arrival_rate_scenarios, None, "MAX_READERS", False, True
     )
     # Amount of preAllocatedVUs for read operations.
     preallocated_deleters: Optional[int] = metadata_field(
-        constant_arrival_rate_scenarios, None, "PRE_ALLOC_DELETERS", True
+        constant_arrival_rate_scenarios, None, "PRE_ALLOC_DELETERS", True, True
     )
     # Amount of maxVUs for delete operations.
     max_deleters: Optional[int] = metadata_field(
-        constant_arrival_rate_scenarios, None, "MAX_DELETERS", True
+        constant_arrival_rate_scenarios, None, "MAX_DELETERS", False, True
     )
     # Period of time to apply the rate value.
-    time_unit: Optional[str] = metadata_field(constant_arrival_rate_scenarios, None, "TIME_UNIT")
+    time_unit: Optional[str] = metadata_field(
+        constant_arrival_rate_scenarios, None, "TIME_UNIT", False
+    )
 
     # ------- VERIFY SCENARIO PARAMS -------
     # Maximum verification time for k6 to verify objects. Default is BACKGROUND_LOAD_MAX_VERIFY_TIME (3600).
-    verify_time: Optional[int] = metadata_field([LoadScenario.VERIFY], None, "TIME_LIMIT")
+    verify_time: Optional[int] = metadata_field([LoadScenario.VERIFY], None, "TIME_LIMIT", False)
     # Amount of Verification VU.
-    verify_clients: Optional[int] = metadata_field([LoadScenario.VERIFY], None, "CLIENTS", True)
+    verify_clients: Optional[int] = metadata_field(
+        [LoadScenario.VERIFY], None, "CLIENTS", True, False
+    )
 
     # ------- LOCAL SCENARIO PARAMS -------
     # Config file location (filled automatically)
-    config_file: Optional[str] = metadata_field([LoadScenario.LOCAL], None, "CONFIG_FILE")
+    config_file: Optional[str] = metadata_field([LoadScenario.LOCAL], None, "CONFIG_FILE", False)
 
     def set_id(self, load_id):
         self.load_id = load_id
@@ -267,6 +279,15 @@ class LoadParams:
 
         return command_args
 
+    def _get_applicable_fields(self):
+        applicable_fields = [
+            meta_field
+            for meta_field in self._get_meta_fields(self)
+            if self.scenario in meta_field.metadata["applicable_scenarios"] and meta_field.value
+        ]
+
+        return applicable_fields
+
     @staticmethod
     def _get_preset_argument(meta_field: MetaField) -> str:
         if isinstance(meta_field.value, bool):
@@ -280,7 +301,7 @@ class LoadParams:
         data_fields = fields(instance)
 
         fields_with_data = [
-            MetaField(field.metadata, getattr(instance, field.name))
+            MetaField(field.name, field.metadata, getattr(instance, field.name))
             for field in data_fields
             if field.metadata and getattr(instance, field.name) is not None
         ]
@@ -293,3 +314,18 @@ class LoadParams:
                 fields_with_data += LoadParams._get_meta_fields(getattr(instance, field.name))
 
         return fields_with_data or []
+
+    def __str__(self) -> str:
+        size, unit = calc_unit(self.object_size, 1)
+        static_params = [f"{self.scenario.value} ({size:.4g} {unit})"]
+        dynamic_params = [
+            f"{meta_field.name}={meta_field.value}"
+            for meta_field in self._get_applicable_fields()
+            if meta_field.metadata["string_repr"]
+        ]
+        params = ", ".join(static_params + dynamic_params)
+
+        return f"load: {params}"
+
+    def __repr__(self) -> str:
+        return self.__str__()
diff --git a/src/frostfs_testlib/load/load_report.py b/src/frostfs_testlib/load/load_report.py
index fa71069..e1056b7 100644
--- a/src/frostfs_testlib/load/load_report.py
+++ b/src/frostfs_testlib/load/load_report.py
@@ -1,10 +1,11 @@
 from datetime import datetime
-from typing import Optional, Tuple
+from typing import Optional
 
 import yaml
 
 from frostfs_testlib.load.load_config import K6ProcessAllocationStrategy, LoadParams, LoadScenario
 from frostfs_testlib.load.load_metrics import get_metrics_object
+from frostfs_testlib.utils.converting_utils import calc_unit
 
 
 class LoadReport:
@@ -62,17 +63,6 @@ class LoadReport:
 
         return html
 
-    def _calc_unit(self, value: float, skip_units: int = 0) -> Tuple[float, str]:
-        units = ["B", "KiB", "MiB", "GiB", "TiB"]
-
-        for unit in units[skip_units:]:
-            if value < 1024:
-                return value, unit
-
-            value = value / 1024.0
-
-        return value, unit
-
     def _seconds_to_formatted_duration(self, seconds: int) -> str:
         """Converts N number of seconds to formatted output ignoring zeroes.
         Examples:
@@ -122,7 +112,7 @@ class LoadReport:
     ):
         throughput_html = ""
         if throughput > 0:
-            throughput, unit = self._calc_unit(throughput)
+            throughput, unit = calc_unit(throughput)
             throughput_html = self._row("Throughput", f"{throughput:.2f} {unit}/sec")
 
         per_node_errors_html = ""
@@ -137,7 +127,7 @@ class LoadReport:
             ):
                 per_node_errors_html += self._row(f"At {node_key}", errors)
 
-        object_size, object_size_unit = self._calc_unit(self.load_params.object_size, 1)
+        object_size, object_size_unit = calc_unit(self.load_params.object_size, 1)
         duration = self._seconds_to_formatted_duration(self.load_params.load_time)
         model = self._get_model_string()
         # write 8KB 15h49m 50op/sec 50th open model/closed model/min_iteration duration=1s - 1.636MB/s 199.57451/s
diff --git a/src/frostfs_testlib/s3/aws_cli_client.py b/src/frostfs_testlib/s3/aws_cli_client.py
index a9aeb37..2e61679 100644
--- a/src/frostfs_testlib/s3/aws_cli_client.py
+++ b/src/frostfs_testlib/s3/aws_cli_client.py
@@ -24,6 +24,8 @@ LONG_TIMEOUT = 240
 
 
 class AwsCliClient(S3ClientWrapper):
+    __repr_name__: str = "AWS CLI"
+
     # Flags that we use for all S3 commands: disable SSL verification (as we use self-signed
     # certificate in devenv) and disable automatic pagination in CLI output
     common_flags = "--no-verify-ssl --no-paginate"
diff --git a/src/frostfs_testlib/s3/boto3_client.py b/src/frostfs_testlib/s3/boto3_client.py
index 6d6fc74..2251efe 100644
--- a/src/frostfs_testlib/s3/boto3_client.py
+++ b/src/frostfs_testlib/s3/boto3_client.py
@@ -44,6 +44,8 @@ def report_error(func):
 
 
 class Boto3ClientWrapper(S3ClientWrapper):
+    __repr_name__: str = "Boto3 client"
+
     @reporter.step_deco("Configure S3 client (boto3)")
     @report_error
     def __init__(self, access_key_id: str, secret_access_key: str, s3gate_endpoint: str) -> None:
diff --git a/src/frostfs_testlib/s3/interfaces.py b/src/frostfs_testlib/s3/interfaces.py
index 3f31395..166abff 100644
--- a/src/frostfs_testlib/s3/interfaces.py
+++ b/src/frostfs_testlib/s3/interfaces.py
@@ -1,8 +1,10 @@
-from abc import ABC, abstractmethod
+from abc import abstractmethod
 from datetime import datetime
 from enum import Enum
 from typing import Literal, Optional, Union
 
+from frostfs_testlib.testing.readable import HumanReadableABC
+
 
 def _make_objs_dict(key_names):
     objs_list = []
@@ -29,7 +31,7 @@ ACL_COPY = [
 ]
 
 
-class S3ClientWrapper(ABC):
+class S3ClientWrapper(HumanReadableABC):
     @abstractmethod
     def __init__(self, access_key_id: str, secret_access_key: str, s3gate_endpoint: str) -> None:
         pass
diff --git a/src/frostfs_testlib/storage/dataclasses/node_base.py b/src/frostfs_testlib/storage/dataclasses/node_base.py
index 150b963..9748bc2 100644
--- a/src/frostfs_testlib/storage/dataclasses/node_base.py
+++ b/src/frostfs_testlib/storage/dataclasses/node_base.py
@@ -7,11 +7,12 @@ import yaml
 from frostfs_testlib.hosting.config import ServiceConfig
 from frostfs_testlib.hosting.interfaces import Host
 from frostfs_testlib.storage.constants import ConfigAttributes
+from frostfs_testlib.testing.readable import HumanReadableABC
 from frostfs_testlib.utils import wallet_utils
 
 
 @dataclass
-class NodeBase(ABC):
+class NodeBase(HumanReadableABC):
     """
     Represents a node of some underlying service
     """
diff --git a/src/frostfs_testlib/storage/dataclasses/object_size.py b/src/frostfs_testlib/storage/dataclasses/object_size.py
new file mode 100644
index 0000000..520bdc3
--- /dev/null
+++ b/src/frostfs_testlib/storage/dataclasses/object_size.py
@@ -0,0 +1,13 @@
+from dataclasses import dataclass
+
+
+@dataclass
+class ObjectSize:
+    name: str
+    value: int
+
+    def __str__(self) -> str:
+        return f"{self.name} object size"
+
+    def __repr__(self) -> str:
+        return self.__str__()
diff --git a/src/frostfs_testlib/testing/readable.py b/src/frostfs_testlib/testing/readable.py
new file mode 100644
index 0000000..66384b7
--- /dev/null
+++ b/src/frostfs_testlib/testing/readable.py
@@ -0,0 +1,27 @@
+from abc import ABCMeta
+
+
+class HumanReadableABCMeta(ABCMeta):
+    def __str__(cls):
+        if "__repr_name__" in cls.__dict__:
+            return cls.__dict__["__repr_name__"]
+        return cls.__name__
+
+    def __repr__(cls):
+        if "__repr_name__" in cls.__dict__:
+            return cls.__dict__["__repr_name__"]
+        return cls.__name__
+
+
+class HumanReadableABC(metaclass=HumanReadableABCMeta):
+    @classmethod
+    def __str__(cls):
+        if "__repr_name__" in cls.__dict__:
+            return cls.__dict__["__repr_name__"]
+        return type(cls).__name__
+
+    @classmethod
+    def __repr__(cls):
+        if "__repr_name__" in cls.__dict__:
+            return cls.__dict__["__repr_name__"]
+        return type(cls).__name__
diff --git a/src/frostfs_testlib/utils/converting_utils.py b/src/frostfs_testlib/utils/converting_utils.py
index 24b77ae..273d9b4 100644
--- a/src/frostfs_testlib/utils/converting_utils.py
+++ b/src/frostfs_testlib/utils/converting_utils.py
@@ -1,10 +1,23 @@
 import base64
 import binascii
 import json
+from typing import Tuple
 
 import base58
 
 
+def calc_unit(value: float, skip_units: int = 0) -> Tuple[float, str]:
+    units = ["B", "KiB", "MiB", "GiB", "TiB"]
+
+    for unit in units[skip_units:]:
+        if value < 1024:
+            return value, unit
+
+        value = value / 1024.0
+
+    return value, unit
+
+
 def str_to_ascii_hex(input: str) -> str:
     b = binascii.hexlify(input.encode())
     return str(b)[2:-1]
diff --git a/tests/test_dataclasses.py b/tests/test_dataclasses.py
new file mode 100644
index 0000000..11cda7a
--- /dev/null
+++ b/tests/test_dataclasses.py
@@ -0,0 +1,37 @@
+from typing import Any
+
+import pytest
+
+from frostfs_testlib.s3 import AwsCliClient, Boto3ClientWrapper
+from frostfs_testlib.storage.dataclasses.frostfs_services import (
+    HTTPGate,
+    InnerRing,
+    MorphChain,
+    S3Gate,
+    StorageNode,
+)
+from frostfs_testlib.storage.dataclasses.object_size import ObjectSize
+
+
+class TestDataclassesStr:
+    """Here we are testing important classes string representation."""
+
+    @pytest.mark.parametrize(
+        "obj, expected",
+        [
+            (Boto3ClientWrapper, "Boto3 client"),
+            (AwsCliClient, "AWS CLI"),
+            (ObjectSize("simple", 1), "simple object size"),
+            (ObjectSize("simple", 10), "simple object size"),
+            (ObjectSize("complex", 5000), "complex object size"),
+            (ObjectSize("complex", 5555), "complex object size"),
+            (StorageNode, "StorageNode"),
+            (MorphChain, "MorphChain"),
+            (S3Gate, "S3Gate"),
+            (HTTPGate, "HTTPGate"),
+            (InnerRing, "InnerRing"),
+        ],
+    )
+    def test_classes_string_representation(self, obj: Any, expected: str):
+        assert f"{obj}" == expected
+        assert repr(obj) == expected
diff --git a/tests/test_load_config.py b/tests/test_load_config.py
index a9b6de1..89a10ea 100644
--- a/tests/test_load_config.py
+++ b/tests/test_load_config.py
@@ -46,6 +46,20 @@ class TestLoadConfig:
         preset = Preset()
         self._check_all_values_none(preset)
 
+    @pytest.mark.parametrize("load_params", [LoadScenario.S3_CAR], indirect=True)
+    def test_string_representation_s3_car(self, load_params: LoadParams):
+        load_params.object_size = 524288
+        expected = "load: s3_car (512 MiB), write_rate=10, read_rate=9, delete_rate=11, preallocated_writers=20, preallocated_readers=20, preallocated_deleters=21"
+        assert f"{load_params}" == expected
+        assert repr(load_params) == expected
+
+    @pytest.mark.parametrize("load_params", [LoadScenario.gRPC], indirect=True)
+    def test_string_representation_grpc(self, load_params: LoadParams):
+        load_params.object_size = 512
+        expected = "load: grpc (512 KiB), writers=7, readers=7, deleters=8"
+        assert f"{load_params}" == expected
+        assert repr(load_params) == expected
+
     def test_load_set_id_changes_fields(self):
         load_params = LoadParams(load_type=LoadType.S3)
         load_params.preset = Preset()
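# ---------------------------------------------------------------------------
# Usage sketch (illustration only, not part of the patch): how the pieces added
# above are expected to fit together. It assumes the package layout of this
# repository; the direct `scenario` assignment below is a hypothetical shortcut
# for whatever the `load_params` fixture in tests/test_load_config.py sets up.

from frostfs_testlib.load.load_config import LoadParams, LoadScenario, LoadType
from frostfs_testlib.storage.dataclasses.object_size import ObjectSize
from frostfs_testlib.utils.converting_utils import calc_unit

# calc_unit scales a value into the largest unit that keeps it below 1024;
# skip_units=1 starts at KiB, which is why sizes given in KB read naturally.
print(calc_unit(524288, 1))  # -> (512.0, 'MiB')

# ObjectSize hides the raw byte value behind a readable label in test output.
print(ObjectSize("simple", 1024))  # -> "simple object size"

# LoadParams.__str__ renders the scenario, the object size via calc_unit, and
# every applicable metadata field that is set and declared with string_repr=True.
params = LoadParams(load_type=LoadType.gRPC)
params.scenario = LoadScenario.gRPC  # assumed assignable, as the fixture-based tests imply
params.object_size = 512
params.writers = 7
print(params)  # -> something like: "load: grpc (512 KiB), writers=7"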