[#350] Cleanup utility keywords

Signed-off-by: Vladimir Domnich <v.domnich@yadro.com>
Vladimir Domnich 2022-10-11 08:18:08 +00:00, committed by Vladimir Domnich
parent f9d1a4dfae
commit 0aeb998be9
17 changed files with 90 additions and 185 deletions


@@ -0,0 +1,67 @@
import hashlib
import logging
import os
import uuid
from typing import Optional

import allure
from common import ASSETS_DIR, SIMPLE_OBJ_SIZE

logger = logging.getLogger("NeoLogger")


def generate_file(size: int = SIMPLE_OBJ_SIZE) -> str:
    """Generates a binary file with the specified size in bytes.

    Args:
        size: Size in bytes, can be declared as 6e+6 for example.

    Returns:
        The path to the generated file.
    """
    file_path = f"{os.getcwd()}/{ASSETS_DIR}/{str(uuid.uuid4())}"
    with open(file_path, "wb") as file:
        file.write(os.urandom(size))
    logger.info(f"File with size {size} bytes has been generated: {file_path}")

    return file_path


@allure.step("Get File Hash")
def get_file_hash(file_path: str, len: Optional[int] = None) -> str:
    """Generates hash for the specified file.

    Args:
        file_path: Path to the file to generate hash for.
        len: How many bytes to read.

    Returns:
        Hash of the file as hex-encoded string.
    """
    file_hash = hashlib.sha256()
    with open(file_path, "rb") as out:
        if len:
            file_hash.update(out.read(len))
        else:
            file_hash.update(out.read())
    return file_hash.hexdigest()


@allure.step("Concatenation set of files to one file")
def concat_files(file_paths: list, resulting_file_path: Optional[str] = None) -> str:
    """Concatenates several files into a single file.

    Args:
        file_paths: Paths to the files to concatenate.
        resulting_file_path: Path to the file where concatenated content should be stored.

    Returns:
        Path to the resulting file.
    """
    if not resulting_file_path:
        resulting_file_path = f"{os.getcwd()}/{ASSETS_DIR}/{str(uuid.uuid4())}"
    with open(resulting_file_path, "wb") as f:
        for file in file_paths:
            with open(file, "rb") as part_file:
                f.write(part_file.read())
    return resulting_file_path
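
For reference, a minimal usage sketch of the new file_helper module (a hypothetical call site, not part of this commit; assumes the ASSETS_DIR directory from common already exists under the current working directory):

import os

from common import ASSETS_DIR
from file_helper import concat_files, generate_file, get_file_hash

os.makedirs(f"{os.getcwd()}/{ASSETS_DIR}", exist_ok=True)  # generate_file writes here

# Generate two binary parts, then concatenate them into a single file.
part_a = generate_file(size=1024)
part_b = generate_file(size=2048)
whole = concat_files([part_a, part_b])

# get_file_hash reads the whole file by default; the optional second argument
# limits how many bytes are hashed, so the first 1024 bytes of the concatenated
# file must hash to the same digest as part_a.
assert os.path.getsize(whole) == 3072
assert get_file_hash(whole, 1024) == get_file_hash(part_a)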


@@ -11,10 +11,10 @@ from common import (
     STORAGE_WALLET_PATH,
     WALLET_CONFIG,
 )
+from file_helper import generate_file
 from python_keywords.acl import EACLRole
 from python_keywords.container import create_container
 from python_keywords.neofs_verbs import put_object
-from python_keywords.utility_keywords import generate_file
 from wallet import init_wallet
 from wellknown_acl import PUBLIC_ACL


@@ -13,6 +13,7 @@ from common import (
     SIMPLE_OBJ_SIZE,
 )
 from epoch import tick_epoch
+from file_helper import generate_file
 from grpc_responses import OBJECT_ACCESS_DENIED, OBJECT_NOT_FOUND
 from python_keywords.acl import (
     EACLAccess,
@@ -34,7 +35,6 @@ from python_keywords.storage_group import (
     verify_get_storage_group,
     verify_list_storage_group,
 )
-from python_keywords.utility_keywords import generate_file
 from wallet import init_wallet

 logger = logging.getLogger("NeoLogger")


@@ -10,10 +10,10 @@ from common import (
     STORAGE_NODE_SSH_USER,
 )
 from failover_utils import wait_all_storage_node_returned, wait_object_replication_on_nodes
+from file_helper import generate_file, get_file_hash
 from iptables_helper import IpTablesHelper
 from python_keywords.container import create_container
 from python_keywords.neofs_verbs import get_object, put_object
-from python_keywords.utility_keywords import generate_file, get_file_hash
 from ssh_helper import HostClient
 from wellknown_acl import PUBLIC_ACL


@@ -8,9 +8,9 @@ from common import (
     STORAGE_NODE_SSH_USER,
 )
 from failover_utils import wait_all_storage_node_returned, wait_object_replication_on_nodes
+from file_helper import generate_file, get_file_hash
 from python_keywords.container import create_container
 from python_keywords.neofs_verbs import get_object, put_object
-from python_keywords.utility_keywords import generate_file, get_file_hash
 from sbercloud_helper import SberCloud, SberCloudConfig
 from ssh_helper import HostClient
 from wellknown_acl import PUBLIC_ACL


@@ -15,6 +15,7 @@ from common import (
 )
 from data_formatters import get_wallet_public_key
 from epoch import tick_epoch
+from file_helper import generate_file
 from grpc_responses import OBJECT_NOT_FOUND, error_matches_status
 from python_keywords.container import create_container, get_container
 from python_keywords.failover_utils import wait_object_replication_on_nodes
@@ -37,7 +38,6 @@ from python_keywords.node_management import (
 from service_helper import get_storage_service_helper
 from storage_policy import get_nodes_with_object, get_simple_object_copies
 from utility import parse_time, placement_policy_from_container, wait_for_gc_pass_on_storage_nodes
-from utility_keywords import generate_file
 from wellknown_acl import PUBLIC_ACL

 logger = logging.getLogger("NeoLogger")


@@ -6,6 +6,7 @@ import pytest
 from common import COMPLEX_OBJ_SIZE, SIMPLE_OBJ_SIZE
 from container import create_container
 from epoch import get_epoch, tick_epoch
+from file_helper import generate_file, get_file_hash
 from grpc_responses import OBJECT_ALREADY_REMOVED, OBJECT_NOT_FOUND, error_matches_status
 from python_keywords.neofs_verbs import (
     delete_object,
@@ -17,7 +18,6 @@ from python_keywords.neofs_verbs import (
     search_object,
 )
 from python_keywords.storage_policy import get_complex_object_copies, get_simple_object_copies
-from python_keywords.utility_keywords import generate_file, get_file_hash
 from tombstone import verify_head_tombstone
 from utility import get_file_content, wait_for_gc_pass_on_storage_nodes


@@ -1,6 +1,6 @@
 import allure
 import pytest
-from python_keywords.utility_keywords import generate_file
+from file_helper import generate_file
 from s3_helper import object_key_from_file_path
 from steps import s3_gate_bucket, s3_gate_object


@@ -1,11 +1,9 @@
-import os
 from datetime import datetime, timedelta

 import allure
 import pytest
-from python_keywords.storage_policy import get_simple_object_copies
-from python_keywords.utility_keywords import generate_file
-from s3_helper import check_objects_in_bucket, object_key_from_file_path, set_bucket_versioning
+from file_helper import generate_file
+from s3_helper import check_objects_in_bucket, object_key_from_file_path
 from steps import s3_gate_bucket, s3_gate_object
 from steps.s3_gate_base import TestS3GateBase


@@ -1,18 +1,12 @@
 import logging
 import os
-from random import choice, choices, randrange
-from time import sleep
+from random import choice, choices

 import allure
 import pytest
 from common import ASSETS_DIR, COMPLEX_OBJ_SIZE, SIMPLE_OBJ_SIZE
 from epoch import tick_epoch
-from python_keywords.container import list_containers
-from python_keywords.utility_keywords import (
-    generate_file,
-    generate_file_and_file_hash,
-    get_file_hash,
-)
+from file_helper import generate_file, get_file_hash
 from s3_helper import (
     check_objects_in_bucket,
     check_tags_by_bucket,
@@ -281,10 +275,7 @@ class TestS3Gate(TestS3GateBase):
         Upload part/List parts/Complete multipart upload).
         """
         parts_count = 3
-        file_name_large, _ = generate_file_and_file_hash(
-            SIMPLE_OBJ_SIZE * 1024 * 6 * parts_count
-        )  # 5Mb - min part
-        # file_name_large, _ = generate_file_and_file_hash(SIMPLE_OBJ_SIZE * 1024 * 30 * parts_count) # 5Mb - min part
+        file_name_large = generate_file(SIMPLE_OBJ_SIZE * 1024 * 6 * parts_count)  # 5Mb - min part
         object_key = self.object_key_from_file_path(file_name_large)
         part_files = split_file(file_name_large, parts_count)
         parts = []
@@ -362,7 +353,7 @@ class TestS3Gate(TestS3GateBase):
             ("some-key--obj2", "some-value--obj2"),
         ]
         key_value_pair_obj_new = [("some-key-obj-new", "some-value-obj-new")]
-        file_name_simple, _ = generate_file_and_file_hash(SIMPLE_OBJ_SIZE)
+        file_name_simple = generate_file(SIMPLE_OBJ_SIZE)
         obj_key = self.object_key_from_file_path(file_name_simple)

         s3_gate_bucket.put_bucket_tagging(self.s3_client, bucket, key_value_pair_bucket)
@@ -397,7 +388,7 @@ class TestS3Gate(TestS3GateBase):
         with allure.step(f"Generate {max_obj_count} files"):
             for _ in range(max_obj_count):
-                file_paths.append(generate_file_and_file_hash(choice(obj_sizes))[0])
+                file_paths.append(generate_file(choice(obj_sizes)))

         for bucket in (bucket_1, bucket_2):
             with allure.step(f"Bucket {bucket} must be empty as it just created"):


@@ -7,9 +7,8 @@ import allure
 import pytest
 from common import ASSETS_DIR, COMPLEX_OBJ_SIZE, FREE_STORAGE, SIMPLE_OBJ_SIZE
 from data_formatters import get_wallet_public_key
-from python_keywords.container import list_containers
+from file_helper import concat_files, generate_file, get_file_hash
 from python_keywords.payment_neogo import neofs_deposit, transfer_mainnet_gas
-from python_keywords.utility_keywords import concat_files, generate_file, get_file_hash
 from s3_helper import check_objects_in_bucket, set_bucket_versioning
 from utility import create_file_with_content
 from wallet import init_wallet


@@ -6,7 +6,7 @@ from typing import Tuple

 import allure
 import pytest
-from python_keywords.utility_keywords import generate_file
+from file_helper import generate_file
 from s3_helper import check_tags_by_bucket, check_tags_by_object, object_key_from_file_path
 from steps import s3_gate_bucket, s3_gate_object


@@ -2,13 +2,13 @@ import os

 import allure
 import pytest
-from python_keywords.utility_keywords import generate_file
+from file_helper import generate_file
 from s3_helper import set_bucket_versioning
-from steps import s3_gate_bucket, s3_gate_object
-from steps.aws_cli_client import AwsCliClient
-from steps.s3_gate_base import TestS3GateBase
 from utility import create_file_with_content
+from steps import s3_gate_bucket, s3_gate_object
+from steps.s3_gate_base import TestS3GateBase


 def pytest_generate_tests(metafunc):
     if "s3_client" in metafunc.fixturenames:


@@ -8,6 +8,7 @@ import pytest
 from common import COMPLEX_OBJ_SIZE
 from container import create_container
 from epoch import get_epoch, tick_epoch
+from file_helper import generate_file, get_file_hash
 from python_keywords.http_gate import (
     get_via_http_curl,
     get_via_http_gate,
@@ -18,7 +19,6 @@ from python_keywords.http_gate import (
 )
 from python_keywords.neofs_verbs import get_object, put_object
 from python_keywords.storage_policy import get_nodes_without_object
-from python_keywords.utility_keywords import generate_file, get_file_hash
 from utility import wait_for_gc_pass_on_storage_nodes
 from wellknown_acl import PUBLIC_ACL


@@ -3,6 +3,7 @@ import random
 import allure
 import pytest
 from common import COMPLEX_OBJ_SIZE, NEOFS_NETMAP_DICT, SIMPLE_OBJ_SIZE
+from file_helper import generate_file
 from grpc_responses import SESSION_NOT_FOUND
 from payment_neogo import _address_from_wallet
 from python_keywords.container import create_container
@@ -15,7 +16,6 @@ from python_keywords.neofs_verbs import (
     search_object,
 )
 from python_keywords.session_token import create_session_token
-from python_keywords.utility_keywords import generate_file


 @allure.title("Test Object Operations with Session Token")


@@ -1,6 +1,7 @@
 from typing import Optional

 import allure
+from file_helper import get_file_hash
 from grpc_responses import OBJECT_ACCESS_DENIED, error_matches_status
 from python_keywords.neofs_verbs import (
     delete_object,
@@ -11,7 +12,6 @@ from python_keywords.neofs_verbs import (
     put_object,
     search_object,
 )
-from python_keywords.utility_keywords import get_file_hash

 OPERATION_ERROR_TYPE = RuntimeError


@@ -1,150 +0,0 @@
#!/usr/bin/python3.8

import hashlib
import logging
import os
import tarfile
import uuid
from typing import Optional, Tuple

import allure
import docker
import wallet
from cli_helpers import _cmd_run
from common import ASSETS_DIR, SIMPLE_OBJ_SIZE

logger = logging.getLogger("NeoLogger")


def generate_file(size: int = SIMPLE_OBJ_SIZE) -> str:
    """
    Function generates a binary file with the specified size in bytes.
    Args:
        size (int): the size in bytes, can be declared as 6e+6 for example
    Returns:
        (str): the path to the generated file
    """
    file_path = f"{os.getcwd()}/{ASSETS_DIR}/{str(uuid.uuid4())}"
    with open(file_path, "wb") as fout:
        fout.write(os.urandom(size))
    logger.info(f"file with size {size} bytes has been generated: {file_path}")
    return file_path


@allure.step("Generate file")
def generate_file_and_file_hash(size: int) -> Tuple[str, str]:
    """
    Function generates a binary file with the specified size in bytes
    and its hash.
    Args:
        size (int): the size in bytes, can be declared as 6e+6 for example
    Returns:
        (str): the path to the generated file
        (str): the hash of the generated file
    """
    file_path = generate_file(size)
    file_hash = get_file_hash(file_path)
    return file_path, file_hash


@allure.step("Get File Hash")
def get_file_hash(filename: str, len: Optional[int] = None):
    """
    This function generates hash for the specified file.
    Args:
        filename (str): the path to the file to generate hash for
        len (int): how many bytes to read
    Returns:
        (str): the hash of the file
    """
    file_hash = hashlib.sha256()
    with open(filename, "rb") as out:
        if len:
            file_hash.update(out.read(len))
        else:
            file_hash.update(out.read())
    return file_hash.hexdigest()


@allure.step("Generate Wallet")
def generate_wallet():
    return wallet.init_wallet(ASSETS_DIR)


@allure.step("Get Docker Logs")
def get_container_logs(testcase_name: str) -> None:
    client = docker.APIClient(base_url="unix://var/run/docker.sock")
    logs_dir = os.getenv("${OUTPUT_DIR}")
    tar_name = f"{logs_dir}/dockerlogs({testcase_name}).tar.gz"
    tar = tarfile.open(tar_name, "w:gz")
    for container in client.containers():
        container_name = container["Names"][0][1:]
        if client.inspect_container(container_name)["Config"]["Domainname"] == "neofs.devenv":
            file_name = f"{logs_dir}/docker_log_{container_name}"
            with open(file_name, "wb") as out:
                out.write(client.logs(container_name))
            logger.info(f"Collected logs from container {container_name}")
            tar.add(file_name)
            os.remove(file_name)
    tar.close()


@allure.step("Make Up")
def make_up(services: list = [], config_dict: dict = {}):
    test_path = os.getcwd()
    dev_path = os.getenv("DEVENV_PATH", "../neofs-dev-env")
    os.chdir(dev_path)

    if len(services) > 0:
        for service in services:
            if config_dict != {}:
                with open(f"{dev_path}/.int_test.env", "a") as out:
                    for key, value in config_dict.items():
                        out.write(f"{key}={value}")
            cmd = f"make up/{service}"
            _cmd_run(cmd)
    else:
        cmd = f"make up/basic; make update.max_object_size val={SIMPLE_OBJ_SIZE}"
        _cmd_run(cmd, timeout=120)

    os.chdir(test_path)


@allure.step("Make Down")
def make_down(services: list = []):
    test_path = os.getcwd()
    dev_path = os.getenv("DEVENV_PATH", "../neofs-dev-env")
    os.chdir(dev_path)

    if len(services) > 0:
        for service in services:
            cmd = f"make down/{service}"
            _cmd_run(cmd)
        with open(f"{dev_path}/.int_test.env", "w"):
            pass
    else:
        cmd = "make down; make clean"
        _cmd_run(cmd, timeout=60)

    os.chdir(test_path)


@allure.step("Concatenation set of files to one file")
def concat_files(list_of_parts: list, new_file_name: Optional[str] = None) -> str:
    """
    Concatenates a set of files into a single file.
    Args:
        list_of_parts (list): list with files to concratination
        new_file_name (str): file name to the generated file
    Returns:
        (str): the path to the generated file
    """
    if not new_file_name:
        new_file_name = f"{os.getcwd()}/{ASSETS_DIR}/{str(uuid.uuid4())}"
    with open(new_file_name, "wb") as f:
        for file in list_of_parts:
            with open(file, "rb") as part_file:
                f.write(part_file.read())
    return new_file_name
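
The removed generate_file_and_file_hash helper has no direct replacement in file_helper; as the call-site hunks above show, the migration is mechanical. A before/after sketch (illustrative only, sizes and names taken from the diffs in this commit):

# Before: a single call returned both the path and the hash
file_name_simple, file_hash = generate_file_and_file_hash(SIMPLE_OBJ_SIZE)

# After: generate the file, then hash it only where the hash is actually needed
file_name_simple = generate_file(SIMPLE_OBJ_SIZE)
file_hash = get_file_hash(file_name_simple)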