2021-08-25 17:08:54 +00:00
|
|
|
#!/usr/bin/python3.8
|
2021-04-26 10:30:40 +00:00
|
|
|
|
2022-06-09 13:08:11 +00:00
|
|
|
import hashlib
|
2022-09-20 15:03:52 +00:00
|
|
|
import logging
|
2021-04-26 10:30:40 +00:00
|
|
|
import os
|
|
|
|
import tarfile
|
|
|
|
import uuid
|
2022-09-19 14:22:10 +00:00
|
|
|
from typing import Optional, Tuple
|
2021-04-26 10:30:40 +00:00
|
|
|
|
2022-09-20 15:03:52 +00:00
|
|
|
import allure
|
2022-06-09 13:08:11 +00:00
|
|
|
import docker
|
2022-07-04 19:49:14 +00:00
|
|
|
import wallet
|
2022-07-18 10:19:05 +00:00
|
|
|
from cli_helpers import _cmd_run
|
2022-09-20 15:03:52 +00:00
|
|
|
from common import ASSETS_DIR, SIMPLE_OBJ_SIZE
|
2022-01-10 11:02:57 +00:00
|
|
|
|
2022-09-20 15:03:52 +00:00
|
|
|
logger = logging.getLogger("NeoLogger")
|
2021-04-26 10:30:40 +00:00
|
|
|
|
2022-06-09 13:08:11 +00:00
|
|
|
|
2022-07-05 10:17:36 +00:00
|
|
|
def generate_file(size: int = SIMPLE_OBJ_SIZE) -> str:
    """
    Generate a binary file filled with random bytes.

    Args:
        size (int): the size in bytes, can be declared as 6e+6 for example

    Returns:
        (str): the path to the generated file
    """
    path = "/".join([os.getcwd(), ASSETS_DIR, str(uuid.uuid4())])
    with open(path, "wb") as handle:
        handle.write(os.urandom(size))
    logger.info(f"file with size {size} bytes has been generated: {path}")
    return path
|
|
|
|
|
|
|
|
|
2022-09-20 15:03:52 +00:00
|
|
|
@allure.step("Generate file")
|
2022-07-05 10:17:36 +00:00
|
|
|
def generate_file_and_file_hash(size: int) -> Tuple[str, str]:
    """
    Generate a random binary file of the specified size and compute its hash.

    Args:
        size (int): the size in bytes, can be declared as 6e+6 for example

    Returns:
        (str): the path to the generated file
        (str): the hash of the generated file
    """
    path = generate_file(size)
    return path, get_file_hash(path)
|
2022-05-19 11:10:00 +00:00
|
|
|
|
2022-05-20 11:18:14 +00:00
|
|
|
|
2022-09-20 15:03:52 +00:00
|
|
|
@allure.step("Get File Hash")
|
2022-09-19 14:22:10 +00:00
|
|
|
def get_file_hash(filename: str, len: Optional[int] = None) -> str:
    """
    Compute the SHA-256 hash of a file.

    Args:
        filename (str): the path to the file to generate hash for
        len (int): how many leading bytes to read; when None, the whole
            file is hashed. (Name shadows the builtin ``len`` but is kept
            for compatibility with keyword callers.)

    Returns:
        (str): the hex digest of the file's hash
    """
    file_hash = hashlib.sha256()
    with open(filename, "rb") as out:
        # Compare against None explicitly: the old truthiness test made
        # len=0 fall through to a full-file read instead of hashing
        # zero bytes.
        if len is not None:
            file_hash.update(out.read(len))
        else:
            file_hash.update(out.read())
    return file_hash.hexdigest()
|
2022-05-26 09:44:13 +00:00
|
|
|
|
2022-05-20 11:18:14 +00:00
|
|
|
|
2022-09-20 15:03:52 +00:00
|
|
|
@allure.step("Generate Wallet")
|
2022-07-04 19:49:14 +00:00
|
|
|
def generate_wallet():
    """
    Create a new wallet in the assets directory.

    Delegates to the project ``wallet.init_wallet`` helper with ASSETS_DIR
    as the target location; presumably returns the wallet path/credentials
    produced by that helper — confirm against the wallet module.
    """
    return wallet.init_wallet(ASSETS_DIR)
|
2022-07-04 19:49:14 +00:00
|
|
|
|
|
|
|
|
2022-09-20 15:03:52 +00:00
|
|
|
@allure.step("Get Docker Logs")
|
2021-04-26 10:30:40 +00:00
|
|
|
def get_container_logs(testcase_name: str) -> None:
    """
    Collect logs from every 'neofs.devenv' Docker container into a tar.gz.

    Args:
        testcase_name (str): embedded into the archive file name so the
            logs can be matched to the test case that produced them

    Returns:
        None
    """
    client = docker.APIClient(base_url="unix://var/run/docker.sock")
    # Look up the plain variable name: the previous os.getenv("${OUTPUT_DIR}")
    # queried a literal "${OUTPUT_DIR}" env var (Robot-Framework syntax leak)
    # and always returned None.
    logs_dir = os.getenv("OUTPUT_DIR")
    tar_name = f"{logs_dir}/dockerlogs({testcase_name}).tar.gz"
    # Context manager guarantees the archive is finalized even if log
    # collection for one of the containers raises.
    with tarfile.open(tar_name, "w:gz") as tar:
        for container in client.containers():
            # Names come prefixed with '/' from the Docker API; strip it.
            container_name = container["Names"][0][1:]
            if client.inspect_container(container_name)["Config"]["Domainname"] == "neofs.devenv":
                file_name = f"{logs_dir}/docker_log_{container_name}"
                with open(file_name, "wb") as out:
                    out.write(client.logs(container_name))
                logger.info(f"Collected logs from container {container_name}")
                tar.add(file_name)
                # The log file is archived; remove the loose copy.
                os.remove(file_name)
|
2021-09-09 13:36:34 +00:00
|
|
|
|
2022-06-09 13:08:11 +00:00
|
|
|
|
2022-09-20 15:03:52 +00:00
|
|
|
@allure.step("Make Up")
|
2022-06-09 13:08:11 +00:00
|
|
|
def make_up(services: list = None, config_dict: dict = None):
    """
    Start NeoFS dev-env services via `make up`.

    Args:
        services (list): specific services to bring up; when empty/None the
            whole basic environment is started instead.
        config_dict (dict): extra env entries appended to .int_test.env
            before the services are started.
    """
    # Avoid mutable default arguments; None is the conventional sentinel.
    test_path = os.getcwd()
    dev_path = os.getenv("DEVENV_PATH", "../neofs-dev-env")
    os.chdir(dev_path)
    try:
        if services:
            # Append the extra config once (the old code re-appended it for
            # every service) and newline-terminate each entry so multiple
            # keys don't concatenate into one invalid line.
            if config_dict:
                with open(f"{dev_path}/.int_test.env", "a") as out:
                    for key, value in config_dict.items():
                        out.write(f"{key}={value}\n")
            for service in services:
                _cmd_run(f"make up/{service}")
        else:
            cmd = f"make up/basic; make update.max_object_size val={SIMPLE_OBJ_SIZE}"
            _cmd_run(cmd, timeout=120)
    finally:
        # Always restore the caller's working directory, even on failure.
        os.chdir(test_path)
|
|
|
|
|
2022-06-09 13:08:11 +00:00
|
|
|
|
2022-09-20 15:03:52 +00:00
|
|
|
@allure.step("Make Down")
|
2022-06-09 13:08:11 +00:00
|
|
|
def make_down(services: list = None):
    """
    Stop NeoFS dev-env services via `make down`.

    Args:
        services (list): specific services to stop; when empty/None the whole
            environment is torn down and cleaned.
    """
    # Avoid a mutable default argument; None is the conventional sentinel.
    test_path = os.getcwd()
    dev_path = os.getenv("DEVENV_PATH", "../neofs-dev-env")
    os.chdir(dev_path)
    try:
        if services:
            for service in services:
                _cmd_run(f"make down/{service}")
                # Truncate the integration-test env overrides that make_up
                # may have appended.
                with open(f"{dev_path}/.int_test.env", "w"):
                    pass
        else:
            _cmd_run("make down; make clean", timeout=60)
    finally:
        # Always restore the caller's working directory, even on failure.
        os.chdir(test_path)
|
2022-09-19 14:22:10 +00:00
|
|
|
|
|
|
|
|
|
|
|
@allure.step("Concatenation set of files to one file")
|
|
|
|
def concat_files(list_of_parts: list, new_file_name: Optional[str] = None) -> str:
    """
    Concatenate a set of files into a single file.

    Args:
        list_of_parts (list): files to concatenate, in order
        new_file_name (str): target path for the generated file; a random
            name under ASSETS_DIR is used when omitted

    Returns:
        (str): the path to the generated file
    """
    target = new_file_name or f"{os.getcwd()}/{ASSETS_DIR}/{str(uuid.uuid4())}"
    with open(target, "wb") as merged:
        for part in list_of_parts:
            with open(part, "rb") as chunk:
                merged.write(chunk.read())
    return target
|