Cleanup sbercloud config

Signed-off-by: Vladimir Domnich <v.domnich@yadro.com>
Vladimir Domnich 2022-08-01 10:41:04 +03:00
parent 47c55f0060
commit e1d7999313
3 changed files with 83 additions and 65 deletions


@@ -7,16 +7,6 @@ SHELL = bash
 OUTPUT_DIR = artifacts/
 KEYWORDS_REPO = git@github.com:nspcc-dev/neofs-keywords.git
 VENVS = $(shell ls -1d venv/*/ | sort -u | xargs basename -a)
-ROOT_DIR := $(shell dirname $(realpath $(firstword $(MAKEFILE_LIST))))
-DEV_IMAGE_PY ?= registry.spb.yadro.com/tools/pytest-neofs-x86_64:7
-SETUP_DIR ?= $(CURDIR)/.setup
-DEV_ENV_DEPLOY_DIR ?= /opt/dev-env
-DOCKER_NETWORK = --network host
-ifeq ($(shell uname -s),Darwin)
-DOCKER_NETWORK = --network bridge -p 389:389 -p 636:636
-endif
 
 .PHONY: all
 all: venvs

@@ -42,7 +32,6 @@ clean:
 pytest-local:
 	@echo "⇒ Run Pytest"
-	export PYTHONPATH=$(ROOT_DIR)/neofs-keywords/lib:$(ROOT_DIR)/neofs-keywords/robot:$(ROOT_DIR)/robot/resources/lib:$(ROOT_DIR)/robot/resources/lib/python_keywords:$(ROOT_DIR)/robot/variables && \
 	python -m pytest pytest_tests/testsuites/
 
 help:


@@ -1,41 +1,52 @@
 import json
+import os
 from dataclasses import dataclass
+from typing import Optional
 
 import requests
-from yaml import FullLoader
-from yaml import load as yaml_load
+import yaml
 
 
 @dataclass
-class SberCloudCtx:
-    sber_login: str = None
-    sber_password: str = None
-    sber_domain: str = None
-    sber_project_id: str = None
-    sber_iam_url: str = None
-    sber_ecss: list = None
+class SberCloudConfig:
+    login: Optional[str] = None
+    password: Optional[str] = None
+    domain: Optional[str] = None
+    project_id: Optional[str] = None
+    iam_url: Optional[str] = None
 
     @staticmethod
-    def from_dict(sbercloud_dict: dict) -> 'SberCloudCtx':
-        return SberCloudCtx(**sbercloud_dict)
+    def from_dict(config_dict: dict) -> 'SberCloudConfig':
+        return SberCloudConfig(**config_dict)
 
     @staticmethod
-    def from_yaml(config: str) -> 'SberCloudCtx':
-        with open(config) as yaml_file:
-            config_from_yaml = yaml_load(yaml_file, Loader=FullLoader)
-        return SberCloudCtx.from_dict(config_from_yaml)
+    def from_yaml(config_path: str) -> 'SberCloudConfig':
+        with open(config_path) as file:
+            config_dict = yaml.load(file, Loader=yaml.FullLoader)
+        return SberCloudConfig.from_dict(config_dict["sbercloud"])
+
+    @staticmethod
+    def from_env() -> 'SberCloudConfig':
+        config_dict = {
+            "domain": os.getenv("SBERCLOUD_DOMAIN"),
+            "login": os.getenv("SBERCLOUD_LOGIN"),
+            "password": os.getenv("SBERCLOUD_PASSWORD"),
+            "project_id": os.getenv("SBERCLOUD_PROJECT_ID"),
+            "iam_url": os.getenv("SBERCLOUD_IAM_URL"),
+        }
+        return SberCloudConfig.from_dict(config_dict)
 
 
 class SberCloud:
-    def __init__(self, config: str):
-        self.sbercloud_config = SberCloudCtx().from_yaml(config)
+    def __init__(self, config: SberCloudConfig) -> None:
+        self.config = config
         self.ecs_url = None
         self.project_id = None
         self.token = None
-        self.update_token()
-        self.ecss = self.get_ecss()
+        self._initialize()
+        self.ecs_nodes = self.get_ecs_nodes()
 
-    def update_token(self):
+    def _initialize(self) -> None:
         data = {
             'auth': {
                 'identity': {
@@ -43,68 +54,85 @@ class SberCloud:
                     'password': {
                         'user': {
                             'domain': {
-                                'name': self.sbercloud_config.sber_domain
+                                'name': self.config.domain
                             },
-                            'name': self.sbercloud_config.sber_login,
-                            'password': self.sbercloud_config.sber_password
+                            'name': self.config.login,
+                            'password': self.config.password
                         }
                     }
                 },
                 'scope': {
                     'project': {
-                        'id': self.sbercloud_config.sber_project_id
+                        'id': self.config.project_id
                     }
                 }
            }
         }
-        response = requests.post(f'{self.sbercloud_config.sber_iam_url}/v3/auth/tokens', data=json.dumps(data),
-                                 headers={'Content-Type': 'application/json'})
-        self.ecs_url = [catalog['endpoints'][0]['url']
-                        for catalog in response.json()['token']['catalog'] if catalog['type'] == 'ecs'][0]
+        response = requests.post(
+            f'{self.config.iam_url}/v3/auth/tokens',
+            data=json.dumps(data),
+            headers={'Content-Type': 'application/json'}
+        )
+        self.ecs_url = [
+            catalog['endpoints'][0]['url']
+            for catalog in response.json()['token']['catalog'] if catalog['type'] == 'ecs'
+        ][0]
         self.project_id = self.ecs_url.split('/')[-1]
         self.token = response.headers['X-Subject-Token']
 
-    def find_esc_by_ip(self, ip: str, update: bool = False) -> str:
-        if not self.ecss or update:
-            self.ecss = self.get_ecss()
-        ecss = [ecs for ecs in self.ecss if ip in [
-            ecs_ip['addr'] for ecs_ip in [ecs_ip for ecs_ips in ecs['addresses'].values() for ecs_ip in ecs_ips]]]
-        assert len(ecss) == 1
-        return ecss[0]['id']
+    def find_ecs_node_by_ip(self, ip: str, no_cache: bool = False) -> str:
+        if not self.ecs_nodes or no_cache:
+            self.ecs_nodes = self.get_ecs_nodes()
+        nodes_by_ip = [
+            node for node in self.ecs_nodes
+            if ip in [
+                node_ip['addr']
+                for node_ips in node['addresses'].values()
+                for node_ip in node_ips
+            ]
+        ]
+        assert len(nodes_by_ip) == 1
+        return nodes_by_ip[0]['id']
 
-    def get_ecss(self) -> [dict]:
+    def get_ecs_nodes(self) -> list[dict]:
         response = requests.get(f'{self.ecs_url}/cloudservers/detail',
                                 headers={'X-Auth-Token': self.token}).json()
         return response['servers']
 
-    def start_node(self, node_id: str = None, node_ip: str = None):
+    def start_node(self, node_id: Optional[str] = None, node_ip: Optional[str] = None) -> None:
         data = {
             'os-start': {
                 'servers': [
                     {
-                        'id': node_id or self.find_esc_by_ip(node_ip)
+                        'id': node_id or self.find_ecs_node_by_ip(node_ip)
                     }
                 ]
             }
         }
-        response = requests.post(f'{self.ecs_url}/cloudservers/action',
-                                 data=json.dumps(data),
-                                 headers={'Content-Type': 'application/json', 'X-Auth-Token': self.token})
-        assert response.status_code < 300, f'Status:{response.status_code}. Server not started: {response.json()}'
+        response = requests.post(
+            f'{self.ecs_url}/cloudservers/action',
+            data=json.dumps(data),
+            headers={'Content-Type': 'application/json', 'X-Auth-Token': self.token}
+        )
+        assert response.status_code < 300, \
+            f'Status:{response.status_code}. Server not started: {response.json()}'
 
-    def stop_node(self, node_id: str = None, node_ip: str = None, hard: bool = False):
+    def stop_node(self, node_id: Optional[str] = None, node_ip: Optional[str] = None,
+                  hard: bool = False) -> None:
         data = {
             'os-stop': {
                 'type': 'HARD' if hard else 'SOFT',
                 'servers': [
                     {
-                        'id': node_id or self.find_esc_by_ip(node_ip)
+                        'id': node_id or self.find_ecs_node_by_ip(node_ip)
                     }
                 ]
             }
         }
-        response = requests.post(f'{self.ecs_url}/cloudservers/action',
-                                 data=json.dumps(data),
-                                 headers={'Content-Type': 'application/json', 'X-Auth-Token': self.token})
-        assert response.status_code < 300, f'Status:{response.status_code}. Server not stopped: {response.json()}'
+        response = requests.post(
+            f'{self.ecs_url}/cloudservers/action',
+            data=json.dumps(data),
+            headers={'Content-Type': 'application/json', 'X-Auth-Token': self.token}
+        )
+        assert response.status_code < 300, \
+            f'Status:{response.status_code}. Server not stopped: {response.json()}'
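
After this change the helper is configured through a SberCloudConfig object rather than a raw YAML path. A minimal usage sketch (the YAML path, node IP, and placeholder values below are illustrative, not part of the commit):

from sbercloud_helper import SberCloud, SberCloudConfig

# Either read credentials from a YAML file (from_yaml now expects them
# under a top-level "sbercloud" key) ...
config = SberCloudConfig.from_yaml("/path/to/sbercloud.yaml")
# ... or pick them up from SBERCLOUD_* environment variables.
config = SberCloudConfig.from_env()

sbercloud = SberCloud(config)  # obtains an IAM token on construction
node_id = sbercloud.find_ecs_node_by_ip("10.0.0.1")  # placeholder IP
sbercloud.stop_node(node_id=node_id, hard=True)
sbercloud.start_node(node_ip="10.0.0.1")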


@@ -1,14 +1,14 @@
 import logging
-import os
 
 import allure
 import pytest
-from common import STORAGE_NODE_SSH_PRIVATE_KEY_PATH, STORAGE_NODE_SSH_USER, STORAGE_NODE_SSH_PASSWORD
+from common import (STORAGE_NODE_SSH_PRIVATE_KEY_PATH, STORAGE_NODE_SSH_USER,
+                    STORAGE_NODE_SSH_PASSWORD)
 from python_keywords.container import create_container
 from python_keywords.neofs_verbs import get_object, put_object
 from python_keywords.utility_keywords import generate_file, get_file_hash
-from sbercloud_helper import SberCloud
+from sbercloud_helper import SberCloud, SberCloudConfig
 from ssh_helper import HostClient, HostIsNotAvailable
 from wellknown_acl import PUBLIC_ACL
 from .failover_utils import wait_all_storage_node_returned, wait_object_replication_on_nodes
@@ -21,7 +21,8 @@ stopped_hosts = []
 def sbercloud_client():
     with allure.step('Connect to SberCloud'):
         try:
-            yield SberCloud(f'{os.getcwd()}/configuration/sbercloud.yaml')
+            config = SberCloudConfig.from_env()
+            yield SberCloud(config)
         except Exception as err:
             pytest.fail(f'SberCloud infrastructure not available. Error\n{err}')
@@ -42,7 +43,7 @@ def panic_reboot_host(ip: str = None):
     ssh.exec('sudo sh -c "echo b > /proc/sysrq-trigger"', timeout=1)
 
 
-def return_all_storage_nodes(sbercloud_client: SberCloud):
+def return_all_storage_nodes(sbercloud_client: SberCloud) -> None:
     for host in list(stopped_hosts):
         with allure.step(f'Start storage node {host}'):
             sbercloud_client.start_node(node_ip=host.split(':')[-2])
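
Because the sbercloud_client fixture now builds its configuration with SberCloudConfig.from_env() instead of reading configuration/sbercloud.yaml, the failover suite expects the SBERCLOUD_* variables to be present in the environment before pytest starts. A sketch of providing them from Python (all values are placeholders, not real credentials):

import os

os.environ["SBERCLOUD_DOMAIN"] = "<account domain>"
os.environ["SBERCLOUD_LOGIN"] = "<login>"
os.environ["SBERCLOUD_PASSWORD"] = "<password>"
os.environ["SBERCLOUD_PROJECT_ID"] = "<project id>"
os.environ["SBERCLOUD_IAM_URL"] = "<IAM endpoint URL>"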