Add pytest tests.

Switch AWS CLI from v1 to v2.
Improve allure attachments.
Add tests for S3 API.

Signed-off-by: a.y.volkov <a.y.volkov@yadro.com>
Vladimir Domnich 2022-07-05 11:18:37 +04:00 committed by Anastasia Prasolova
parent 7abb2761c9
commit 6b1e1ab28d
24 changed files with 2210 additions and 247 deletions

.flake8 Normal file

@@ -0,0 +1,12 @@
[flake8]
exclude =
.git,
__pycache__,
.idea,
.pytest_cache,
venv
per-file-ignores =
# imported but unused
__init__.py: F401
max-line-length = 120
disable-noqa

Makefile

@@ -7,6 +7,13 @@ SHELL = bash
OUTPUT_DIR = artifacts/
KEYWORDS_REPO = git@github.com:nspcc-dev/neofs-keywords.git
VENVS = $(shell ls -1d venv/*/ | sort -u | xargs basename -a)
ROOT_DIR:=$(shell dirname $(realpath $(firstword $(MAKEFILE_LIST))))
DEV_IMAGE_PY ?= registry.spb.yadro.com/tools/pytest-neofs-x86_64:4
ifeq ($(shell uname -s),Darwin)
DOCKER_NETWORK = --network bridge -p 389:389 -p 636:636
endif
.PHONY: all
all: venvs
@@ -30,5 +37,34 @@ submodules:
clean:
rm -rf venv.*
pytest-local:
@echo "⇒ Run Pytest"
export PYTHONPATH=$(ROOT_DIR)/neofs-keywords/lib:$(ROOT_DIR)/neofs-keywords/robot:$(ROOT_DIR)/robot/resources/lib:$(ROOT_DIR)/robot/resources/lib/python_keywords:$(ROOT_DIR)/robot/variables && \
python -m pytest pytest_tests/testsuites/
help:
@echo "⇒ run Run testcases ${R}"
.PHONY: pytest-docker
pytest-docker:
-docker ps
-docker rm neofs_tests_py
-docker pull $(DEV_IMAGE_PY)
docker run -t --rm \
-w /tests \
--name neofs_tests_py \
-e PYTHONPATH="/tests/neofs-keywords/lib:/tests/neofs-keywords/robot:/tests/robot/resources/lib:/tests/robot/resources/lib/python_keywords:/tests/robot/variables:/tests/pytest_tests/helpers" \
-v $(CURDIR):/tests \
-v /var/run/docker.sock:/var/run/docker.sock \
-v $(NEO_BIN_DIR):/neofs \
--privileged \
$(DOCKER_NETWORK) \
--env-file $(CURDIR)/.env \
$(DEV_IMAGE_PY) \
-v \
-m "$(CI_MARKERS)" \
--color=no \
--junitxml=/tests/xunit_results.xml \
--alluredir=/tests/allure_results \
--setup-show \
/tests/pytest_tests/testsuites

pytest_tests/Dockerfile Normal file

@@ -0,0 +1,40 @@
ARG ARCH
FROM python:3.9-slim
ARG ARCH
RUN apt-get -y update && apt-get -y install \
gcc \
make \
git \
curl \
wget \
openssh-client \
iputils-ping \
unzip \
vim \
dbus \
lsof \
tini \
libssl-dev \
expect \
runc \
&& rm -rf /var/lib/apt/lists/* \
&& apt-get clean
COPY requirements.txt ./
# Install AWS CLI v2 via the awscliv2 wrapper package from pip
RUN python3 -m pip install awscliv2
RUN ln /usr/local/bin/awscliv2 /usr/bin/aws
RUN awsv2 --install
RUN aws --version
RUN pip install --no-cache-dir -r requirements.txt
ENV PATH=$PATH:/root:/neofs
WORKDIR /root
ENTRYPOINT ["tini", "--", "pytest"]
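
Since every suite shells out to `aws`, a quick version assertion catches a broken awscliv2 install early. A minimal sketch in Python (hypothetical helper, not part of this commit):

import subprocess

def assert_aws_cli_v2():
    # AWS CLI v2 identifies itself as 'aws-cli/2.x.y ...';
    # v1 printed its version to stderr, hence the fallback
    result = subprocess.run(['aws', '--version'], capture_output=True, text=True)
    version = result.stdout or result.stderr
    assert version.startswith('aws-cli/2'), f'Expected AWS CLI v2, got: {version}'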

utility.py Normal file

@@ -0,0 +1,49 @@
import os
import uuid
from typing import List
from common import ASSETS_DIR, SIMPLE_OBJ_SIZE
def create_file_with_content(file_path: str = None, content: str = None) -> str:
mode = 'w+'
if not content:
# no content given: fill the file with SIMPLE_OBJ_SIZE random bytes in binary mode
content = os.urandom(SIMPLE_OBJ_SIZE)
mode = 'wb'
if not file_path:
file_path = f"{os.getcwd()}/{ASSETS_DIR}/{str(uuid.uuid4())}"
else:
if not os.path.exists(os.path.dirname(file_path)):
os.makedirs(os.path.dirname(file_path))
with open(file_path, mode) as out_file:
out_file.write(content)
return file_path
def get_file_content(file_path: str) -> str:
with open(file_path, 'r') as out_file:
content = out_file.read()
return content
def split_file(file_path: str, parts: int) -> List[str]:
files = []
with open(file_path, 'rb') as in_file:
data = in_file.read()
content_size = len(data)
chunk_size = int((content_size + parts) / parts)
part_id = 1
for start_position in range(0, content_size + 1, chunk_size):
part_file_name = f'{file_path}_part_{part_id}'
files.append(part_file_name)
with open(part_file_name, 'wb') as out_file:
out_file.write(data[start_position:start_position + chunk_size])
part_id += 1
return files
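
A usage sketch for the helpers above (illustrative values, not part of this commit): the chunks produced by split_file concatenate back into the original file.

# illustrative usage of create_file_with_content and split_file
source = create_file_with_content(content='0123456789' * 100)
chunks = split_file(source, parts=3)
reassembled = b''.join(open(chunk, 'rb').read() for chunk in chunks)
assert reassembled == open(source, 'rb').read()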

pytest_tests/pytest.ini Normal file

@@ -0,0 +1,17 @@
[pytest]
log_cli = 1
log_cli_level = DEBUG
log_cli_format = %(asctime)s [%(levelname)4s] %(message)s
log_format = %(asctime)s [%(levelname)4s] %(message)s
log_cli_date_format = %Y-%m-%d %H:%M:%S
log_date_format = %H:%M:%S
markers =
# special markers
sanity: small tests subset
staging: tests excluded from verifier/pr-validation/sanity jobs and run only in the staging job
# functional markers
grpc_api: standard gRPC API tests
http_gate: HTTP gate contract
s3_gate: S3 gate tests
curl: tests for HTTP gate with curl utility
long: long tests (with long execution time)
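
Tests opt into these markers with the usual pytest decorators; an illustrative module (not part of this commit):

import pytest

@pytest.mark.sanity
@pytest.mark.grpc_api
def test_selected_by_markers():
    # collected by `pytest -m sanity`, excluded by `pytest -m "not sanity"`
    assert True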

requirements.txt Normal file

@@ -0,0 +1,63 @@
aiodns==3.0.0
aiohttp==3.7.4.post0
aioresponses==0.7.2
allure-pytest==2.9.45
allure-python-commons==2.9.45
async-timeout==3.0.1
asynctest==0.13.0
attrs==21.4.0
base58==2.1.0
bitarray==2.3.4
boto3==1.16.33
botocore==1.19.33
certifi==2022.5.18
cffi==1.15.0
chardet==4.0.0
charset-normalizer==2.0.12
coverage==6.3.3
docker==4.4.0
docutils==0.17.1
Events==0.4
flake8==4.0.1
idna==3.3
iniconfig==1.1.1
isort==5.10.1
jmespath==0.10.0
jsonschema==4.5.1
lz4==3.1.3
mccabe==0.6.1
mmh3==3.0.0
multidict==6.0.2
mypy==0.950
mypy-extensions==0.4.3
neo-mamba==0.10.0
neo3crypto==0.2.1
neo3vm==0.9.0
neo3vm-stubs==0.9.0
netaddr==0.8.0
orjson==3.6.8
packaging==21.3
pexpect==4.8.0
pluggy==1.0.0
ptyprocess==0.7.0
py==1.11.0
pybiginteger==1.2.6
pybiginteger-stubs==1.2.6
pycares==4.1.2
pycodestyle==2.8.0
pycparser==2.21
pycryptodome==3.11.0
pyflakes==2.4.0
pyparsing==3.0.9
pyrsistent==0.18.1
pytest==7.1.2
python-dateutil==2.8.2
requests==2.27.1
robotframework==4.1.2
s3transfer==0.3.7
six==1.16.0
tomli==2.0.1
typing-extensions==4.2.0
urllib3==1.26.9
websocket-client==1.3.2
yarl==1.7.2

conftest.py Normal file

@@ -0,0 +1,148 @@
import logging
import os
import shutil
from re import search
from time import sleep
import allure
import pytest
import rpc_client
import wallet
from cli_helpers import _cmd_run
from common import (ASSETS_DIR, COMMON_PLACEMENT_RULE, COMPLEX_OBJ_SIZE,
MAINNET_WALLET_WIF, NEO_MAINNET_ENDPOINT, SIMPLE_OBJ_SIZE)
from python_keywords.container import create_container
from python_keywords.payment_neogo import get_balance
from python_keywords.utility_keywords import generate_file_and_file_hash
from robot.api import deco
from wallet_keywords import neofs_deposit, transfer_mainnet_gas
from wellknown_acl import PUBLIC_ACL
deco.keyword = allure.step
logger = logging.getLogger('NeoLogger')
@pytest.fixture(scope='session', autouse=True)
@allure.title('Check binary versions')
def check_binary_versions(request):
environment_dir = request.config.getoption('--alluredir')
binaries = ['neo-go', 'neofs-cli', 'neofs-authmate', 'aws']
env_out = {}
for binary in binaries:
out = _cmd_run(f'{binary} --version')
version = search(r'(v?\d.*)\s+', out)
version = version.group(1) if version else 'Unknown'
env_out[binary.upper()] = version
if environment_dir:
with open(f'{environment_dir}/environment.properties', 'w') as out_file:
for env, env_value in env_out.items():
out_file.write(f'{env}={env_value}\n')
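# The fixture above yields an allure environment.properties such as the
# following (version values here are purely illustrative):
#   NEO-GO=v0.99.0
#   NEOFS-CLI=v0.30.0
#   NEOFS-AUTHMATE=v0.22.0
#   AWS=aws-cli/2.7.11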
@pytest.fixture(scope='session')
@allure.title('Init wallet with address')
def init_wallet_with_address():
full_path = f'{os.getcwd()}/{ASSETS_DIR}'
os.mkdir(full_path)
yield wallet.init_wallet_w_addr(ASSETS_DIR)
shutil.rmtree(full_path)
@pytest.fixture(scope='session')
@allure.title('Prepare wallet and deposit')
def prepare_wallet_and_deposit(init_wallet_with_address):
deposit = 30
wallet, addr, wif = init_wallet_with_address
logger.info(f'Init wallet: {wallet},\naddr: {addr},\nwif: {wif}')
txid = transfer_mainnet_gas(MAINNET_WALLET_WIF, addr, deposit + 1)
wait_until_transaction_accepted_in_block(txid)
deposit_tx = neofs_deposit(wif, deposit)
wait_until_transaction_accepted_in_block(deposit_tx)
return wallet, wif
@pytest.fixture()
@allure.title('Create Container')
def prepare_container(prepare_wallet_and_deposit):
wallet, wif = prepare_wallet_and_deposit
return prepare_container_impl(wallet, wif)
@pytest.fixture(scope='module')
@allure.title('Create Public Container')
def prepare_public_container(prepare_wallet_and_deposit):
placement_rule = 'REP 1 IN X CBF 1 SELECT 1 FROM * AS X'
wallet, wif = prepare_wallet_and_deposit
return prepare_container_impl(wallet, wif, rule=placement_rule, basic_acl=PUBLIC_ACL)
def prepare_container_impl(wallet: str, wif: str, rule=COMMON_PLACEMENT_RULE, basic_acl: str = ''):
balance = get_balance(wif)
assert balance > 0, f'Expected balance is greater than 0. Got {balance}'
cid = create_container(wallet, rule=rule, basic_acl=basic_acl)
new_balance = get_balance(wif)
assert new_balance < balance, 'Expected some fee has charged'
return cid, wallet
@allure.step('Wait until transaction accepted in block')
def wait_until_transaction_accepted_in_block(tx_id: str):
"""
Waits until the given transaction is accepted into a block.
Returns True on success, raises TimeoutError otherwise.
:param tx_id: transaction ID
"""
mainnet_rpc_cli = rpc_client.RPCClient(NEO_MAINNET_ENDPOINT)
if isinstance(tx_id, bytes):
tx_id = tx_id.decode()
sleep_interval, attempts = 5, 10
for __attempt in range(attempts):
try:
resp = mainnet_rpc_cli.get_transaction_height(tx_id)
if resp is not None:
logger.info(f"got block height: {resp}")
return True
except Exception as e:
# the RPC call fails while the transaction is not yet in a block, so log and retry
logger.info(f"request failed with error: {e}")
sleep(sleep_interval)
raise TimeoutError(f'Timeout {sleep_interval * attempts} sec. reached on waiting for transaction accepted')
@pytest.fixture()
@allure.title('Generate files')
def generate_files():
file_name_simple, _ = generate_file_and_file_hash(SIMPLE_OBJ_SIZE)
large_file_name, _ = generate_file_and_file_hash(COMPLEX_OBJ_SIZE)
return file_name_simple, large_file_name
@pytest.fixture()
@allure.title('Generate file')
def generate_file():
file_name_simple, _ = generate_file_and_file_hash(SIMPLE_OBJ_SIZE)
return file_name_simple
@pytest.fixture()
@allure.title('Generate large file')
def generate_large_file():
obj_size = int(os.getenv('BIG_OBJ_SIZE', COMPLEX_OBJ_SIZE))
file_path, file_hash = generate_file_and_file_hash(obj_size)
return file_path, file_hash
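# Example override (illustrative value): BIG_OBJ_SIZE=104857600 makes this
# fixture generate a 100 MB object instead of the default COMPLEX_OBJ_SIZE.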


@@ -0,0 +1,93 @@
import logging
from time import sleep
import allure
import pytest
from contract_keywords import tick_epoch
from python_keywords.neofs import verify_head_tombstone
from python_keywords.neofs_verbs import (delete_object, get_object, get_range,
get_range_hash, head_object,
put_object, search_object)
from python_keywords.storage_policy import get_simple_object_copies
from python_keywords.utility_keywords import get_file_hash
logger = logging.getLogger('NeoLogger')
CLEANUP_TIMEOUT = 10
@allure.title('Test native object API')
@pytest.mark.sanity
@pytest.mark.grpc_api
def test_object_api(prepare_container, generate_file):
cid, wallet = prepare_container
wallet_cid = {'wallet': wallet, 'cid': cid}
file_usr_header = {'key1': 1, 'key2': 'abc'}
file_usr_header_oth = {'key1': 2}
range_cut = '0:10'
oids = []
file_name = generate_file
file_hash = get_file_hash(file_name)
search_object(**wallet_cid, expected_objects_list=oids)
with allure.step('Put objects'):
oids.append(put_object(wallet=wallet, path=file_name, cid=cid))
oids.append(put_object(wallet=wallet, path=file_name, cid=cid, user_headers=file_usr_header))
oids.append(put_object(wallet=wallet, path=file_name, cid=cid, user_headers=file_usr_header_oth))
with allure.step('Validate storage policy for objects'):
for oid_to_check in oids:
assert get_simple_object_copies(wallet=wallet, cid=cid, oid=oid_to_check) == 2, 'Expected 2 copies'
with allure.step('Get objects and compare hashes'):
for oid_to_check in oids:
got_file_path = get_object(wallet=wallet, cid=cid, oid=oid_to_check)
got_file_hash = get_file_hash(got_file_path)
assert file_hash == got_file_hash
with allure.step('Get range/range hash'):
get_range_hash(**wallet_cid, oid=oids[0], bearer_token='', range_cut=range_cut)
get_range_hash(**wallet_cid, oid=oids[1], bearer_token='', range_cut=range_cut)
get_range(**wallet_cid, oid=oids[1], file_path='s_get_range', bearer='', range_cut=range_cut)
with allure.step('Search objects'):
search_object(**wallet_cid, expected_objects_list=oids)
search_object(**wallet_cid, filters=file_usr_header, expected_objects_list=oids[1:2])
search_object(**wallet_cid, filters=file_usr_header_oth, expected_objects_list=oids[2:3])
with allure.step('Head object and validate'):
head_object(**wallet_cid, oid=oids[0])
head_info = head_object(**wallet_cid, oid=oids[1])
check_header_is_presented(head_info, file_usr_header)
with allure.step('Delete objects'):
tombstone_s = delete_object(**wallet_cid, oid=oids[0])
tombstone_h = delete_object(**wallet_cid, oid=oids[1])
verify_head_tombstone(wallet_path=wallet, cid=cid, oid_ts=tombstone_s, oid=oids[0])
verify_head_tombstone(wallet_path=wallet, cid=cid, oid_ts=tombstone_h, oid=oids[1])
tick_epoch()
sleep(CLEANUP_TIMEOUT)
with allure.step('Get objects and check errors'):
get_object_and_check_error(**wallet_cid, oid=oids[0], err_msg='object already removed')
get_object_and_check_error(**wallet_cid, oid=oids[1], err_msg='object already removed')
def get_object_and_check_error(wallet: str, cid: str, oid: str, err_msg: str):
try:
get_object(wallet=wallet, cid=cid, oid=oid)
raise AssertionError(f'Expected object {oid} removed, but it is not')
except Exception as err:
logger.info(f'Error is {err}')
assert err_msg in str(err), f'Expected message {err_msg} in error: {err}'
def check_header_is_presented(head_info: dict, object_header: dict):
for key_to_check, val_to_check in object_header.items():
assert key_to_check in head_info['header']['attributes'], f'Key {key_to_check} is not found in {head_info}'
assert head_info['header']['attributes'].get(key_to_check) == str(
val_to_check), f'Expected value {val_to_check} for key {key_to_check}'


@@ -0,0 +1,235 @@
import logging
from random import choice
from time import sleep
import allure
import pytest
from contract_keywords import get_epoch, tick_epoch
from python_keywords.http_gate import (get_via_http_curl, get_via_http_gate,
get_via_http_gate_by_attribute,
get_via_zip_http_gate,
upload_via_http_gate,
upload_via_http_gate_curl)
from python_keywords.neofs_verbs import get_object, put_object
from python_keywords.storage_policy import get_nodes_without_object
from python_keywords.utility_keywords import get_file_hash
logger = logging.getLogger('NeoLogger')
CLEANUP_TIMEOUT = 10
@allure.link('https://github.com/nspcc-dev/neofs-http-gw#neofs-http-gateway', name='neofs-http-gateway')
@allure.link('https://github.com/nspcc-dev/neofs-http-gw#uploading', name='uploading')
@allure.link('https://github.com/nspcc-dev/neofs-http-gw#downloading', name='downloading')
@pytest.mark.http_gate
class TestHttpGate:
@allure.title('Test Put over gRPC, Get over HTTP')
def test_put_grpc_get_http(self, prepare_public_container, generate_files):
"""
Test that an object can be put using the gRPC interface and got using HTTP.
Steps:
1. Create simple and large objects.
2. Put objects using gRPC (neofs-cli).
3. Download objects using HTTP gate (https://github.com/nspcc-dev/neofs-http-gw#downloading).
4. Get objects using gRPC (neofs-cli).
5. Compare hashes of the objects downloaded via HTTP and gRPC.
6. Compare each downloaded object's hash with the original.
Expected result:
Hashes must be the same.
"""
cid, wallet = prepare_public_container
file_name_simple, large_file_name = generate_files
with allure.step('Put objects using gRPC'):
oid_simple = put_object(wallet=wallet, path=file_name_simple, cid=cid)
oid_large = put_object(wallet=wallet, path=large_file_name, cid=cid)
for oid, file_name in ((oid_simple, file_name_simple), (oid_large, large_file_name)):
self.get_object_and_verify_hashes(oid, file_name, wallet, cid)
@allure.link('https://github.com/nspcc-dev/neofs-http-gw#uploading', name='uploading')
@allure.link('https://github.com/nspcc-dev/neofs-http-gw#downloading', name='downloading')
@pytest.mark.sanity
@allure.title('Test Put over HTTP, Get over HTTP')
def test_put_http_get_http(self, prepare_public_container, generate_files):
"""
Test that an object can be put and got using the HTTP interface.
Steps:
1. Create simple and large objects.
2. Upload objects using HTTP (https://github.com/nspcc-dev/neofs-http-gw#uploading).
3. Download objects using HTTP gate (https://github.com/nspcc-dev/neofs-http-gw#downloading).
4. Compare hashes of the downloaded and original objects.
Expected result:
Hashes must be the same.
"""
cid, wallet = prepare_public_container
file_name_simple, large_file_name = generate_files
with allure.step('Put objects using HTTP'):
oid_simple = upload_via_http_gate(cid=cid, path=file_name_simple)
oid_large = upload_via_http_gate(cid=cid, path=large_file_name)
for oid, file_name in ((oid_simple, file_name_simple), (oid_large, large_file_name)):
self.get_object_and_verify_hashes(oid, file_name, wallet, cid)
@allure.link('https://github.com/nspcc-dev/neofs-http-gw#by-attributes', name='download by attributes')
@allure.title('Test Put over HTTP, Get over HTTP with headers')
@pytest.mark.parametrize('attributes',
[
{'fileName': 'simple_obj_filename'},
{'file-Name': 'simple obj filename'},
{'cat%jpeg': 'cat%jpeg'}
], ids=['simple', 'hyphen', 'percent']
)
def test_put_http_get_http_with_headers(self, prepare_public_container, generate_files, attributes):
"""
Test that object can be downloaded using different attributes in HTTP header.
Steps:
1. Create simple and large objects.
2. Upload objects using HTTP with particular attributes in the header.
3. Download objects by attributes using HTTP gate (https://github.com/nspcc-dev/neofs-http-gw#by-attributes).
4. Compare hashes of the downloaded and original objects.
Expected result:
Hashes must be the same.
"""
cid, wallet = prepare_public_container
file_name_simple, _ = generate_files
with allure.step('Put objects using HTTP with attribute'):
oid_simple = upload_via_http_gate(cid=cid, path=file_name_simple,
headers=self._attr_into_header(attributes))
self.get_object_by_attr_and_verify_hashes(oid_simple, file_name_simple, cid, attributes)
@allure.title('Test Expiration-Epoch in HTTP header')
def test_expiration_epoch_in_http(self, prepare_public_container, generate_file):
cid, wallet = prepare_public_container
file_name_simple = generate_file
object_not_found_err = 'object not found'
oids = []
curr_epoch = get_epoch()
epochs = (curr_epoch, curr_epoch + 1, curr_epoch + 2, curr_epoch + 100)
for epoch in epochs:
headers = {'X-Attribute-Neofs-Expiration-Epoch': str(epoch)}
with allure.step('Put objects using HTTP with attribute Expiration-Epoch'):
oids.append(upload_via_http_gate(cid=cid, path=file_name_simple, headers=headers))
assert len(oids) == len(epochs), 'Expected all objects to be put successfully'
with allure.step('All objects can be fetched'):
for oid in oids:
get_via_http_gate(cid=cid, oid=oid)
for expired_objects, not_expired_objects in [(oids[:1], oids[1:]), (oids[:2], oids[2:])]:
tick_epoch()
sleep(CLEANUP_TIMEOUT)
for oid in expired_objects:
self.try_to_get_object_and_expect_error(cid=cid, oid=oid, expected_err=object_not_found_err)
with allure.step('Other objects can still be fetched'):
for oid in not_expired_objects:
get_via_http_gate(cid=cid, oid=oid)
@allure.title('Test Zip in HTTP header')
def test_zip_in_http(self, prepare_public_container, generate_files):
cid, wallet = prepare_public_container
file_name_simple, file_name_complex = generate_files
common_prefix = 'my_files'
headers1 = {'X-Attribute-FilePath': f'{common_prefix}/file1'}
headers2 = {'X-Attribute-FilePath': f'{common_prefix}/file2'}
upload_via_http_gate(cid=cid, path=file_name_simple, headers=headers1)
upload_via_http_gate(cid=cid, path=file_name_complex, headers=headers2)
dir_path = get_via_zip_http_gate(cid=cid, prefix=common_prefix)
with allure.step('Verify hashes'):
assert get_file_hash(f'{dir_path}/file1') == get_file_hash(file_name_simple)
assert get_file_hash(f'{dir_path}/file2') == get_file_hash(file_name_complex)
@pytest.mark.curl
@pytest.mark.long
@allure.title('Test Put over HTTP/Curl, Get over HTTP/Curl for large object')
def test_put_http_get_http_large_file(self, prepare_public_container, generate_large_file):
"""
This test checks upload and download of a 'large' object (up to 20 MB in size) using curl.
"""
cid, wallet = prepare_public_container
file_path, file_hash = generate_large_file
with allure.step('Put objects using HTTP'):
oid_simple = upload_via_http_gate(cid=cid, path=file_path)
oid_curl = upload_via_http_gate_curl(cid=cid, filepath=file_path, large_object=True)
self.get_object_and_verify_hashes(oid_simple, file_path, wallet, cid)
self.get_object_and_verify_hashes(oid_curl, file_path, wallet, cid, object_getter=get_via_http_curl)
@pytest.mark.curl
@allure.title('Test Put/Get over HTTP using Curl utility')
def test_put_http_get_http_curl(self, prepare_public_container, generate_files):
"""
Test checks upload and download over HTTP using curl utility.
"""
cid, wallet = prepare_public_container
file_name_simple, large_file_name = generate_files
with allure.step('Put objects using curl utility'):
oid_simple = upload_via_http_gate_curl(cid=cid, filepath=file_name_simple)
oid_large = upload_via_http_gate_curl(cid=cid, filepath=large_file_name)
for oid, file_name in ((oid_simple, file_name_simple), (oid_large, large_file_name)):
self.get_object_and_verify_hashes(oid, file_name, wallet, cid, object_getter=get_via_http_curl)
@staticmethod
@allure.step('Try to get object and expect error')
def try_to_get_object_and_expect_error(cid: str, oid: str, expected_err: str):
try:
get_via_http_gate(cid=cid, oid=oid)
raise AssertionError(f'Expected error on getting object with cid: {cid}')
except Exception as err:
assert expected_err in str(err), f'Expected error {expected_err} in {err}'
@staticmethod
@allure.step('Verify object can be fetched using HTTP header attribute')
def get_object_by_attr_and_verify_hashes(oid: str, file_name: str, cid: str, attrs: dict):
got_file_path_http = get_via_http_gate(cid=cid, oid=oid)
got_file_path_http_attr = get_via_http_gate_by_attribute(cid=cid, attribute=attrs)
TestHttpGate._assert_hashes_the_same(file_name, got_file_path_http, got_file_path_http_attr)
@staticmethod
@allure.step('Verify object can be fetched using HTTP')
def get_object_and_verify_hashes(oid: str, file_name: str, wallet: str, cid: str, object_getter=None):
nodes = get_nodes_without_object(wallet=wallet, cid=cid, oid=oid)
random_node = choice(nodes)
object_getter = object_getter or get_via_http_gate
got_file_path = get_object(wallet=wallet, cid=cid, oid=oid, endpoint=random_node)
got_file_path_http = object_getter(cid=cid, oid=oid)
TestHttpGate._assert_hashes_the_same(file_name, got_file_path, got_file_path_http)
@staticmethod
def _assert_hashes_the_same(orig_file_name: str, got_file_1: str, got_file_2: str):
msg = 'Expected hashes are equal for files {f1} and {f2}'
got_file_hash_http = get_file_hash(got_file_1)
assert get_file_hash(got_file_2) == got_file_hash_http, msg.format(f1=got_file_2, f2=got_file_1)
assert get_file_hash(orig_file_name) == got_file_hash_http, msg.format(f1=orig_file_name, f2=got_file_1)
@staticmethod
def _attr_into_header(attrs: dict) -> dict:
return {f'X-Attribute-{_key}': _value for _key, _value in attrs.items()}
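# Illustration (not part of this commit):
#   TestHttpGate._attr_into_header({'fileName': 'cat.jpeg'})
#   returns {'X-Attribute-fileName': 'cat.jpeg'}, which the gate stores
#   as an object attribute on upload.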


@@ -0,0 +1,525 @@
import logging
import os
from random import choice, choices
import allure
import pytest
from common import ASSETS_DIR, COMPLEX_OBJ_SIZE, SIMPLE_OBJ_SIZE
from contract_keywords import tick_epoch
from python_keywords import s3_gate_bucket, s3_gate_object
from python_keywords.aws_cli_client import AwsCliClient
from python_keywords.container import list_containers
from python_keywords.utility_keywords import (generate_file_and_file_hash,
get_file_hash)
from utility import create_file_with_content, get_file_content, split_file
logger = logging.getLogger('NeoLogger')
def pytest_generate_tests(metafunc):
if "s3_client" in metafunc.fixturenames:
metafunc.parametrize("s3_client", ['aws cli', 'boto3'], indirect=True)
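# `indirect=True` routes each parameter ('aws cli' / 'boto3') into the
# `s3_client` fixture below via `request.param`, so every test in this
# module runs once against each client implementation.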
@allure.link('https://github.com/nspcc-dev/neofs-s3-gw#neofs-s3-gateway', name='neofs-s3-gateway')
@pytest.mark.s3_gate
class TestS3Gate:
s3_client = None
@pytest.fixture(scope='class', autouse=True)
@allure.title('[Class/Autouse]: Create S3 client')
def s3_client(self, prepare_wallet_and_deposit, request):
wallet, wif = prepare_wallet_and_deposit
s3_bearer_rules_file = f"{os.getcwd()}/robot/resources/files/s3_bearer_rules.json"
cid, bucket, access_key_id, secret_access_key, owner_private_key = \
s3_gate_bucket.init_s3_credentials(wallet, s3_bearer_rules_file=s3_bearer_rules_file)
containers_list = list_containers(wallet)
assert cid in containers_list, f'Expected cid {cid} in {containers_list}'
if request.param == 'aws cli':
try:
client = AwsCliClient(access_key_id, secret_access_key)
except Exception as err:
if 'command was not found or was not executable' in str(err):
pytest.skip('AWS CLI was not found')
else:
raise RuntimeError('Error on creating instance for AwsCliClient') from err
else:
client = s3_gate_bucket.config_s3_client(access_key_id, secret_access_key)
TestS3Gate.s3_client = client
@pytest.fixture
@allure.title('Create two buckets')
def create_buckets(self):
bucket_1 = s3_gate_bucket.create_bucket_s3(self.s3_client)
bucket_2 = s3_gate_bucket.create_bucket_s3(self.s3_client)
return bucket_1, bucket_2
@pytest.fixture
@allure.title('Create/delete bucket')
def bucket(self):
bucket = s3_gate_bucket.create_bucket_s3(self.s3_client)
yield bucket
objects = s3_gate_object.list_objects_s3(self.s3_client, bucket)
if objects:
s3_gate_object.delete_objects_s3(self.s3_client, bucket, objects)
s3_gate_bucket.delete_bucket_s3(self.s3_client, bucket)
@allure.title('Test S3 Bucket API')
def test_s3_buckets(self, generate_files):
"""
Test base S3 Bucket API (Create/List/Head/Delete).
"""
file_name_simple, file_name_large = generate_files
file_name = self.object_key_from_file_path(file_name_simple)
with allure.step('Create buckets'):
bucket_1 = s3_gate_bucket.create_bucket_s3(self.s3_client)
bucket_2 = s3_gate_bucket.create_bucket_s3(self.s3_client)
with allure.step('Check buckets are presented in the system'):
buckets = s3_gate_bucket.list_buckets_s3(self.s3_client)
assert bucket_1 in buckets, f'Expected bucket {bucket_1} is in the list'
assert bucket_2 in buckets, f'Expected bucket {bucket_2} is in the list'
with allure.step('Bucket must be empty'):
for bucket in (bucket_1, bucket_2):
objects_list = s3_gate_object.list_objects_s3(self.s3_client, bucket)
assert not objects_list, f'Expected empty bucket, got {objects_list}'
with allure.step('Check buckets are visible with S3 head command'):
s3_gate_bucket.head_bucket(self.s3_client, bucket_1)
s3_gate_bucket.head_bucket(self.s3_client, bucket_2)
with allure.step('Check we can put/list object with S3 commands'):
s3_gate_object.put_object_s3(self.s3_client, bucket_1, file_name_simple)
s3_gate_object.head_object_s3(self.s3_client, bucket_1, file_name)
bucket_objects = s3_gate_object.list_objects_s3(self.s3_client, bucket_1)
assert file_name in bucket_objects, \
f'Expected file {file_name} in objects list {bucket_objects}'
with allure.step('Try to delete not empty bucket and get error'):
with pytest.raises(Exception, match=r'.*The bucket you tried to delete is not empty.*'):
s3_gate_bucket.delete_bucket_s3(self.s3_client, bucket_1)
s3_gate_bucket.head_bucket(self.s3_client, bucket_1)
with allure.step(f'Delete empty bucket {bucket_2}'):
s3_gate_bucket.delete_bucket_s3(self.s3_client, bucket_2)
tick_epoch()
with allure.step(f'Check bucket {bucket_2} deleted'):
with pytest.raises(Exception, match=r'.*Not Found.*'):
s3_gate_bucket.head_bucket(self.s3_client, bucket_2)
buckets = s3_gate_bucket.list_buckets_s3(self.s3_client)
assert bucket_1 in buckets, f'Expected bucket {bucket_1} is in the list'
assert bucket_2 not in buckets, f'Expected bucket {bucket_2} is not in the list'
@allure.title('Test S3 Object API')
@pytest.mark.sanity
@pytest.mark.parametrize('file_type', ['simple', 'large'], ids=['Simple object', 'Large object'])
def test_s3_api_object(self, generate_files, file_type):
"""
Test base S3 Object API (Put/Head/List) for simple and large objects.
"""
file_name_simple, file_name_large = generate_files
file_name_path = file_name_simple if file_type == 'simple' else file_name_large
file_name = self.object_key_from_file_path(file_name_path)
bucket_1 = s3_gate_bucket.create_bucket_s3(self.s3_client)
bucket_2 = s3_gate_bucket.create_bucket_s3(self.s3_client)
for bucket in (bucket_1, bucket_2):
with allure.step('Bucket must be empty'):
objects_list = s3_gate_object.list_objects_s3(self.s3_client, bucket)
assert not objects_list, f'Expected empty bucket, got {objects_list}'
s3_gate_object.put_object_s3(self.s3_client, bucket, file_name_path)
s3_gate_object.put_object_s3(self.s3_client, bucket, file_name_large)
s3_gate_object.head_object_s3(self.s3_client, bucket, file_name)
bucket_objects = s3_gate_object.list_objects_s3(self.s3_client, bucket)
assert file_name in bucket_objects, \
f'Expected file {file_name} in objects list {bucket_objects}'
with allure.step("Check object's attributes"):
for attrs in (['ETag'], ['ObjectSize', 'StorageClass']):
s3_gate_object.get_object_attributes(self.s3_client, bucket, file_name, *attrs)
@allure.title('Test S3 Sync directory')
def test_s3_sync_dir(self, bucket):
"""
Test checks sync directory with AWS CLI utility.
"""
file_path_1 = f"{os.getcwd()}/{ASSETS_DIR}/test_sync/test_file_1"
file_path_2 = f"{os.getcwd()}/{ASSETS_DIR}/test_sync/test_file_2"
key_to_path = {'test_file_1': file_path_1, 'test_file_2': file_path_2}
if not isinstance(self.s3_client, AwsCliClient):
pytest.skip('This test is not supported with boto3 client')
create_file_with_content(file_path=file_path_1)
create_file_with_content(file_path=file_path_2)
self.s3_client.sync(bucket_name=bucket, dir_path=os.path.dirname(file_path_1))
with allure.step('Check objects are synced'):
objects = s3_gate_object.list_objects_s3(self.s3_client, bucket)
with allure.step('Check these are the same objects'):
assert set(key_to_path.keys()) == set(objects), f'Expected all objects saved. Got {objects}'
for obj_key in objects:
got_object = s3_gate_object.get_object_s3(self.s3_client, bucket, obj_key)
assert get_file_hash(got_object) == get_file_hash(key_to_path.get(obj_key)), \
'Expected hashes are the same'
@allure.title('Test S3 Object versioning')
def test_s3_api_versioning(self, bucket):
"""
Test checks basic versioning functionality for S3 bucket.
"""
version_1_content = 'Version 1'
version_2_content = 'Version 2'
file_name_simple = create_file_with_content(content=version_1_content)
obj_key = os.path.basename(file_name_simple)
with allure.step('Enable versioning for bucket'):
s3_gate_bucket.get_bucket_versioning_status(self.s3_client, bucket)
s3_gate_bucket.set_bucket_versioning(self.s3_client, bucket, status=s3_gate_bucket.VersioningStatus.ENABLED)
status = s3_gate_bucket.get_bucket_versioning_status(self.s3_client, bucket)
assert status == s3_gate_bucket.VersioningStatus.ENABLED.value, f'Expected enabled status. Got {status}'
with allure.step('Put several versions of object into bucket'):
version_id_1 = s3_gate_object.put_object_s3(self.s3_client, bucket, file_name_simple)
create_file_with_content(file_path=file_name_simple, content=version_2_content)
version_id_2 = s3_gate_object.put_object_s3(self.s3_client, bucket, file_name_simple)
with allure.step('Check bucket shows all versions'):
versions = s3_gate_object.list_objects_versions_s3(self.s3_client, bucket)
obj_versions = {version.get('VersionId') for version in versions if version.get('Key') == obj_key}
assert obj_versions == {version_id_1, version_id_2}, \
f'Expected object has versions: {version_id_1, version_id_2}'
with allure.step('Show information about particular version'):
for version_id in (version_id_1, version_id_2):
response = s3_gate_object.head_object_s3(self.s3_client, bucket, obj_key, version_id=version_id)
assert 'LastModified' in response, 'Expected LastModified field'
assert 'ETag' in response, 'Expected ETag field'
assert response.get('VersionId') == version_id, f'Expected VersionId is {version_id}'
assert response.get('ContentLength') != 0, 'Expected ContentLength is not zero'
with allure.step("Check object's attributes"):
for version_id in (version_id_1, version_id_2):
got_attrs = s3_gate_object.get_object_attributes(self.s3_client, bucket, obj_key, 'ETag',
version_id=version_id)
if got_attrs:
assert got_attrs.get('VersionId') == version_id, f'Expected VersionId is {version_id}'
with allure.step('Delete object and check it was deleted'):
response = s3_gate_object.delete_object_s3(self.s3_client, bucket, obj_key)
version_id_delete = response.get('VersionId')
with pytest.raises(Exception, match=r'.*Not Found.*'):
s3_gate_object.head_object_s3(self.s3_client, bucket, obj_key)
with allure.step('Get content for all versions and check it is correct'):
for version, content in ((version_id_2, version_2_content), (version_id_1, version_1_content)):
file_name = s3_gate_object.get_object_s3(self.s3_client, bucket, obj_key, version_id=version)
got_content = get_file_content(file_name)
assert got_content == content, f'Expected object content is\n{content}\nGot\n{got_content}'
with allure.step('Restore previous object version'):
s3_gate_object.delete_object_s3(self.s3_client, bucket, obj_key, version_id=version_id_delete)
file_name = s3_gate_object.get_object_s3(self.s3_client, bucket, obj_key)
got_content = get_file_content(file_name)
assert got_content == version_2_content, \
f'Expected object content is\n{version_2_content}\nGot\n{got_content}'
@allure.title('Test S3 Object Multipart API')
def test_s3_api_multipart(self, bucket):
"""
Test checks S3 Multipart API (Create multipart upload/Abort multipart upload/List multipart upload/
Upload part/List parts/Complete multipart upload).
"""
parts_count = 3
file_name_large, _ = generate_file_and_file_hash(SIMPLE_OBJ_SIZE * 1024 * 6 * parts_count) # 5Mb - min part
# file_name_large, _ = generate_file_and_file_hash(SIMPLE_OBJ_SIZE * 1024 * 30 * parts_count) # 5Mb - min part
object_key = self.object_key_from_file_path(file_name_large)
part_files = split_file(file_name_large, parts_count)
parts = []
uploads = s3_gate_object.list_multipart_uploads_s3(self.s3_client, bucket)
assert not uploads, f'Expected no uploads in bucket {bucket}'
with allure.step('Create and abort multipart upload'):
upload_id = s3_gate_object.create_multipart_upload_s3(self.s3_client, bucket, object_key)
uploads = s3_gate_object.list_multipart_uploads_s3(self.s3_client, bucket)
assert uploads, f'Expected one upload in bucket {bucket}'
assert uploads[0].get('Key') == object_key, f'Expected correct key {object_key} in upload {uploads}'
assert uploads[0].get('UploadId') == upload_id, f'Expected correct UploadId {upload_id} in upload {uploads}'
s3_gate_object.abort_multipart_uploads_s3(self.s3_client, bucket, object_key, upload_id)
uploads = s3_gate_object.list_multipart_uploads_s3(self.s3_client, bucket)
assert not uploads, f'Expected no uploads in bucket {bucket}'
with allure.step('Create new multipart upload and upload several parts'):
upload_id = s3_gate_object.create_multipart_upload_s3(self.s3_client, bucket, object_key)
for part_id, file_path in enumerate(part_files, start=1):
etag = s3_gate_object.upload_part_s3(self.s3_client, bucket, object_key, upload_id, part_id, file_path)
parts.append((part_id, etag))
with allure.step('Check all parts are visible in bucket'):
got_parts = s3_gate_object.list_parts_s3(self.s3_client, bucket, object_key, upload_id)
assert len(got_parts) == len(part_files), f'Expected {parts_count} parts, got\n{got_parts}'
s3_gate_object.complete_multipart_upload_s3(self.s3_client, bucket, object_key, upload_id, parts)
uploads = s3_gate_object.list_multipart_uploads_s3(self.s3_client, bucket)
assert not uploads, f'Expected no uploads in bucket {bucket}'
with allure.step('Check we can get whole object from bucket'):
got_object = s3_gate_object.get_object_s3(self.s3_client, bucket, object_key)
assert get_file_hash(got_object) == get_file_hash(file_name_large)
self.check_object_attributes(bucket, object_key, parts_count)
@allure.title('Test S3 Bucket tagging API')
def test_s3_api_bucket_tagging(self, bucket):
"""
Test checks S3 Bucket tagging API (Put tag/Get tag).
"""
key_value_pair = [('some-key', 'some-value'), ('some-key-2', 'some-value-2')]
s3_gate_bucket.put_bucket_tagging(self.s3_client, bucket, key_value_pair)
got_tags = s3_gate_bucket.get_bucket_tagging(self.s3_client, bucket)
with allure.step('Check all tags are presented'):
assert got_tags, f'Expected tags, got {got_tags}'
expected_tags = [{'Key': key, 'Value': value} for key, value in key_value_pair]
for tag in expected_tags:
assert tag in got_tags
s3_gate_bucket.delete_bucket_tagging(self.s3_client, bucket)
tags = s3_gate_bucket.get_bucket_tagging(self.s3_client, bucket)
assert not tags, f'Expected there is no tags for bucket {bucket}, got {tags}'
@allure.title('Test S3 Object tagging API')
def test_s3_api_object_tagging(self, bucket):
"""
Test checks S3 Object tagging API (Put tag/Get tag/Update tag).
"""
key_value_pair_bucket = [('some-key', 'some-value'), ('some-key-2', 'some-value-2')]
key_value_pair_obj = [('some-key-obj', 'some-value-obj'), ('some-key--obj2', 'some-value--obj2')]
key_value_pair_obj_new = [('some-key-obj-new', 'some-value-obj-new')]
file_name_simple, _ = generate_file_and_file_hash(SIMPLE_OBJ_SIZE)
obj_key = self.object_key_from_file_path(file_name_simple)
s3_gate_bucket.put_bucket_tagging(self.s3_client, bucket, key_value_pair_bucket)
s3_gate_object.put_object_s3(self.s3_client, bucket, file_name_simple)
for tags in (key_value_pair_obj, key_value_pair_obj_new):
s3_gate_object.put_object_tagging(self.s3_client, bucket, obj_key, tags)
got_tags = s3_gate_object.get_object_tagging(self.s3_client, bucket, obj_key)
assert got_tags, f'Expected tags, got {got_tags}'
expected_tags = [{'Key': key, 'Value': value} for key, value in tags]
for tag in expected_tags:
assert tag in got_tags
s3_gate_object.delete_object_tagging(self.s3_client, bucket, obj_key)
got_tags = s3_gate_object.get_object_tagging(self.s3_client, bucket, obj_key)
assert not got_tags, f'Expected there is no tags for bucket {bucket}, got {got_tags}'
@allure.title('Test S3: Delete object & delete objects S3 API')
def test_s3_api_delete(self, create_buckets):
"""
Check delete_object and delete_objects S3 API operations. From the first bucket, objects are deleted one by one;
from the second bucket, all at once.
"""
max_obj_count = 20
max_delete_objects = 17
put_objects = []
file_paths = []
obj_sizes = [SIMPLE_OBJ_SIZE, COMPLEX_OBJ_SIZE]
bucket_1, bucket_2 = create_buckets
with allure.step(f'Generate {max_obj_count} files'):
for _ in range(max_obj_count):
file_paths.append(generate_file_and_file_hash(choice(obj_sizes))[0])
for bucket in (bucket_1, bucket_2):
with allure.step(f'Bucket {bucket} must be empty as it was just created'):
objects_list = s3_gate_object.list_objects_s3_v2(self.s3_client, bucket)
assert not objects_list, f'Expected empty bucket, got {objects_list}'
for file_path in file_paths:
s3_gate_object.put_object_s3(self.s3_client, bucket, file_path)
put_objects.append(self.object_key_from_file_path(file_path))
with allure.step(f'Check all objects put in bucket {bucket} successfully'):
bucket_objects = s3_gate_object.list_objects_s3_v2(self.s3_client, bucket)
assert set(put_objects) == set(bucket_objects), \
f'Expected all objects {put_objects} in objects list {bucket_objects}'
with allure.step('Delete some objects from bucket_1 one by one'):
objects_to_delete_b1 = choices(put_objects, k=max_delete_objects)
for obj in objects_to_delete_b1:
s3_gate_object.delete_object_s3(self.s3_client, bucket_1, obj)
with allure.step('Check deleted objects are not visible in bucket bucket_1'):
bucket_objects = s3_gate_object.list_objects_s3_v2(self.s3_client, bucket_1)
assert set(put_objects).difference(set(objects_to_delete_b1)) == set(bucket_objects), \
f'Expected all objects {put_objects} in objects list {bucket_objects}'
self.try_to_get_object_and_expect_error(bucket_1, objects_to_delete_b1)
with allure.step('Delete some objects from bucket_2 at once'):
objects_to_delete_b2 = choices(put_objects, k=max_delete_objects)
s3_gate_object.delete_objects_s3(self.s3_client, bucket_2, objects_to_delete_b2)
with allure.step('Check deleted objects are not visible in bucket bucket_2'):
objects_list = s3_gate_object.list_objects_s3_v2(self.s3_client, bucket_2)
assert set(put_objects).difference(set(objects_to_delete_b2)) == set(objects_list), \
f'Expected all objects {put_objects} in objects list {objects_list}'
self.try_to_get_object_and_expect_error(bucket_2, objects_to_delete_b2)
@allure.title('Test S3: Copy object to the same bucket')
def test_s3_copy_same_bucket(self, generate_files):
"""
Test object can be copied to the same bucket.
"""
file_simple, file_large = generate_files
file_name_simple = self.object_key_from_file_path(file_simple)
file_name_large = self.object_key_from_file_path(file_large)
bucket_objects = [file_name_simple, file_name_large]
bucket = s3_gate_bucket.create_bucket_s3(self.s3_client)
with allure.step('Bucket must be empty'):
objects_list = s3_gate_object.list_objects_s3(self.s3_client, bucket)
assert not objects_list, f'Expected empty bucket, got {objects_list}'
with allure.step('Put objects into bucket'):
for obj in (file_simple, file_large):
s3_gate_object.put_object_s3(self.s3_client, bucket, obj)
with allure.step('Copy one object into the same bucket'):
copy_obj_path = s3_gate_object.copy_object_s3(self.s3_client, bucket, file_name_simple)
bucket_objects.append(copy_obj_path)
self.check_objects_in_bucket(bucket, bucket_objects)
with allure.step('Check copied object has the same content'):
got_copied_file = s3_gate_object.get_object_s3(self.s3_client, bucket, copy_obj_path)
assert get_file_hash(file_simple) == get_file_hash(got_copied_file), 'Hashes must be the same'
with allure.step('Delete one object from bucket'):
s3_gate_object.delete_object_s3(self.s3_client, bucket, file_name_simple)
bucket_objects.remove(file_name_simple)
self.check_objects_in_bucket(bucket, expected_objects=bucket_objects, unexpected_objects=[file_name_simple])
@allure.title('Test S3: Copy object to another bucket')
def test_s3_copy_to_another_bucket(self, generate_files):
"""
Test object can be copied to another bucket.
"""
file_simple, file_large = generate_files
file_name_simple = self.object_key_from_file_path(file_simple)
file_name_large = self.object_key_from_file_path(file_large)
bucket_1_objects = [file_name_simple, file_name_large]
bucket_1 = s3_gate_bucket.create_bucket_s3(self.s3_client)
bucket_2 = s3_gate_bucket.create_bucket_s3(self.s3_client)
with allure.step('Buckets must be empty'):
for bucket in (bucket_1, bucket_2):
objects_list = s3_gate_object.list_objects_s3(self.s3_client, bucket)
assert not objects_list, f'Expected empty bucket, got {objects_list}'
with allure.step('Put objects into one bucket'):
for obj in (file_simple, file_large):
s3_gate_object.put_object_s3(self.s3_client, bucket_1, obj)
with allure.step('Copy object from first bucket into second'):
copy_obj_path_b2 = s3_gate_object.copy_object_s3(self.s3_client, bucket_1, file_name_large,
bucket_dst=bucket_2)
self.check_objects_in_bucket(bucket_1, expected_objects=bucket_1_objects)
self.check_objects_in_bucket(bucket_2, expected_objects=[copy_obj_path_b2])
with allure.step('Check copied object has the same content'):
got_copied_file_b2 = s3_gate_object.get_object_s3(self.s3_client, bucket_2, copy_obj_path_b2)
assert get_file_hash(file_large) == get_file_hash(got_copied_file_b2), 'Hashes must be the same'
with allure.step('Delete one object from first bucket'):
s3_gate_object.delete_object_s3(self.s3_client, bucket_1, file_name_simple)
bucket_1_objects.remove(file_name_simple)
self.check_objects_in_bucket(bucket_1, expected_objects=bucket_1_objects)
self.check_objects_in_bucket(bucket_2, expected_objects=[copy_obj_path_b2])
with allure.step('Delete one object from second bucket and check it is empty'):
s3_gate_object.delete_object_s3(self.s3_client, bucket_2, copy_obj_path_b2)
self.check_objects_in_bucket(bucket_2, expected_objects=[])
def check_object_attributes(self, bucket: str, object_key: str, parts_count: int):
if not isinstance(self.s3_client, AwsCliClient):
logger.warning('Attributes check is not supported for boto3 implementation')
return
with allure.step("Check object's attributes"):
obj_parts = s3_gate_object.get_object_attributes(self.s3_client, bucket, object_key, 'ObjectParts',
get_full_resp=False)
assert obj_parts.get('TotalPartsCount') == parts_count, f'Expected TotalPartsCount is {parts_count}'
assert len(obj_parts.get('Parts')) == parts_count, f'Expected Parts count is {parts_count}'
with allure.step("Check object's attribute max-parts"):
max_parts = 2
obj_parts = s3_gate_object.get_object_attributes(self.s3_client, bucket, object_key, 'ObjectParts',
max_parts=max_parts, get_full_resp=False)
assert obj_parts.get('TotalPartsCount') == parts_count, f'Expected TotalPartsCount is {parts_count}'
assert obj_parts.get('MaxParts') == max_parts, f'Expected MaxParts is {max_parts}'
assert len(obj_parts.get('Parts')) == max_parts, f'Expected Parts count is {max_parts}'
with allure.step("Check object's attribute part-number-marker"):
part_number_marker = 3
obj_parts = s3_gate_object.get_object_attributes(self.s3_client, bucket, object_key, 'ObjectParts',
part_number=part_number_marker, get_full_resp=False)
assert obj_parts.get('TotalPartsCount') == parts_count, f'Expected TotalPartsCount is {parts_count}'
assert obj_parts.get(
'PartNumberMarker') == part_number_marker, f'Expected PartNumberMarker is {part_number_marker}'
assert len(obj_parts.get('Parts')) == 1, f'Expected Parts count is 1'
@allure.step('Expected all objects are presented in the bucket')
def check_objects_in_bucket(self, bucket, expected_objects: list, unexpected_objects: list = None):
unexpected_objects = unexpected_objects or []
bucket_objects = s3_gate_object.list_objects_s3(self.s3_client, bucket)
assert len(bucket_objects) == len(expected_objects), f'Expected {len(expected_objects)} objects in the bucket'
for bucket_object in expected_objects:
assert bucket_object in bucket_objects, \
f'Expected object {bucket_object} in objects list {bucket_objects}'
for bucket_object in unexpected_objects:
assert bucket_object not in bucket_objects, \
f'Expected object {bucket_object} not in objects list {bucket_objects}'
@allure.step('Try to get object and expect error')
def try_to_get_object_and_expect_error(self, bucket: str, unexpected_objects: list):
for obj in unexpected_objects:
try:
s3_gate_object.get_object_s3(self.s3_client, bucket, obj)
raise AssertionError(f'Object {obj} found in bucket {bucket}')
except Exception as err:
assert 'The specified key does not exist' in str(err), f'Expected error in exception {err}'
@staticmethod
def object_key_from_file_path(full_path: str) -> str:
return os.path.basename(full_path)


@@ -0,0 +1 @@
password: ""

python_keywords/aws_cli_client.py Normal file

@@ -0,0 +1,219 @@
import json
import logging
import os
import allure
from cli_helpers import _cmd_run, _configure_aws_cli
from common import ASSETS_DIR, S3_GATE
logger = logging.getLogger('NeoLogger')
class AwsCliClient:
def __init__(self, access_key_id: str, secret_access_key: str):
self.access_key_id = access_key_id
self.secret_access_key = secret_access_key
self.config_aws_client()
def config_aws_client(self):
cmd = 'aws configure'
logger.info(f'Executing command: {cmd}')
_configure_aws_cli(cmd, self.access_key_id, self.secret_access_key)
def create_bucket(self, Bucket: str):
cmd = f'aws --no-verify-ssl s3api create-bucket --bucket {Bucket} --endpoint-url {S3_GATE}'
_cmd_run(cmd)
def list_buckets(self) -> dict:
cmd = f'aws --no-verify-ssl s3api list-buckets --endpoint {S3_GATE}'
output = _cmd_run(cmd)
return self._to_json(output)
def get_bucket_versioning(self, Bucket: str) -> dict:
cmd = f'aws --no-verify-ssl s3api get-bucket-versioning --bucket {Bucket}' \
f' --endpoint {S3_GATE}'
output = _cmd_run(cmd)
return self._to_json(output)
def put_bucket_versioning(self, Bucket: str, VersioningConfiguration: dict) -> dict:
cmd = f'aws --no-verify-ssl s3api put-bucket-versioning --bucket {Bucket} ' \
f'--versioning-configuration Status={VersioningConfiguration.get("Status")}' \
f' --endpoint {S3_GATE}'
output = _cmd_run(cmd)
return self._to_json(output)
def list_objects(self, Bucket: str) -> dict:
cmd = f'aws --no-verify-ssl s3api list-objects --bucket {Bucket}' \
f' --endpoint {S3_GATE}'
output = _cmd_run(cmd)
return self._to_json(output)
def list_objects_v2(self, Bucket: str) -> dict:
cmd = f'aws --no-verify-ssl s3api list-objects-v2 --bucket {Bucket}' \
f' --endpoint {S3_GATE}'
output = _cmd_run(cmd)
return self._to_json(output)
def list_object_versions(self, Bucket: str) -> dict:
cmd = f'aws --no-verify-ssl s3api list-object-versions --bucket {Bucket}' \
f' --endpoint {S3_GATE}'
output = _cmd_run(cmd)
return self._to_json(output)
def copy_object(self, Bucket: str, CopySource: str, Key: str) -> dict:
cmd = f'aws --no-verify-ssl s3api copy-object --copy-source {CopySource} --bucket {Bucket} --key {Key}' \
f' --endpoint {S3_GATE}'
output = _cmd_run(cmd)
return self._to_json(output)
def head_bucket(self, Bucket: str) -> dict:
cmd = f'aws --no-verify-ssl s3api head-bucket --bucket {Bucket} --endpoint {S3_GATE}'
output = _cmd_run(cmd)
return self._to_json(output)
def put_object(self, Body: str, Bucket: str, Key: str) -> dict:
cmd = f' aws --no-verify-ssl s3api put-object --bucket {Bucket} --key {Key} --body {Body}' \
f' --endpoint {S3_GATE}'
output = _cmd_run(cmd)
return self._to_json(output)
def head_object(self, Bucket: str, Key: str, VersionId: str = None) -> dict:
version = f' --version-id {VersionId}' if VersionId else ''
cmd = f' aws --no-verify-ssl s3api head-object --bucket {Bucket} --key {Key} {version}' \
f' --endpoint {S3_GATE}'
output = _cmd_run(cmd)
return self._to_json(output)
def get_object(self, Bucket: str, Key: str, file_path: str, VersionId: str = None) -> dict:
version = f' --version-id {VersionId}' if VersionId else ''
cmd = f' aws --no-verify-ssl s3api get-object --bucket {Bucket} ' \
f'--key {Key} {version} {file_path} --endpoint {S3_GATE}'
output = _cmd_run(cmd)
return self._to_json(output)
def delete_objects(self, Bucket: str, Delete: dict) -> dict:
file_path = f"{os.getcwd()}/{ASSETS_DIR}/delete.json"
with open(file_path, 'w') as out_file:
out_file.write(json.dumps(Delete))
cmd = f'aws --no-verify-ssl s3api delete-objects --bucket {Bucket} --delete file://{file_path} ' \
f'--endpoint {S3_GATE}'
output = _cmd_run(cmd)
return self._to_json(output)
def delete_object(self, Bucket: str, Key: str, VersionId: str = None) -> dict:
version = f' --version-id {VersionId}' if VersionId else ''
cmd = f'aws --no-verify-ssl s3api delete-object --bucket {Bucket} --key {Key} {version}' \
f' --endpoint {S3_GATE}'
output = _cmd_run(cmd)
return self._to_json(output)
def get_object_attributes(self, bucket: str, key: str, *attributes: str, version_id: str = None,
max_parts: int = None, part_number: int = None) -> dict:
attrs = ','.join(attributes)
version = f' --version-id {version_id}' if version_id else ''
parts = f'--max-parts {max_parts}' if max_parts else ''
part_number = f'--part-number-marker {part_number}' if part_number else ''
cmd = f'aws --no-verify-ssl s3api get-object-attributes --bucket {bucket} --key {key} {version}' \
f' {parts} {part_number} --object-attributes {attrs} --endpoint {S3_GATE}'
output = _cmd_run(cmd)
return self._to_json(output)
def delete_bucket(self, Bucket: str) -> dict:
cmd = f'aws --no-verify-ssl s3api delete-bucket --bucket {Bucket} --endpoint {S3_GATE}'
output = _cmd_run(cmd)
return self._to_json(output)
def get_bucket_tagging(self, Bucket: str) -> dict:
cmd = f'aws --no-verify-ssl s3api get-bucket-tagging --bucket {Bucket} --endpoint {S3_GATE}'
output = _cmd_run(cmd)
return self._to_json(output)
def put_bucket_tagging(self, Bucket: str, Tagging: dict) -> dict:
cmd = f'aws --no-verify-ssl s3api put-bucket-tagging --bucket {Bucket} --tagging \'{json.dumps(Tagging)}\'' \
f' --endpoint {S3_GATE}'
output = _cmd_run(cmd)
return self._to_json(output)
def delete_bucket_tagging(self, Bucket: str) -> dict:
cmd = f'aws --no-verify-ssl s3api delete-bucket-tagging --bucket {Bucket} --endpoint {S3_GATE}'
output = _cmd_run(cmd)
return self._to_json(output)
def put_object_tagging(self, Bucket: str, Key: str, Tagging: dict) -> dict:
cmd = f'aws --no-verify-ssl s3api put-object-tagging --bucket {Bucket} --key {Key}' \
f' --tagging \'{json.dumps(Tagging)}\'' \
f' --endpoint {S3_GATE}'
output = _cmd_run(cmd)
return self._to_json(output)
def get_object_tagging(self, Bucket: str, Key: str) -> dict:
cmd = f'aws --no-verify-ssl s3api get-object-tagging --bucket {Bucket} --key {Key} --endpoint {S3_GATE}'
output = _cmd_run(cmd)
return self._to_json(output)
def delete_object_tagging(self, Bucket: str, Key: str) -> dict:
cmd = f'aws --no-verify-ssl s3api delete-object-tagging --bucket {Bucket} --key {Key} --endpoint {S3_GATE}'
output = _cmd_run(cmd)
return self._to_json(output)
@allure.step('Sync directory S3')
def sync(self, bucket_name: str, dir_path: str) -> dict:
cmd = f'aws --no-verify-ssl s3 sync {dir_path} s3://{bucket_name} --endpoint-url {S3_GATE}'
output = _cmd_run(cmd)
return self._to_json(output)
def create_multipart_upload(self, Bucket: str, Key: str) -> dict:
cmd = f'aws --no-verify-ssl s3api create-multipart-upload --bucket {Bucket} --key {Key}' \
f' --endpoint-url {S3_GATE}'
output = _cmd_run(cmd)
return self._to_json(output)
def list_multipart_uploads(self, Bucket: str) -> dict:
cmd = f'aws --no-verify-ssl s3api list-multipart-uploads --bucket {Bucket}' \
f' --endpoint-url {S3_GATE}'
output = _cmd_run(cmd)
return self._to_json(output)
def abort_multipart_upload(self, Bucket: str, Key: str, UploadId: str) -> dict:
cmd = f'aws --no-verify-ssl s3api abort-multipart-upload --bucket {Bucket} --key {Key}' \
f' --upload-id {UploadId} --endpoint-url {S3_GATE}'
output = _cmd_run(cmd)
return self._to_json(output)
def upload_part(self, UploadId: str, Bucket: str, Key: str, PartNumber: int, Body: str) -> dict:
cmd = f'aws --no-verify-ssl s3api upload-part --bucket {Bucket} --key {Key} --upload-id {UploadId} ' \
f'--part-number {PartNumber} --body {Body} --endpoint-url {S3_GATE}'
output = _cmd_run(cmd)
return self._to_json(output)
def list_parts(self, UploadId: str, Bucket: str, Key: str) -> dict:
cmd = f'aws --no-verify-ssl s3api list-parts --bucket {Bucket} --key {Key} --upload-id {UploadId} ' \
f' --endpoint-url {S3_GATE}'
output = _cmd_run(cmd)
return self._to_json(output)
def complete_multipart_upload(self, Bucket: str, Key: str, UploadId: str, MultipartUpload: dict) -> dict:
file_path = f"{os.getcwd()}/{ASSETS_DIR}/parts.json"
with open(file_path, 'w') as out_file:
out_file.write(json.dumps(MultipartUpload))
cmd = f'aws --no-verify-ssl s3api complete-multipart-upload --bucket {Bucket} --key {Key}' \
f' --upload-id {UploadId} --multipart-upload file://{file_path}' \
f' --endpoint-url {S3_GATE}'
output = _cmd_run(cmd)
return self._to_json(output)
@staticmethod
def _to_json(output: str) -> dict:
json_output = {}
try:
json_output = json.loads(output)
except Exception:
if '{' not in output and '}' not in output:
logger.warning(f'Could not parse json from output {output}')
return json_output
json_output = json.loads(output[output.index('{'):])
return json_output
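# Illustration (hypothetical CLI output): the AWS CLI may prefix its JSON
# with warnings, e.g. 'urllib3 InsecureRequestWarning ...\n{"Buckets": []}';
# _to_json() skips the prefix and still returns {'Buckets': []}.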

cli_helpers.py

@@ -4,9 +4,15 @@
Helper functions to use with `neofs-cli`, `neo-go`
and other CLIs.
"""
from typing import Union
import subprocess
import sys
from contextlib import suppress
from datetime import datetime
from json import dumps
from textwrap import shorten
import allure
import pexpect
from robot.api import logger
@@ -20,11 +26,16 @@ def _cmd_run(cmd, timeout=30):
""" """
try: try:
logger.info(f"Executing command: {cmd}") logger.info(f"Executing command: {cmd}")
start_time = datetime.now()
compl_proc = subprocess.run(cmd, check=True, universal_newlines=True, compl_proc = subprocess.run(cmd, check=True, universal_newlines=True,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT, timeout=timeout, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, timeout=timeout,
shell=True) shell=True)
output = compl_proc.stdout output = compl_proc.stdout
return_code = compl_proc.returncode
end_time = datetime.now()
logger.info(f"Output: {output}") logger.info(f"Output: {output}")
_attach_allure_log(cmd, output, return_code, start_time, end_time)
return output return output
except subprocess.CalledProcessError as exc: except subprocess.CalledProcessError as exc:
raise RuntimeError(f"Error:\nreturn code: {exc.returncode} " raise RuntimeError(f"Error:\nreturn code: {exc.returncode} "
@@ -38,8 +49,62 @@ def _cmd_run(cmd, timeout=30):
def _run_with_passwd(cmd):
    child = pexpect.spawn(cmd)
    child.delaybeforesend = 1
    child.expect(".*")
    child.sendline('\r')
    if sys.platform == "darwin":
        child.expect(pexpect.EOF)
        cmd = child.before
    else:
        child.wait()
        cmd = child.read()
    return cmd.decode()
def _configure_aws_cli(cmd, key_id, access_key, out_format='json'):
child = pexpect.spawn(cmd)
child.delaybeforesend = 1
child.expect("AWS Access Key ID.*")
child.sendline(key_id)
child.expect("AWS Secret Access Key.*")
child.sendline(access_key)
child.expect("Default region name.*")
child.sendline('')
child.expect("Default output format.*")
child.sendline(out_format)
child.wait()
cmd = child.read()
return cmd.decode()
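For context, a hedged sketch of how this helper would typically be invoked, assuming the key pair was just issued by init_s3_credentials (the variable names are illustrative):

    # Sketch: drive the interactive `aws configure` prompts non-interactively.
    _configure_aws_cli('aws configure', access_key_id, secret_access_key)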
def _attach_allure_log(cmd: str, output: str, return_code: int, start_time: datetime, end_time: datetime):
if 'allure' in sys.modules:
command_attachment = (
f"COMMAND: '{cmd}'\n"
f'OUTPUT:\n {output}\n'
f'RC: {return_code}\n'
f'Start / End / Elapsed\t {start_time.time()} / {end_time.time()} / {end_time - start_time}'
)
with allure.step(f'COMMAND: {shorten(cmd, width=60, placeholder="...")}'):
allure.attach(command_attachment, 'Command execution', allure.attachment_type.TEXT)
def log_command_execution(cmd: str, output: Union[str, dict]):
logger.info(f'{cmd}: {output}')
if 'allure' in sys.modules:
with suppress(Exception):
json_output = dumps(output, indent=4, sort_keys=True)
output = json_output
command_attachment = (
f"COMMAND: '{cmd}'\n"
f'OUTPUT:\n {output}\n'
)
with allure.step(f'COMMAND: {shorten(cmd, width=60, placeholder="...")}'):
allure.attach(command_attachment, 'Command execution', allure.attachment_type.TEXT)
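Both attachment helpers are gated on allure being importable, so plain Robot Framework runs skip them. An illustrative call (the payload here is made up); dict outputs are pretty-printed as JSON before attaching:

    log_command_execution('S3 Create bucket result', {'Location': '/my-bucket'})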

View file

@@ -1,16 +1,28 @@
#!/usr/bin/python3
import os
import re
import shutil
import sys
import uuid
import zipfile
from urllib.parse import quote_plus

import allure
import requests
from cli_helpers import _cmd_run
from common import HTTP_GATE
from robot.api import logger
from robot.api.deco import keyword
from robot.libraries.BuiltIn import BuiltIn

ROBOT_AUTO_KEYWORDS = False

if "pytest" in sys.modules:
    ASSETS_DIR = os.getenv("ASSETS_DIR", "TemporaryDir/")
else:
    ASSETS_DIR = BuiltIn().get_variable_value("${ASSETS_DIR}")
@keyword('Get via HTTP Gate')
@@ -30,7 +42,141 @@ def get_via_http_gate(cid: str, oid: str):
                status code: {resp.status_code} {resp.reason}""")
    logger.info(f'Request: {request}')
    _attach_allure_step(request, resp.status_code)
    filename = f"{ASSETS_DIR}/{cid}_{oid}"
    with open(filename, "wb") as get_file:
        shutil.copyfileobj(resp.raw, get_file)
    return filename
@keyword('Get via Zip HTTP Gate')
def get_via_zip_http_gate(cid: str, prefix: str):
"""
    This function downloads a zip archive of objects from the HTTP gate
    :param cid: CID of the container to download objects from
    :param prefix: common prefix of the objects to include in the archive
"""
request = f'{HTTP_GATE}/zip/{cid}/{prefix}'
resp = requests.get(request, stream=True)
if not resp.ok:
raise Exception(f"""Failed to get object via HTTP gate:
request: {resp.request.path_url},
response: {resp.text},
status code: {resp.status_code} {resp.reason}""")
logger.info(f'Request: {request}')
_attach_allure_step(request, resp.status_code)
filename = f'{ASSETS_DIR}/{cid}_archive.zip'
with open(filename, 'wb') as get_file:
shutil.copyfileobj(resp.raw, get_file)
with zipfile.ZipFile(filename, 'r') as zip_ref:
zip_ref.extractall(ASSETS_DIR)
return f'{ASSETS_DIR}/{prefix}'
@keyword('Get via HTTP Gate by attribute')
def get_via_http_gate_by_attribute(cid: str, attribute: dict):
"""
    This function gets an object from the HTTP gate by one of its attributes
    :param cid: CID to get object from
    :param attribute: {attribute name: attribute value} pair
"""
attr_name = list(attribute.keys())[0]
attr_value = quote_plus(str(attribute.get(attr_name)))
request = f'{HTTP_GATE}/get_by_attribute/{cid}/{quote_plus(str(attr_name))}/{attr_value}'
resp = requests.get(request, stream=True)
if not resp.ok:
raise Exception(f"""Failed to get object via HTTP gate:
request: {resp.request.path_url},
response: {resp.text},
status code: {resp.status_code} {resp.reason}""")
logger.info(f'Request: {request}')
_attach_allure_step(request, resp.status_code)
filename = f"{ASSETS_DIR}/{cid}_{str(uuid.uuid4())}"
with open(filename, "wb") as get_file:
shutil.copyfileobj(resp.raw, get_file)
return filename
@keyword('Upload via HTTP Gate')
def upload_via_http_gate(cid: str, path: str, headers: dict = None) -> str:
    """
    This function uploads the given file through the HTTP gate
    :param cid: CID of the container to upload the object to
    :param path: file path of the object to upload
    :param headers: object headers
    """
    request = f'{HTTP_GATE}/upload/{cid}'
    body = {
        'filename': path
    }
    with open(path, 'rb') as upload_file:
        files = {'upload_file': upload_file}
        resp = requests.post(request, files=files, data=body, headers=headers)
    if not resp.ok:
        raise Exception(f"""Failed to upload object via HTTP gate:
                request: {resp.request.path_url},
                response: {resp.text},
                status code: {resp.status_code} {resp.reason}""")
    logger.info(f'Request: {request}')
    _attach_allure_step(request, resp.json(), req_type='POST')
    assert resp.json().get('object_id'), f'OID not found in response {resp}'
    return resp.json().get('object_id')
@keyword('Upload via HTTP Gate using Curl')
def upload_via_http_gate_curl(cid: str, filepath: str, large_object=False, headers: dict = None) -> str:
"""
This function upload given object through HTTP gate using curl utility.
:param cid: CID to get object from
:param filepath: File path to upload
:param headers: Object header
"""
request = f'{HTTP_GATE}/upload/{cid}'
files = f'file=@{filepath};filename={os.path.basename(filepath)}'
cmd = f'curl -F \'{files}\' {request}'
if large_object:
files = f'file=@pipe;filename={os.path.basename(filepath)}'
cmd = f'mkfifo pipe;cat {filepath} > pipe & curl --no-buffer -F \'{files}\' {request}'
output = _cmd_run(cmd)
oid_re = re.search(r'"object_id": "(.*)"', output)
if not oid_re:
raise AssertionError(f'Could not find "object_id" in {output}')
return oid_re.group(1)
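The large_object branch streams the file into curl through a named pipe, so the whole payload never has to be buffered in memory. An illustrative call (`cid` and `big_file_path` are assumed to exist already):

    oid = upload_via_http_gate_curl(cid=cid, filepath=big_file_path, large_object=True)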
@keyword('Get via HTTP Gate using Curl')
def get_via_http_curl(cid: str, oid: str) -> str:
"""
    This function gets the given object from the HTTP gate using the curl utility.
:param cid: CID to get object from
:param oid: object OID
"""
request = f'{HTTP_GATE}/get/{cid}/{oid}'
filename = f"{ASSETS_DIR}/{cid}_{oid}_{str(uuid.uuid4())}"
cmd = f'curl {request} > {filename}'
_cmd_run(cmd)
return filename
def _attach_allure_step(request: str, status_code: int, req_type='GET'):
if 'allure' in sys.modules:
command_attachment = (
f"REQUEST: '{request}'\n"
f'RESPONSE:\n {status_code}\n'
)
with allure.step(f'{req_type} Request'):
allure.attach(command_attachment, f'{req_type} Request', allure.attachment_type.TEXT)
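Put together, a typical gate round-trip in a test reads as the following hedged sketch (`cid` is assumed to identify an existing container and `source_file` an existing local file):

    # Hedged sketch: upload a local file, then read it back through the gate.
    oid = upload_via_http_gate(cid=cid, path=source_file)
    downloaded = get_via_http_gate(cid=cid, oid=oid)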

View file

@@ -1,236 +0,0 @@
#!/usr/bin/python3
import json
import os
import uuid
import boto3
import botocore
import urllib3
from robot.api import logger
from robot.api.deco import keyword
from cli_helpers import _run_with_passwd
from common import GATE_PUB_KEY, NEOFS_ENDPOINT, S3_GATE
##########################################################
# Disabling warnings on self-signed certificate which the
# boto library produces on requests to S3-gate in dev-env.
urllib3.disable_warnings()
##########################################################
ROBOT_AUTO_KEYWORDS = False
CREDENTIALS_CREATE_TIMEOUT = '30s'
NEOFS_EXEC = os.getenv('NEOFS_EXEC', 'neofs-authmate')
@keyword('Init S3 Credentials')
def init_s3_credentials(wallet):
bucket = str(uuid.uuid4())
s3_bearer_rules = "robot/resources/files/s3_bearer_rules.json"
cmd = (
f'{NEOFS_EXEC} --debug --with-log --timeout {CREDENTIALS_CREATE_TIMEOUT} '
f'issue-secret --wallet {wallet} --gate-public-key={GATE_PUB_KEY} '
f'--peer {NEOFS_ENDPOINT} --container-friendly-name {bucket} '
f'--bearer-rules {s3_bearer_rules}'
)
logger.info(f"Executing command: {cmd}")
try:
output = _run_with_passwd(cmd)
logger.info(f"Command completed with output: {output}")
# first five string are log output, cutting them off and parse
# the rest of the output as JSON
output = '\n'.join(output.split('\n')[5:])
output_dict = json.loads(output)
return (output_dict['container_id'],
bucket,
output_dict['access_key_id'],
output_dict['secret_access_key'],
output_dict['owner_private_key'])
except Exception as exc:
raise RuntimeError("failed to init s3 credentials") from exc
@keyword('Config S3 client')
def config_s3_client(access_key_id, secret_access_key):
try:
session = boto3.session.Session()
s3_client = session.client(
service_name='s3',
aws_access_key_id=access_key_id,
aws_secret_access_key=secret_access_key,
endpoint_url=S3_GATE, verify=False
)
return s3_client
except botocore.exceptions.ClientError as err:
raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
@keyword('List objects S3 v2')
def list_objects_s3_v2(s3_client, bucket):
try:
response = s3_client.list_objects_v2(Bucket=bucket)
logger.info(f"S3 v2 List objects result: {response['Contents']}")
obj_list = []
for obj in response['Contents']:
obj_list.append(obj['Key'])
logger.info(f"Found s3 objects: {obj_list}")
return obj_list
except botocore.exceptions.ClientError as err:
raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
@keyword('List objects S3')
def list_objects_s3(s3_client, bucket):
try:
response = s3_client.list_objects(Bucket=bucket)
logger.info(f"S3 List objects result: {response['Contents']}")
obj_list = []
for obj in response['Contents']:
obj_list.append(obj['Key'])
logger.info(f"Found s3 objects: {obj_list}")
return obj_list
except botocore.exceptions.ClientError as err:
raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
@keyword('Create bucket S3')
def create_bucket_s3(s3_client):
bucket_name = str(uuid.uuid4())
try:
s3_bucket = s3_client.create_bucket(Bucket=bucket_name)
logger.info(f"Created S3 bucket: {s3_bucket}")
return bucket_name
except botocore.exceptions.ClientError as err:
raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
@keyword('List buckets S3')
def list_buckets_s3(s3_client):
found_buckets = []
try:
response = s3_client.list_buckets()
logger.info(f"S3 List buckets result: {response}")
for bucket in response['Buckets']:
found_buckets.append(bucket['Name'])
return found_buckets
except botocore.exceptions.ClientError as err:
raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
@keyword('Delete bucket S3')
def delete_bucket_s3(s3_client, bucket):
try:
response = s3_client.delete_bucket(Bucket=bucket)
logger.info(f"S3 Delete bucket result: {response}")
return response
except botocore.exceptions.ClientError as err:
raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
@keyword('Head bucket S3')
def head_bucket(s3_client, bucket):
try:
response = s3_client.head_bucket(Bucket=bucket)
logger.info(f"S3 Head bucket result: {response}")
return response
except botocore.exceptions.ClientError as err:
raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
@keyword('Put object S3')
def put_object_s3(s3_client, bucket, filepath):
filename = os.path.basename(filepath)
with open(filepath, "rb") as put_file:
file_content = put_file.read()
try:
response = s3_client.put_object(Body=file_content, Bucket=bucket, Key=filename)
logger.info(f"S3 Put object result: {response}")
except botocore.exceptions.ClientError as err:
raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
@keyword('Head object S3')
def head_object_s3(s3_client, bucket, object_key):
try:
response = s3_client.head_object(Bucket=bucket, Key=object_key)
logger.info(f"S3 Head object result: {response}")
return response
except botocore.exceptions.ClientError as err:
raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
@keyword('Delete object S3')
def delete_object_s3(s3_client, bucket, object_key):
try:
response = s3_client.delete_object(Bucket=bucket, Key=object_key)
logger.info(f"S3 Put object result: {response}")
return response
except botocore.exceptions.ClientError as err:
raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
@keyword('Copy object S3')
def copy_object_s3(s3_client, bucket, object_key):
filename = f"{os.getcwd()}/{uuid.uuid4()}"
try:
response = s3_client.copy_object(Bucket=bucket,
CopySource=f"{bucket}/{object_key}",
Key=filename)
logger.info(f"S3 Copy object result: {response}")
return filename
except botocore.exceptions.ClientError as err:
raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err
@keyword('Get object S3')
def get_object_s3(s3_client, bucket, object_key):
filename = f"{os.getcwd()}/{uuid.uuid4()}"
try:
response = s3_client.get_object(Bucket=bucket, Key=object_key)
with open(f"{filename}", 'wb') as get_file:
chunk = response['Body'].read(1024)
while chunk:
get_file.write(chunk)
chunk = response['Body'].read(1024)
return filename
except botocore.exceptions.ClientError as err:
raise Exception(f"Error Message: {err.response['Error']['Message']}\n"
f"Http status code: {err.response['ResponseMetadata']['HTTPStatusCode']}") from err

View file

@@ -0,0 +1,194 @@
#!/usr/bin/python3
import json
import os
import uuid
from enum import Enum
import boto3
from botocore.exceptions import ClientError
import urllib3
from robot.api import logger
from robot.api.deco import keyword
from cli_helpers import _run_with_passwd, log_command_execution
from common import GATE_PUB_KEY, NEOFS_ENDPOINT, S3_GATE
##########################################################
# Disabling warnings on self-signed certificate which the
# boto library produces on requests to S3-gate in dev-env.
urllib3.disable_warnings()
##########################################################
ROBOT_AUTO_KEYWORDS = False
CREDENTIALS_CREATE_TIMEOUT = '30s'
NEOFS_EXEC = os.getenv('NEOFS_EXEC', 'neofs-authmate')
ASSETS_DIR = os.getenv('ASSETS_DIR', 'TemporaryDir/')
class VersioningStatus(Enum):
ENABLED = 'Enabled'
SUSPENDED = 'Suspended'
@keyword('Init S3 Credentials')
def init_s3_credentials(wallet, s3_bearer_rules_file: str = None):
bucket = str(uuid.uuid4())
s3_bearer_rules = s3_bearer_rules_file or 'robot/resources/files/s3_bearer_rules.json'
cmd = (
f'{NEOFS_EXEC} --debug --with-log --timeout {CREDENTIALS_CREATE_TIMEOUT} '
f'issue-secret --wallet {wallet} --gate-public-key={GATE_PUB_KEY} '
f'--peer {NEOFS_ENDPOINT} --container-friendly-name {bucket} '
f'--bearer-rules {s3_bearer_rules}'
)
logger.info(f'Executing command: {cmd}')
try:
output = _run_with_passwd(cmd)
logger.info(f'Command completed with output: {output}')
        # the first five lines are log output; cut them off and parse
        # the rest of the output as JSON
output = '\n'.join(output.split('\n')[5:])
output_dict = json.loads(output)
return (output_dict['container_id'],
bucket,
output_dict['access_key_id'],
output_dict['secret_access_key'],
output_dict['owner_private_key'])
except Exception as exc:
raise RuntimeError(f'Failed to init s3 credentials because of error\n{exc}') from exc
@keyword('Config S3 client')
def config_s3_client(access_key_id: str, secret_access_key: str):
try:
session = boto3.session.Session()
s3_client = session.client(
service_name='s3',
aws_access_key_id=access_key_id,
aws_secret_access_key=secret_access_key,
endpoint_url=S3_GATE, verify=False
)
return s3_client
except ClientError as err:
raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n'
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err
@keyword('Create bucket S3')
def create_bucket_s3(s3_client):
bucket_name = str(uuid.uuid4())
try:
s3_bucket = s3_client.create_bucket(Bucket=bucket_name)
log_command_execution(f'Created S3 bucket {bucket_name}', s3_bucket)
return bucket_name
except ClientError as err:
raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n'
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err
@keyword('List buckets S3')
def list_buckets_s3(s3_client):
found_buckets = []
try:
response = s3_client.list_buckets()
log_command_execution('S3 List buckets result', response)
for bucket in response['Buckets']:
found_buckets.append(bucket['Name'])
return found_buckets
except ClientError as err:
raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n'
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err
@keyword('Delete bucket S3')
def delete_bucket_s3(s3_client, bucket: str):
try:
response = s3_client.delete_bucket(Bucket=bucket)
log_command_execution('S3 Delete bucket result', response)
return response
except ClientError as err:
log_command_execution('S3 Delete bucket error', str(err))
raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n'
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err
@keyword('Head bucket S3')
def head_bucket(s3_client, bucket: str):
try:
response = s3_client.head_bucket(Bucket=bucket)
log_command_execution('S3 Head bucket result', response)
return response
except ClientError as err:
log_command_execution('S3 Head bucket error', str(err))
raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n'
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err
@keyword('Set bucket versioning status')
def set_bucket_versioning(s3_client, bucket_name: str, status: VersioningStatus):
try:
response = s3_client.put_bucket_versioning(Bucket=bucket_name, VersioningConfiguration={'Status': status.value})
log_command_execution('S3 Set bucket versioning to', response)
except ClientError as err:
raise Exception(f'Got error during set bucket versioning: {err}') from err
@keyword('Get bucket versioning status')
def get_bucket_versioning_status(s3_client, bucket_name: str) -> str:
try:
response = s3_client.get_bucket_versioning(Bucket=bucket_name)
status = response.get('Status')
log_command_execution('S3 Got bucket versioning status', response)
return status
except ClientError as err:
raise Exception(f'Got error during get bucket versioning status: {err}') from err
@keyword('Put bucket tagging')
def put_bucket_tagging(s3_client, bucket_name: str, tags: list):
try:
tags = [{'Key': tag_key, 'Value': tag_value} for tag_key, tag_value in tags]
tagging = {'TagSet': tags}
response = s3_client.put_bucket_tagging(Bucket=bucket_name, Tagging=tagging)
log_command_execution('S3 Put bucket tagging', response)
except ClientError as err:
raise Exception(f'Got error during put bucket tagging: {err}') from err
@keyword('Get bucket tagging')
def get_bucket_tagging(s3_client, bucket_name: str) -> list:
try:
response = s3_client.get_bucket_tagging(Bucket=bucket_name)
log_command_execution('S3 Get bucket tagging', response)
return response.get('TagSet')
except ClientError as err:
raise Exception(f'Got error during get bucket tagging: {err}') from err
@keyword('Delete bucket tagging')
def delete_bucket_tagging(s3_client, bucket_name: str):
try:
response = s3_client.delete_bucket_tagging(Bucket=bucket_name)
log_command_execution('S3 Delete bucket tagging', response)
except ClientError as err:
raise Exception(f'Got error during delete bucket tagging: {err}') from err
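A hedged end-to-end sketch of the bucket keywords above, via the boto3 path (the wallet path is illustrative):

    _, _, access_key_id, secret_access_key, _ = init_s3_credentials('/path/to/wallet.json')
    s3_client = config_s3_client(access_key_id, secret_access_key)
    bucket = create_bucket_s3(s3_client)
    set_bucket_versioning(s3_client, bucket, VersioningStatus.ENABLED)
    assert get_bucket_versioning_status(s3_client, bucket) == VersioningStatus.ENABLED.value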

View file

@@ -0,0 +1,330 @@
#!/usr/bin/python3
import os
import uuid
from enum import Enum
from typing import Optional, List
import urllib3
from botocore.exceptions import ClientError
from robot.api import logger
from robot.api.deco import keyword
from cli_helpers import log_command_execution
from python_keywords.aws_cli_client import AwsCliClient
##########################################################
# Disabling warnings on self-signed certificate which the
# boto library produces on requests to S3-gate in dev-env.
urllib3.disable_warnings()
##########################################################
ROBOT_AUTO_KEYWORDS = False
CREDENTIALS_CREATE_TIMEOUT = '30s'
ASSETS_DIR = os.getenv('ASSETS_DIR', 'TemporaryDir/')
class VersioningStatus(Enum):
ENABLED = 'Enabled'
SUSPENDED = 'Suspended'
@keyword('List objects S3 v2')
def list_objects_s3_v2(s3_client, bucket: str) -> list:
try:
response = s3_client.list_objects_v2(Bucket=bucket)
content = response.get('Contents', [])
log_command_execution('S3 v2 List objects result', response)
obj_list = []
for obj in content:
obj_list.append(obj['Key'])
logger.info(f'Found s3 objects: {obj_list}')
return obj_list
except ClientError as err:
raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n'
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err
@keyword('List objects S3')
def list_objects_s3(s3_client, bucket: str) -> list:
try:
response = s3_client.list_objects(Bucket=bucket)
content = response.get('Contents', [])
log_command_execution('S3 List objects result', response)
obj_list = []
for obj in content:
obj_list.append(obj['Key'])
logger.info(f'Found s3 objects: {obj_list}')
return obj_list
except ClientError as err:
raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n'
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err
@keyword('List objects versions S3')
def list_objects_versions_s3(s3_client, bucket: str) -> list:
try:
response = s3_client.list_object_versions(Bucket=bucket)
versions = response.get('Versions', [])
log_command_execution('S3 List objects versions result', response)
return versions
except ClientError as err:
raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n'
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err
@keyword('Put object S3')
def put_object_s3(s3_client, bucket: str, filepath: str):
filename = os.path.basename(filepath)
if isinstance(s3_client, AwsCliClient):
file_content = filepath
else:
with open(filepath, 'rb') as put_file:
file_content = put_file.read()
try:
response = s3_client.put_object(Body=file_content, Bucket=bucket, Key=filename)
log_command_execution('S3 Put object result', response)
return response.get('VersionId')
except ClientError as err:
raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n'
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err
@keyword('Head object S3')
def head_object_s3(s3_client, bucket: str, object_key: str, version_id: str = None):
try:
params = {'Bucket': bucket, 'Key': object_key}
if version_id:
params['VersionId'] = version_id
response = s3_client.head_object(**params)
log_command_execution('S3 Head object result', response)
return response
except ClientError as err:
raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n'
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err
@keyword('Delete object S3')
def delete_object_s3(s3_client, bucket, object_key, version_id: str = None):
try:
params = {'Bucket': bucket, 'Key': object_key}
if version_id:
params['VersionId'] = version_id
response = s3_client.delete_object(**params)
log_command_execution('S3 Delete object result', response)
return response
except ClientError as err:
raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n'
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err
@keyword('Delete objects S3')
def delete_objects_s3(s3_client, bucket: str, object_keys: list):
try:
response = s3_client.delete_objects(Bucket=bucket, Delete=_make_objs_dict(object_keys))
log_command_execution('S3 Delete objects result', response)
return response
except ClientError as err:
raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n'
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err
@keyword('Copy object S3')
def copy_object_s3(s3_client, bucket, object_key, bucket_dst=None):
filename = f'{os.getcwd()}/{uuid.uuid4()}'
try:
response = s3_client.copy_object(Bucket=bucket_dst or bucket,
CopySource=f'{bucket}/{object_key}',
Key=filename)
log_command_execution('S3 Copy objects result', response)
return filename
except ClientError as err:
raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n'
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err
@keyword('Get object S3')
def get_object_s3(s3_client, bucket: str, object_key: str, version_id: str = None):
filename = f'{ASSETS_DIR}/{uuid.uuid4()}'
try:
params = {'Bucket': bucket, 'Key': object_key}
if version_id:
params['VersionId'] = version_id
if isinstance(s3_client, AwsCliClient):
params['file_path'] = filename
response = s3_client.get_object(**params)
log_command_execution('S3 Get objects result', response)
if not isinstance(s3_client, AwsCliClient):
with open(f'{filename}', 'wb') as get_file:
chunk = response['Body'].read(1024)
while chunk:
get_file.write(chunk)
chunk = response['Body'].read(1024)
return filename
except ClientError as err:
raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n'
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err
@keyword('Create multipart upload S3')
def create_multipart_upload_s3(s3_client, bucket_name: str, object_key: str) -> str:
try:
response = s3_client.create_multipart_upload(Bucket=bucket_name, Key=object_key)
log_command_execution('S3 Created multipart upload', response)
assert response.get('UploadId'), f'Expected UploadId in response:\n{response}'
return response.get('UploadId')
except ClientError as err:
raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n'
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err
@keyword('List multipart uploads S3')
def list_multipart_uploads_s3(s3_client, bucket_name: str) -> Optional[List[dict]]:
try:
response = s3_client.list_multipart_uploads(Bucket=bucket_name)
log_command_execution('S3 List multipart upload', response)
return response.get('Uploads')
except ClientError as err:
raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n'
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err
@keyword('Abort multipart upload S3')
def abort_multipart_uploads_s3(s3_client, bucket_name: str, object_key: str, upload_id: str):
try:
response = s3_client.abort_multipart_upload(Bucket=bucket_name, Key=object_key, UploadId=upload_id)
log_command_execution('S3 Abort multipart upload', response)
except ClientError as err:
raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n'
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err
@keyword('Upload part S3')
def upload_part_s3(s3_client, bucket_name: str, object_key: str, upload_id: str, part_num: int, filepath: str) -> str:
if isinstance(s3_client, AwsCliClient):
file_content = filepath
else:
with open(filepath, 'rb') as put_file:
file_content = put_file.read()
try:
response = s3_client.upload_part(UploadId=upload_id, Bucket=bucket_name, Key=object_key, PartNumber=part_num,
Body=file_content)
log_command_execution('S3 Upload part', response)
assert response.get('ETag'), f'Expected ETag in response:\n{response}'
return response.get('ETag')
except ClientError as err:
raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n'
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err
@keyword('List parts S3')
def list_parts_s3(s3_client, bucket_name: str, object_key: str, upload_id: str) -> List[dict]:
try:
response = s3_client.list_parts(UploadId=upload_id, Bucket=bucket_name, Key=object_key)
log_command_execution('S3 List part', response)
assert response.get('Parts'), f'Expected Parts in response:\n{response}'
return response.get('Parts')
except ClientError as err:
raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n'
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err
@keyword('Complete multipart upload S3')
def complete_multipart_upload_s3(s3_client, bucket_name: str, object_key: str, upload_id: str,
parts: list):
try:
parts = [{'ETag': etag, 'PartNumber': part_num} for part_num, etag in parts]
response = s3_client.complete_multipart_upload(Bucket=bucket_name, Key=object_key, UploadId=upload_id,
MultipartUpload={'Parts': parts})
log_command_execution('S3 Complete multipart upload', response)
except ClientError as err:
raise Exception(f'Error Message: {err.response["Error"]["Message"]}\n'
f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}') from err
@keyword('Put object tagging')
def put_object_tagging(s3_client, bucket_name: str, object_key: str, tags: list):
try:
tags = [{'Key': tag_key, 'Value': tag_value} for tag_key, tag_value in tags]
tagging = {'TagSet': tags}
s3_client.put_object_tagging(Bucket=bucket_name, Key=object_key, Tagging=tagging)
log_command_execution('S3 Put object tagging', str(tags))
except ClientError as err:
raise Exception(f'Got error during put object tagging: {err}') from err
@keyword('Get object tagging')
def get_object_tagging(s3_client, bucket_name: str, object_key: str) -> list:
try:
response = s3_client.get_object_tagging(Bucket=bucket_name, Key=object_key)
log_command_execution('S3 Get object tagging', response)
return response.get('TagSet')
except ClientError as err:
raise Exception(f'Got error during get object tagging: {err}') from err
@keyword('Delete object tagging')
def delete_object_tagging(s3_client, bucket_name: str, object_key: str):
try:
response = s3_client.delete_object_tagging(Bucket=bucket_name, Key=object_key)
log_command_execution('S3 Delete object tagging', response)
except ClientError as err:
raise Exception(f'Got error during delete object tagging: {err}') from err
@keyword('Get object attributes')
def get_object_attributes(s3_client, bucket_name: str, object_key: str, *attributes: str, version_id: str = None,
max_parts: int = None, part_number: int = None, get_full_resp=True) -> dict:
try:
if not isinstance(s3_client, AwsCliClient):
logger.warn('Method get_object_attributes is not supported by boto3 client')
return {}
response = s3_client.get_object_attributes(bucket_name, object_key, *attributes, version_id=version_id,
max_parts=max_parts, part_number=part_number)
log_command_execution('S3 Get object attributes', response)
for attr in attributes:
assert attr in response, f'Expected attribute {attr} in {response}'
if get_full_resp:
return response
else:
return response.get(attributes[0])
except ClientError as err:
raise Exception(f'Got error during get object attributes: {err}') from err
def _make_objs_dict(key_names):
objs_list = []
for key in key_names:
obj_dict = {'Key': key}
objs_list.append(obj_dict)
objs_dict = {'Objects': objs_list}
return objs_dict
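As with the bucket module, these keywords compose into whole scenarios. A hedged multipart sketch, reusing `s3_client` and `bucket` from the bucket keywords and an illustrative local file `part_path`:

    upload_id = create_multipart_upload_s3(s3_client, bucket, 'big-object')
    etag = upload_part_s3(s3_client, bucket, 'big-object', upload_id, 1, part_path)
    complete_multipart_upload_s3(s3_client, bucket, 'big-object', upload_id, [(1, etag)])
    assert 'big-object' in list_objects_s3(s3_client, bucket)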

View file

@@ -126,9 +126,15 @@ def get_nodes_without_object(wallet: str, cid: str, oid: str):
    """
    nodes_list = []
    for node in NEOFS_NETMAP:
        try:
            res = neofs_verbs.head_object(wallet, cid, oid,
                                          endpoint=node,
                                          is_direct=True)
            if res is None:
                nodes_list.append(node)
        except Exception as err:
            if 'object not found' in str(err):
                nodes_list.append(node)
            else:
                raise Exception(f'Got error {err} on head object command') from err
    return nodes_list

View file

@@ -5,7 +5,8 @@ Library    Collections
Library    OperatingSystem
Library    container.py
Library    s3_gate_bucket.py
Library    s3_gate_object.py
Library    epoch.py
Library    utility_keywords.py

View file

@@ -5,7 +5,8 @@ Library    Collections
Library    OperatingSystem
Library    container.py
Library    s3_gate_bucket.py
Library    s3_gate_object.py
Library    utility_keywords.py
Resource    payment_operations.robot

View file

@@ -0,0 +1,8 @@
# DevEnv variables
export NEOFS_MORPH_DISABLE_CACHE=true
export DEVENV_PATH="${DEVENV_PATH:-${VIRTUAL_ENV}/../../neofs-dev-env}"
pushd $DEVENV_PATH > /dev/null
export `make env`
popd > /dev/null
export PYTHONPATH=${PYTHONPATH}:${VIRTUAL_ENV}/../neofs-keywords/lib:${VIRTUAL_ENV}/../neofs-keywords/robot:${VIRTUAL_ENV}/../robot/resources/lib/:${VIRTUAL_ENV}/../robot/resources/lib/python_keywords:${VIRTUAL_ENV}/../robot/resources/lib/robot:${VIRTUAL_ENV}/../robot/variables:${VIRTUAL_ENV}/../pytest_tests/helpers

View file

@@ -0,0 +1,10 @@
robotframework==4.1.2
requests==2.25.1
pexpect==4.8.0
boto3==1.16.33
docker==4.4.0
botocore==1.19.33
urllib3==1.26.3
base58==2.1.0
allure-pytest==2.9.45
pytest==7.1.2