Simplify test checks and fixtures:

Remove the default SSH key path.
Replace the multiple file-generation fixtures with a single function (see the sketch below).
Rename references to keyword modules.
Update pytest test cases to be consistent with the new keywords.
Remove gas balance checks from container operations.
Run the initial gas transfer only if storage is not free.
Remove the Robot test suites for the S3 and HTTP gateways.
The S3 and HTTP tests are already covered by pytest and will be maintained there going forward.

Signed-off-by: Vladimir Domnich <v.domnich@yadro.com>
Vladimir Domnich 2022-07-05 14:17:36 +04:00 committed by Anastasia Prasolova
parent 8acf738147
commit 6e23e7d454
14 changed files with 122 additions and 335 deletions
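
For context, the fixture consolidation works as follows: the three file-generation fixtures are replaced by one plain helper that tests call directly with the size they need. The sketch below is assembled from the utility_keywords.py and test hunks in this commit (ASSETS_DIR, SIMPLE_OBJ_SIZE and COMPLEX_OBJ_SIZE are assumed to come from common.py, as in the diff); it is an illustration, not a verbatim copy of the final code.

import os
import uuid

from common import ASSETS_DIR, SIMPLE_OBJ_SIZE


def generate_file(size: int = SIMPLE_OBJ_SIZE) -> str:
    """Generate a binary file of `size` bytes and return its path."""
    file_path = f"{os.getcwd()}/{ASSETS_DIR}/{str(uuid.uuid4())}"
    with open(file_path, 'wb') as fout:
        fout.write(os.urandom(size))
    return file_path


# In the updated tests, callers invoke the helper directly instead of requesting fixtures, e.g.:
#   file_path_simple = generate_file()                   # replaces the `generate_file` fixture
#   file_path_large  = generate_file(COMPLEX_OBJ_SIZE)   # replaces `generate_large_file` / `generate_files`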

@@ -71,7 +71,7 @@ class HostClient:
self.ip = ip self.ip = ip
self.login = login self.login = login
self.password = password self.password = password
self.pk = os.getenv('SSH_PK_PATH', '/root/.ssh/id_rsa') self.pk = os.getenv('SSH_PK_PATH')
if init_ssh_client: if init_ssh_client:
self.create_connection(self.SSH_CONNECTION_ATTEMPTS) self.create_connection(self.SSH_CONNECTION_ATTEMPTS)

@@ -11,13 +11,10 @@ from robot.api import deco
import rpc_client import rpc_client
import wallet import wallet
from cli_helpers import _cmd_run from cli_helpers import _cmd_run
from common import (ASSETS_DIR, COMPLEX_OBJ_SIZE, COMMON_PLACEMENT_RULE, from common import (ASSETS_DIR, COMMON_PLACEMENT_RULE, CONTROL_NODE_USER, CONTROL_NODE_PWD,
MAINNET_WALLET_PATH, NEO_MAINNET_ENDPOINT, SIMPLE_OBJ_SIZE, REMOTE_HOST, CONTROL_NODE_USER, FREE_STORAGE, MAINNET_WALLET_PATH, NEO_MAINNET_ENDPOINT, REMOTE_HOST)
CONTROL_NODE_PWD)
from payment_neogo import neofs_deposit, transfer_mainnet_gas from payment_neogo import neofs_deposit, transfer_mainnet_gas
from python_keywords.container import create_container from python_keywords.container import create_container
from python_keywords.payment_neogo import get_balance
from python_keywords.utility_keywords import generate_file_and_file_hash
from ssh_helper import HostClient from ssh_helper import HostClient
from wellknown_acl import PUBLIC_ACL from wellknown_acl import PUBLIC_ACL
@@ -50,7 +47,7 @@ def init_wallet_with_address():
full_path = f'{os.getcwd()}/{ASSETS_DIR}' full_path = f'{os.getcwd()}/{ASSETS_DIR}'
os.mkdir(full_path) os.mkdir(full_path)
yield wallet.init_wallet_w_addr(ASSETS_DIR) yield wallet.init_wallet(ASSETS_DIR)
shutil.rmtree(full_path) shutil.rmtree(full_path)
@@ -58,7 +55,6 @@ def init_wallet_with_address():
@pytest.fixture(scope='session') @pytest.fixture(scope='session')
@allure.title('Prepare wallet and deposit') @allure.title('Prepare wallet and deposit')
def prepare_wallet_and_deposit(init_wallet_with_address): def prepare_wallet_and_deposit(init_wallet_with_address):
deposit = 30
local_wallet_path = None local_wallet_path = None
wallet, addr, _ = init_wallet_with_address wallet, addr, _ = init_wallet_with_address
logger.info(f'Init wallet: {wallet},\naddr: {addr}') logger.info(f'Init wallet: {wallet},\naddr: {addr}')
@@ -68,6 +64,8 @@ def prepare_wallet_and_deposit(init_wallet_with_address):
local_wallet_path = os.path.join(ASSETS_DIR, os.path.basename(MAINNET_WALLET_PATH)) local_wallet_path = os.path.join(ASSETS_DIR, os.path.basename(MAINNET_WALLET_PATH))
ssh_client.copy_file_from_host(MAINNET_WALLET_PATH, local_wallet_path) ssh_client.copy_file_from_host(MAINNET_WALLET_PATH, local_wallet_path)
if not FREE_STORAGE:
deposit = 30
transfer_mainnet_gas(wallet, deposit + 1, wallet_path=local_wallet_path or MAINNET_WALLET_PATH) transfer_mainnet_gas(wallet, deposit + 1, wallet_path=local_wallet_path or MAINNET_WALLET_PATH)
neofs_deposit(wallet, deposit) neofs_deposit(wallet, deposit)
@@ -90,19 +88,12 @@ def prepare_public_container(prepare_wallet_and_deposit):
def prepare_container_impl(wallet: str, rule=COMMON_PLACEMENT_RULE, basic_acl: str = ''): def prepare_container_impl(wallet: str, rule=COMMON_PLACEMENT_RULE, basic_acl: str = ''):
balance = get_balance(wallet)
assert balance > 0, f'Expected balance is greater than 0. Got {balance}'
cid = create_container(wallet, rule=rule, basic_acl=basic_acl) cid = create_container(wallet, rule=rule, basic_acl=basic_acl)
new_balance = get_balance(wallet)
assert new_balance < balance, 'Expected some fee has charged'
return cid, wallet return cid, wallet
@allure.step('Wait until transaction accepted in block') @allure.step('Wait until transaction accepted in block')
def wait_unitl_transaction_accepted_in_block(tx_id: str): def wait_until_transaction_accepted_in_block(tx_id: str):
""" """
This function return True in case of accepted TX. This function return True in case of accepted TX.
Parameters: Parameters:
@@ -126,29 +117,3 @@ def wait_unitl_transaction_accepted_in_block(tx_id: str):
raise e raise e
sleep(sleep_interval) sleep(sleep_interval)
raise TimeoutError(f'Timeout {sleep_interval * attempts} sec. reached on waiting for transaction accepted') raise TimeoutError(f'Timeout {sleep_interval * attempts} sec. reached on waiting for transaction accepted')
@pytest.fixture()
@allure.title('Generate files')
def generate_files():
file_name_simple, _ = generate_file_and_file_hash(SIMPLE_OBJ_SIZE)
large_file_name, _ = generate_file_and_file_hash(COMPLEX_OBJ_SIZE)
return file_name_simple, large_file_name
@pytest.fixture()
@allure.title('Generate file')
def generate_file():
file_name_simple, _ = generate_file_and_file_hash(SIMPLE_OBJ_SIZE)
return file_name_simple
@pytest.fixture()
@allure.title('Generate large file')
def generate_large_file():
obj_size = int(os.getenv('BIG_OBJ_SIZE', COMPLEX_OBJ_SIZE))
file_path, file_hash = generate_file_and_file_hash(obj_size)
return file_path, file_hash

@@ -3,13 +3,13 @@ from time import sleep
import allure import allure
import pytest import pytest
from contract_keywords import tick_epoch from epoch import tick_epoch
from python_keywords.neofs import verify_head_tombstone from python_keywords.neofs import verify_head_tombstone
from python_keywords.neofs_verbs import (delete_object, get_object, get_range, from python_keywords.neofs_verbs import (delete_object, get_object, get_range,
get_range_hash, head_object, get_range_hash, head_object,
put_object, search_object) put_object, search_object)
from python_keywords.storage_policy import get_simple_object_copies from python_keywords.storage_policy import get_simple_object_copies
from python_keywords.utility_keywords import get_file_hash from python_keywords.utility_keywords import generate_file, get_file_hash
logger = logging.getLogger('NeoLogger') logger = logging.getLogger('NeoLogger')
@@ -19,7 +19,7 @@ CLEANUP_TIMEOUT = 10
@allure.title('Test native object API') @allure.title('Test native object API')
@pytest.mark.sanity @pytest.mark.sanity
@pytest.mark.grpc_api @pytest.mark.grpc_api
def test_object_api(prepare_container, generate_file): def test_object_api(prepare_container):
cid, wallet = prepare_container cid, wallet = prepare_container
wallet_cid = {'wallet': wallet, 'cid': cid} wallet_cid = {'wallet': wallet, 'cid': cid}
file_usr_header = {'key1': 1, 'key2': 'abc'} file_usr_header = {'key1': 1, 'key2': 'abc'}
@@ -27,15 +27,15 @@ def test_object_api(prepare_container, generate_file):
range_cut = '0:10' range_cut = '0:10'
oids = [] oids = []
file_name = generate_file file_path = generate_file()
file_hash = get_file_hash(file_name) file_hash = get_file_hash(file_path)
search_object(**wallet_cid, expected_objects_list=oids) search_object(**wallet_cid, expected_objects_list=oids)
with allure.step('Put objects'): with allure.step('Put objects'):
oids.append(put_object(wallet=wallet, path=file_name, cid=cid)) oids.append(put_object(wallet=wallet, path=file_path, cid=cid))
oids.append(put_object(wallet=wallet, path=file_name, cid=cid, user_headers=file_usr_header)) oids.append(put_object(wallet=wallet, path=file_path, cid=cid, user_headers=file_usr_header))
oids.append(put_object(wallet=wallet, path=file_name, cid=cid, user_headers=file_usr_header_oth)) oids.append(put_object(wallet=wallet, path=file_path, cid=cid, user_headers=file_usr_header_oth))
with allure.step('Validate storage policy for objects'): with allure.step('Validate storage policy for objects'):
for oid_to_check in oids: for oid_to_check in oids:

@@ -1,10 +1,12 @@
import logging import logging
import os
from random import choice from random import choice
from time import sleep from time import sleep
import allure import allure
from common import COMPLEX_OBJ_SIZE
import pytest import pytest
from contract_keywords import get_epoch, tick_epoch from epoch import get_epoch, tick_epoch
from python_keywords.http_gate import (get_via_http_curl, get_via_http_gate, from python_keywords.http_gate import (get_via_http_curl, get_via_http_gate,
get_via_http_gate_by_attribute, get_via_http_gate_by_attribute,
get_via_zip_http_gate, get_via_zip_http_gate,
@@ -12,7 +14,7 @@ from python_keywords.http_gate import (get_via_http_curl, get_via_http_gate,
upload_via_http_gate_curl) upload_via_http_gate_curl)
from python_keywords.neofs_verbs import get_object, put_object from python_keywords.neofs_verbs import get_object, put_object
from python_keywords.storage_policy import get_nodes_without_object from python_keywords.storage_policy import get_nodes_without_object
from python_keywords.utility_keywords import get_file_hash from python_keywords.utility_keywords import generate_file, get_file_hash
logger = logging.getLogger('NeoLogger') logger = logging.getLogger('NeoLogger')
@@ -26,7 +28,7 @@ CLEANUP_TIMEOUT = 10
class TestHttpGate: class TestHttpGate:
@allure.title('Test Put over gRPC, Get over HTTP') @allure.title('Test Put over gRPC, Get over HTTP')
def test_put_grpc_get_http(self, prepare_public_container, generate_files): def test_put_grpc_get_http(self, prepare_public_container):
""" """
Test that object can be put using gRPC interface and get using HTTP. Test that object can be put using gRPC interface and get using HTTP.
@@ -42,20 +44,20 @@
Hashes must be the same. Hashes must be the same.
""" """
cid, wallet = prepare_public_container cid, wallet = prepare_public_container
file_name_simple, large_file_name = generate_files file_path_simple, file_path_large = generate_file(), generate_file(COMPLEX_OBJ_SIZE)
with allure.step('Put objects using gRPC'): with allure.step('Put objects using gRPC'):
oid_simple = put_object(wallet=wallet, path=file_name_simple, cid=cid) oid_simple = put_object(wallet=wallet, path=file_path_simple, cid=cid)
oid_large = put_object(wallet=wallet, path=large_file_name, cid=cid) oid_large = put_object(wallet=wallet, path=file_path_large, cid=cid)
for oid, file_name in ((oid_simple, file_name_simple), (oid_large, large_file_name)): for oid, file_path in ((oid_simple, file_path_simple), (oid_large, file_path_large)):
self.get_object_and_verify_hashes(oid, file_name, wallet, cid) self.get_object_and_verify_hashes(oid, file_path, wallet, cid)
@allure.link('https://github.com/nspcc-dev/neofs-http-gw#uploading', name='uploading') @allure.link('https://github.com/nspcc-dev/neofs-http-gw#uploading', name='uploading')
@allure.link('https://github.com/nspcc-dev/neofs-http-gw#downloading', name='downloading') @allure.link('https://github.com/nspcc-dev/neofs-http-gw#downloading', name='downloading')
@pytest.mark.sanity @pytest.mark.sanity
@allure.title('Test Put over HTTP, Get over HTTP') @allure.title('Test Put over HTTP, Get over HTTP')
def test_put_http_get_http(self, prepare_public_container, generate_files): def test_put_http_get_http(self, prepare_public_container):
""" """
Test that object can be put and get using HTTP interface. Test that object can be put and get using HTTP interface.
@@ -69,14 +71,14 @@ class TestHttpGate:
Hashes must be the same. Hashes must be the same.
""" """
cid, wallet = prepare_public_container cid, wallet = prepare_public_container
file_name_simple, large_file_name = generate_files file_path_simple, file_path_large = generate_file(), generate_file(COMPLEX_OBJ_SIZE)
with allure.step('Put objects using HTTP'): with allure.step('Put objects using HTTP'):
oid_simple = upload_via_http_gate(cid=cid, path=file_name_simple) oid_simple = upload_via_http_gate(cid=cid, path=file_path_simple)
oid_large = upload_via_http_gate(cid=cid, path=large_file_name) oid_large = upload_via_http_gate(cid=cid, path=file_path_large)
for oid, file_name in ((oid_simple, file_name_simple), (oid_large, large_file_name)): for oid, file_path in ((oid_simple, file_path_simple), (oid_large, file_path_large)):
self.get_object_and_verify_hashes(oid, file_name, wallet, cid) self.get_object_and_verify_hashes(oid, file_path, wallet, cid)
@allure.link('https://github.com/nspcc-dev/neofs-http-gw#by-attributes', name='download by attributes') @allure.link('https://github.com/nspcc-dev/neofs-http-gw#by-attributes', name='download by attributes')
@allure.title('Test Put over HTTP, Get over HTTP with headers') @allure.title('Test Put over HTTP, Get over HTTP with headers')
@@ -87,7 +89,7 @@ class TestHttpGate:
{'cat%jpeg': 'cat%jpeg'} {'cat%jpeg': 'cat%jpeg'}
], ids=['simple', 'hyphen', 'percent'] ], ids=['simple', 'hyphen', 'percent']
) )
def test_put_http_get_http_with_headers(self, prepare_public_container, generate_files, attributes): def test_put_http_get_http_with_headers(self, prepare_public_container, attributes):
""" """
Test that object can be downloaded using different attributes in HTTP header. Test that object can be downloaded using different attributes in HTTP header.
@@ -101,18 +103,17 @@ class TestHttpGate:
Hashes must be the same. Hashes must be the same.
""" """
cid, wallet = prepare_public_container cid, wallet = prepare_public_container
file_name_simple, _ = generate_files file_path = generate_file()
with allure.step('Put objects using HTTP with attribute'): with allure.step('Put objects using HTTP with attribute'):
oid_simple = upload_via_http_gate(cid=cid, path=file_name_simple, oid_simple = upload_via_http_gate(cid=cid, path=file_path, headers=self._attr_into_header(attributes))
headers=self._attr_into_header(attributes))
self.get_object_by_attr_and_verify_hashes(oid_simple, file_name_simple, cid, attributes) self.get_object_by_attr_and_verify_hashes(oid_simple, file_path, cid, attributes)
@allure.title('Test Expiration-Epoch in HTTP header') @allure.title('Test Expiration-Epoch in HTTP header')
def test_expiration_epoch_in_http(self, prepare_public_container, generate_file): def test_expiration_epoch_in_http(self, prepare_public_container):
cid, wallet = prepare_public_container cid, wallet = prepare_public_container
file_name_simple = generate_file file_path = generate_file()
object_not_found_err = 'object not found' object_not_found_err = 'object not found'
oids = [] oids = []
@@ -123,7 +124,7 @@ class TestHttpGate:
headers = {'X-Attribute-Neofs-Expiration-Epoch': str(epoch)} headers = {'X-Attribute-Neofs-Expiration-Epoch': str(epoch)}
with allure.step('Put objects using HTTP with attribute Expiration-Epoch'): with allure.step('Put objects using HTTP with attribute Expiration-Epoch'):
oids.append(upload_via_http_gate(cid=cid, path=file_name_simple, headers=headers)) oids.append(upload_via_http_gate(cid=cid, path=file_path, headers=headers))
assert len(oids) == len(epochs), 'Expected all objects has been put successfully' assert len(oids) == len(epochs), 'Expected all objects has been put successfully'
@@ -143,32 +144,34 @@ class TestHttpGate:
get_via_http_gate(cid=cid, oid=oid) get_via_http_gate(cid=cid, oid=oid)
@allure.title('Test Zip in HTTP header') @allure.title('Test Zip in HTTP header')
def test_zip_in_http(self, prepare_public_container, generate_files): def test_zip_in_http(self, prepare_public_container):
cid, wallet = prepare_public_container cid, wallet = prepare_public_container
file_name_simple, file_name_complex = generate_files file_path_simple, file_path_large = generate_file(), generate_file(COMPLEX_OBJ_SIZE)
common_prefix = 'my_files' common_prefix = 'my_files'
headers1 = {'X-Attribute-FilePath': f'{common_prefix}/file1'} headers1 = {'X-Attribute-FilePath': f'{common_prefix}/file1'}
headers2 = {'X-Attribute-FilePath': f'{common_prefix}/file2'} headers2 = {'X-Attribute-FilePath': f'{common_prefix}/file2'}
upload_via_http_gate(cid=cid, path=file_name_simple, headers=headers1) upload_via_http_gate(cid=cid, path=file_path_simple, headers=headers1)
upload_via_http_gate(cid=cid, path=file_name_complex, headers=headers2) upload_via_http_gate(cid=cid, path=file_path_large, headers=headers2)
dir_path = get_via_zip_http_gate(cid=cid, prefix=common_prefix) dir_path = get_via_zip_http_gate(cid=cid, prefix=common_prefix)
with allure.step('Verify hashes'): with allure.step('Verify hashes'):
assert get_file_hash(f'{dir_path}/file1') == get_file_hash(file_name_simple) assert get_file_hash(f'{dir_path}/file1') == get_file_hash(file_path_simple)
assert get_file_hash(f'{dir_path}/file2') == get_file_hash(file_name_complex) assert get_file_hash(f'{dir_path}/file2') == get_file_hash(file_path_large)
@pytest.mark.curl @pytest.mark.curl
@pytest.mark.long @pytest.mark.long
@allure.title('Test Put over HTTP/Curl, Get over HTTP/Curl for large object') @allure.title('Test Put over HTTP/Curl, Get over HTTP/Curl for large object')
def test_put_http_get_http_large_file(self, prepare_public_container, generate_large_file): def test_put_http_get_http_large_file(self, prepare_public_container):
""" """
This test checks upload and download using curl with 'large' object. Large is object with size up to 20Mb. This test checks upload and download using curl with 'large' object. Large is object with size up to 20Mb.
""" """
cid, wallet = prepare_public_container cid, wallet = prepare_public_container
file_path, file_hash = generate_large_file
obj_size = int(os.getenv('BIG_OBJ_SIZE', COMPLEX_OBJ_SIZE))
file_path = generate_file(obj_size)
with allure.step('Put objects using HTTP'): with allure.step('Put objects using HTTP'):
oid_simple = upload_via_http_gate(cid=cid, path=file_path) oid_simple = upload_via_http_gate(cid=cid, path=file_path)
@@ -179,19 +182,19 @@ class TestHttpGate:
@pytest.mark.curl @pytest.mark.curl
@allure.title('Test Put/Get over HTTP using Curl utility') @allure.title('Test Put/Get over HTTP using Curl utility')
def test_put_http_get_http_curl(self, prepare_public_container, generate_files): def test_put_http_get_http_curl(self, prepare_public_container):
""" """
Test checks upload and download over HTTP using curl utility. Test checks upload and download over HTTP using curl utility.
""" """
cid, wallet = prepare_public_container cid, wallet = prepare_public_container
file_name_simple, large_file_name = generate_files file_path_simple, file_path_large = generate_file(), generate_file(COMPLEX_OBJ_SIZE)
with allure.step('Put objects using curl utility'): with allure.step('Put objects using curl utility'):
oid_simple = upload_via_http_gate_curl(cid=cid, filepath=file_name_simple) oid_simple = upload_via_http_gate_curl(cid=cid, filepath=file_path_simple)
oid_large = upload_via_http_gate_curl(cid=cid, filepath=large_file_name) oid_large = upload_via_http_gate_curl(cid=cid, filepath=file_path_large)
for oid, file_name in ((oid_simple, file_name_simple), (oid_large, large_file_name)): for oid, file_path in ((oid_simple, file_path_simple), (oid_large, file_path_large)):
self.get_object_and_verify_hashes(oid, file_name, wallet, cid, object_getter=get_via_http_curl) self.get_object_and_verify_hashes(oid, file_path, wallet, cid, object_getter=get_via_http_curl)
@staticmethod @staticmethod
@allure.step('Try to get object and expect error') @allure.step('Try to get object and expect error')
@@ -209,7 +212,7 @@ class TestHttpGate:
got_file_path_http = get_via_http_gate(cid=cid, oid=oid) got_file_path_http = get_via_http_gate(cid=cid, oid=oid)
got_file_path_http_attr = get_via_http_gate_by_attribute(cid=cid, attribute=attrs) got_file_path_http_attr = get_via_http_gate_by_attribute(cid=cid, attribute=attrs)
TestHttpGate._assert_hashes_the_same(file_name, got_file_path_http, got_file_path_http_attr) TestHttpGate._assert_hashes_are_equal(file_name, got_file_path_http, got_file_path_http_attr)
@staticmethod @staticmethod
@allure.step('Verify object can be get using HTTP') @allure.step('Verify object can be get using HTTP')
@@ -221,10 +224,10 @@ class TestHttpGate:
got_file_path = get_object(wallet=wallet, cid=cid, oid=oid, endpoint=random_node) got_file_path = get_object(wallet=wallet, cid=cid, oid=oid, endpoint=random_node)
got_file_path_http = object_getter(cid=cid, oid=oid) got_file_path_http = object_getter(cid=cid, oid=oid)
TestHttpGate._assert_hashes_the_same(file_name, got_file_path, got_file_path_http) TestHttpGate._assert_hashes_are_equal(file_name, got_file_path, got_file_path_http)
@staticmethod @staticmethod
def _assert_hashes_the_same(orig_file_name: str, got_file_1: str, got_file_2: str): def _assert_hashes_are_equal(orig_file_name: str, got_file_1: str, got_file_2: str):
msg = 'Expected hashes are equal for files {f1} and {f2}' msg = 'Expected hashes are equal for files {f1} and {f2}'
got_file_hash_http = get_file_hash(got_file_1) got_file_hash_http = get_file_hash(got_file_1)
assert get_file_hash(got_file_2) == got_file_hash_http, msg.format(f1=got_file_2, f2=got_file_1) assert get_file_hash(got_file_2) == got_file_hash_http, msg.format(f1=got_file_2, f2=got_file_1)

@@ -5,11 +5,11 @@ from random import choice, choices
import allure import allure
import pytest import pytest
from common import ASSETS_DIR, COMPLEX_OBJ_SIZE, SIMPLE_OBJ_SIZE from common import ASSETS_DIR, COMPLEX_OBJ_SIZE, SIMPLE_OBJ_SIZE
from contract_keywords import tick_epoch from epoch import tick_epoch
from python_keywords import s3_gate_bucket, s3_gate_object from python_keywords import s3_gate_bucket, s3_gate_object
from python_keywords.aws_cli_client import AwsCliClient from python_keywords.aws_cli_client import AwsCliClient
from python_keywords.container import list_containers from python_keywords.container import list_containers
from python_keywords.utility_keywords import (generate_file_and_file_hash, from python_keywords.utility_keywords import (generate_file, generate_file_and_file_hash,
get_file_hash) get_file_hash)
from utility import create_file_with_content, get_file_content, split_file from utility import create_file_with_content, get_file_content, split_file
@@ -67,13 +67,13 @@ class TestS3Gate:
s3_gate_bucket.delete_bucket_s3(self.s3_client, bucket) s3_gate_bucket.delete_bucket_s3(self.s3_client, bucket)
@allure.title('Test S3 Bucket API') @allure.title('Test S3 Bucket API')
def test_s3_buckets(self, generate_files): def test_s3_buckets(self):
""" """
Test base S3 Bucket API (Create/List/Head/Delete). Test base S3 Bucket API (Create/List/Head/Delete).
""" """
file_name_simple, file_name_large = generate_files file_path = generate_file()
file_name = self.object_key_from_file_path(file_name_simple) file_name = self.object_key_from_file_path(file_path)
with allure.step('Create buckets'): with allure.step('Create buckets'):
bucket_1 = s3_gate_bucket.create_bucket_s3(self.s3_client) bucket_1 = s3_gate_bucket.create_bucket_s3(self.s3_client)
@@ -94,7 +94,7 @@ class TestS3Gate:
s3_gate_bucket.head_bucket(self.s3_client, bucket_2) s3_gate_bucket.head_bucket(self.s3_client, bucket_2)
with allure.step('Check we can put/list object with S3 commands'): with allure.step('Check we can put/list object with S3 commands'):
s3_gate_object.put_object_s3(self.s3_client, bucket_1, file_name_simple) s3_gate_object.put_object_s3(self.s3_client, bucket_1, file_path)
s3_gate_object.head_object_s3(self.s3_client, bucket_1, file_name) s3_gate_object.head_object_s3(self.s3_client, bucket_1, file_name)
bucket_objects = s3_gate_object.list_objects_s3(self.s3_client, bucket_1) bucket_objects = s3_gate_object.list_objects_s3(self.s3_client, bucket_1)
@@ -122,13 +122,12 @@ class TestS3Gate:
@allure.title('Test S3 Object API') @allure.title('Test S3 Object API')
@pytest.mark.sanity @pytest.mark.sanity
@pytest.mark.parametrize('file_type', ['simple', 'large'], ids=['Simple object', 'Large object']) @pytest.mark.parametrize('file_type', ['simple', 'large'], ids=['Simple object', 'Large object'])
def test_s3_api_object(self, generate_files, file_type): def test_s3_api_object(self, file_type):
""" """
Test base S3 Object API (Put/Head/List) for simple and large objects. Test base S3 Object API (Put/Head/List) for simple and large objects.
""" """
file_name_simple, file_name_large = generate_files file_path = generate_file(SIMPLE_OBJ_SIZE if file_type == 'simple' else COMPLEX_OBJ_SIZE)
file_name_path = file_name_simple if file_type == 'simple' else file_name_large file_name = self.object_key_from_file_path(file_path)
file_name = self.object_key_from_file_path(file_name_path)
bucket_1 = s3_gate_bucket.create_bucket_s3(self.s3_client) bucket_1 = s3_gate_bucket.create_bucket_s3(self.s3_client)
bucket_2 = s3_gate_bucket.create_bucket_s3(self.s3_client) bucket_2 = s3_gate_bucket.create_bucket_s3(self.s3_client)
@@ -138,8 +137,7 @@ class TestS3Gate:
objects_list = s3_gate_object.list_objects_s3(self.s3_client, bucket) objects_list = s3_gate_object.list_objects_s3(self.s3_client, bucket)
assert not objects_list, f'Expected empty bucket, got {objects_list}' assert not objects_list, f'Expected empty bucket, got {objects_list}'
s3_gate_object.put_object_s3(self.s3_client, bucket, file_name_path) s3_gate_object.put_object_s3(self.s3_client, bucket, file_path)
s3_gate_object.put_object_s3(self.s3_client, bucket, file_name_large)
s3_gate_object.head_object_s3(self.s3_client, bucket, file_name) s3_gate_object.head_object_s3(self.s3_client, bucket, file_name)
bucket_objects = s3_gate_object.list_objects_s3(self.s3_client, bucket) bucket_objects = s3_gate_object.list_objects_s3(self.s3_client, bucket)
@@ -391,13 +389,13 @@ class TestS3Gate:
self.try_to_get_object_and_got_error(bucket_2, objects_to_delete_b2) self.try_to_get_object_and_got_error(bucket_2, objects_to_delete_b2)
@allure.title('Test S3: Copy object to the same bucket') @allure.title('Test S3: Copy object to the same bucket')
def test_s3_copy_same_bucket(self, generate_files): def test_s3_copy_same_bucket(self):
""" """
Test object can be copied to the same bucket. Test object can be copied to the same bucket.
""" """
file_simple, file_large = generate_files file_path_simple, file_path_large = generate_file(), generate_file(COMPLEX_OBJ_SIZE)
file_name_simple = self.object_key_from_file_path(file_simple) file_name_simple = self.object_key_from_file_path(file_path_simple)
file_name_large = self.object_key_from_file_path(file_large) file_name_large = self.object_key_from_file_path(file_path_large)
bucket_objects = [file_name_simple, file_name_large] bucket_objects = [file_name_simple, file_name_large]
bucket = s3_gate_bucket.create_bucket_s3(self.s3_client) bucket = s3_gate_bucket.create_bucket_s3(self.s3_client)
@@ -407,8 +405,8 @@ class TestS3Gate:
assert not objects_list, f'Expected empty bucket, got {objects_list}' assert not objects_list, f'Expected empty bucket, got {objects_list}'
with allure.step('Put objects into bucket'): with allure.step('Put objects into bucket'):
for obj in (file_simple, file_large): for file_path in (file_path_simple, file_path_large):
s3_gate_object.put_object_s3(self.s3_client, bucket, obj) s3_gate_object.put_object_s3(self.s3_client, bucket, file_path)
with allure.step('Copy one object into the same bucket'): with allure.step('Copy one object into the same bucket'):
copy_obj_path = s3_gate_object.copy_object_s3(self.s3_client, bucket, file_name_simple) copy_obj_path = s3_gate_object.copy_object_s3(self.s3_client, bucket, file_name_simple)
@@ -418,7 +416,7 @@ class TestS3Gate:
with allure.step('Check copied object has the same content'): with allure.step('Check copied object has the same content'):
got_copied_file = s3_gate_object.get_object_s3(self.s3_client, bucket, copy_obj_path) got_copied_file = s3_gate_object.get_object_s3(self.s3_client, bucket, copy_obj_path)
assert get_file_hash(file_simple) == get_file_hash(got_copied_file), 'Hashes must be the same' assert get_file_hash(file_path_simple) == get_file_hash(got_copied_file), 'Hashes must be the same'
with allure.step('Delete one object from bucket'): with allure.step('Delete one object from bucket'):
s3_gate_object.delete_object_s3(self.s3_client, bucket, file_name_simple) s3_gate_object.delete_object_s3(self.s3_client, bucket, file_name_simple)
@@ -427,13 +425,13 @@ class TestS3Gate:
self.check_objects_in_bucket(bucket, expected_objects=bucket_objects, unexpected_objects=[file_name_simple]) self.check_objects_in_bucket(bucket, expected_objects=bucket_objects, unexpected_objects=[file_name_simple])
@allure.title('Test S3: Copy object to another bucket') @allure.title('Test S3: Copy object to another bucket')
def test_s3_copy_to_another_bucket(self, generate_files): def test_s3_copy_to_another_bucket(self):
""" """
Test object can be copied to another bucket. Test object can be copied to another bucket.
""" """
file_simple, file_large = generate_files file_path_simple, file_path_large = generate_file(), generate_file(COMPLEX_OBJ_SIZE)
file_name_simple = self.object_key_from_file_path(file_simple) file_name_simple = self.object_key_from_file_path(file_path_simple)
file_name_large = self.object_key_from_file_path(file_large) file_name_large = self.object_key_from_file_path(file_path_large)
bucket_1_objects = [file_name_simple, file_name_large] bucket_1_objects = [file_name_simple, file_name_large]
bucket_1 = s3_gate_bucket.create_bucket_s3(self.s3_client) bucket_1 = s3_gate_bucket.create_bucket_s3(self.s3_client)
@@ -445,8 +443,8 @@ class TestS3Gate:
assert not objects_list, f'Expected empty bucket, got {objects_list}' assert not objects_list, f'Expected empty bucket, got {objects_list}'
with allure.step('Put objects into one bucket'): with allure.step('Put objects into one bucket'):
for obj in (file_simple, file_large): for file_path in (file_path_simple, file_path_large):
s3_gate_object.put_object_s3(self.s3_client, bucket_1, obj) s3_gate_object.put_object_s3(self.s3_client, bucket_1, file_path)
with allure.step('Copy object from first bucket into second'): with allure.step('Copy object from first bucket into second'):
copy_obj_path_b2 = s3_gate_object.copy_object_s3(self.s3_client, bucket_1, file_name_large, copy_obj_path_b2 = s3_gate_object.copy_object_s3(self.s3_client, bucket_1, file_name_large,
@@ -456,7 +454,7 @@ class TestS3Gate:
with allure.step('Check copied object has the same content'): with allure.step('Check copied object has the same content'):
got_copied_file_b2 = s3_gate_object.get_object_s3(self.s3_client, bucket_2, copy_obj_path_b2) got_copied_file_b2 = s3_gate_object.get_object_s3(self.s3_client, bucket_2, copy_obj_path_b2)
assert get_file_hash(file_large) == get_file_hash(got_copied_file_b2), 'Hashes must be the same' assert get_file_hash(file_path_large) == get_file_hash(got_copied_file_b2), 'Hashes must be the same'
with allure.step('Delete one object from first bucket'): with allure.step('Delete one object from first bucket'):
s3_gate_object.delete_object_s3(self.s3_client, bucket_1, file_name_simple) s3_gate_object.delete_object_s3(self.s3_client, bucket_1, file_name_simple)

@@ -2,10 +2,20 @@
import contract import contract
import sys
from robot.api import logger from robot.api import logger
from robot.api.deco import keyword from robot.api.deco import keyword
from robot.libraries.BuiltIn import BuiltIn from robot.libraries.BuiltIn import BuiltIn
ROBOT_AUTO_KEYWORDS = False
if "pytest" in sys.modules:
import os
IR_WALLET_PATH = os.getenv("IR_WALLET_PATH")
IR_WALLET_PASS = os.getenv("IR_WALLET_PASS")
SIDECHAIN_EP = os.getenv("MORPH_ENDPOINT")
else:
IR_WALLET_PATH = BuiltIn().get_variable_value("${IR_WALLET_PATH}") IR_WALLET_PATH = BuiltIn().get_variable_value("${IR_WALLET_PATH}")
IR_WALLET_PASS = BuiltIn().get_variable_value("${IR_WALLET_PASS}") IR_WALLET_PASS = BuiltIn().get_variable_value("${IR_WALLET_PASS}")
SIDECHAIN_EP = BuiltIn().get_variable_value("${MORPH_ENDPOINT}") SIDECHAIN_EP = BuiltIn().get_variable_value("${MORPH_ENDPOINT}")

@@ -3,6 +3,7 @@
import hashlib import hashlib
import os import os
import tarfile import tarfile
from typing import Tuple
import uuid import uuid
import docker import docker
@@ -17,10 +18,26 @@ from cli_helpers import _cmd_run
ROBOT_AUTO_KEYWORDS = False ROBOT_AUTO_KEYWORDS = False
@keyword('Generate file') def generate_file(size: int = SIMPLE_OBJ_SIZE) -> str:
def generate_file_and_file_hash(size: int) -> str:
""" """
Function generates a big binary file with the specified size in bytes Function generates a binary file with the specified size in bytes.
Args:
size (int): the size in bytes, can be declared as 6e+6 for example
Returns:
(str): the path to the generated file
"""
file_path = f"{os.getcwd()}/{ASSETS_DIR}/{str(uuid.uuid4())}"
with open(file_path, 'wb') as fout:
fout.write(os.urandom(size))
logger.info(f"file with size {size} bytes has been generated: {file_path}")
return file_path
@keyword('Generate file')
def generate_file_and_file_hash(size: int) -> Tuple[str, str]:
"""
Function generates a binary file with the specified size in bytes
and its hash. and its hash.
Args: Args:
size (int): the size in bytes, can be declared as 6e+6 for example size (int): the size in bytes, can be declared as 6e+6 for example
@@ -28,14 +45,10 @@ def generate_file_and_file_hash(size: int) -> str:
(str): the path to the generated file (str): the path to the generated file
(str): the hash of the generated file (str): the hash of the generated file
""" """
filename = f"{os.getcwd()}/{ASSETS_DIR}/{str(uuid.uuid4())}" file_path = generate_file(size)
with open(filename, 'wb') as fout: file_hash = get_file_hash(file_path)
fout.write(os.urandom(size))
logger.info(f"file with size {size} bytes has been generated: {filename}")
file_hash = get_file_hash(filename) return file_path, file_hash
return filename, file_hash
@keyword('Get File Hash') @keyword('Get File Hash')

@@ -5,7 +5,7 @@ Variables wellknown_acl.py
Library container.py Library container.py
Library epoch.py Library epoch.py
Library neofs_verbs.py Library neofs_verbs.py
Library nodes_management.py Library node_management.py
Library storage_policy.py Library storage_policy.py
Library utility_keywords.py Library utility_keywords.py

@@ -1,59 +0,0 @@
*** Settings ***
Variables common.py
Variables wellknown_acl.py
Library container.py
Library neofs_verbs.py
Library http_gate.py
Library storage_policy.py
Library utility_keywords.py
Resource payment_operations.robot
Resource setup_teardown.robot
*** Variables ***
${PLACEMENT_RULE} = REP 1 IN X CBF 1 SELECT 1 FROM * AS X
@{INCLUDE_SVC} = http_gate
*** Test cases ***
NeoFS HTTP Gateway
[Documentation] Creates container and does PUT, GET via HTTP Gate
[Timeout] 5 min
[Setup] Setup
Make Up ${INCLUDE_SVC}
${WALLET} ${_} ${_} = Prepare Wallet And Deposit
${CID} = Create container ${WALLET} rule=${PLACEMENT_RULE} basic_acl=${PUBLIC_ACL}
${FILE} ${HASH} = Generate file ${SIMPLE_OBJ_SIZE}
${FILE_L} ${L_HASH} = Generate file ${COMPLEX_OBJ_SIZE}
${S_OID} = Put object ${WALLET} ${FILE} ${CID}
${L_OID} = Put object ${WALLET} ${FILE_L} ${CID}
# By request from Service team - try to GET object from the node without object
@{GET_NODE_LIST} = Get nodes without object ${WALLET} ${CID} ${S_OID}
${NODE} = Evaluate random.choice($GET_NODE_LIST) random
${GET_OBJ_S} = Get object ${WALLET} ${CID} ${S_OID} ${EMPTY} s_file_read ${NODE}
${FILEPATH} = Get via HTTP Gate ${CID} ${S_OID}
${PLAIN_FILE_HASH} = Get file hash ${GET_OBJ_S}
${GATE_FILE_HASH} = Get file hash ${FILEPATH}
Should Be Equal ${HASH} ${PLAIN_FILE_HASH}
Should Be Equal ${HASH} ${GATE_FILE_HASH}
@{GET_NODE_LIST} = Get nodes without object ${WALLET} ${CID} ${L_OID}
${NODE} = Evaluate random.choice($GET_NODE_LIST) random
${GET_OBJ_L} = Get object ${WALLET} ${CID} ${L_OID} ${EMPTY} l_file_read ${NODE}
${FILEPATH} = Get via HTTP Gate ${CID} ${L_OID}
${PLAIN_FILE_HASH} = Get file hash ${GET_OBJ_L}
${GATE_FILE_HASH} = Get file hash ${FILEPATH}
Should Be Equal ${L_HASH} ${PLAIN_FILE_HASH}
Should Be Equal ${L_HASH} ${GATE_FILE_HASH}
[Teardown] Teardown http_gate

@@ -1,68 +0,0 @@
*** Settings ***
Variables common.py
Library Collections
Library OperatingSystem
Library container.py
Library s3_gate_bucket.py
Library s3_gate_object.py
Library epoch.py
Library utility_keywords.py
Resource setup_teardown.robot
Resource payment_operations.robot
*** Variables ***
@{INCLUDE_SVC} = s3_gate coredns
*** Test cases ***
Buckets in NeoFS S3 Gateway
[Documentation] Execute operations with bucket via S3 Gate
[Timeout] 10 min
[Setup] Setup
Make Up ${INCLUDE_SVC}
${WALLET} ${_} ${_} = Prepare Wallet And Deposit
${FILE_S3} ${_} = Generate file ${COMPLEX_OBJ_SIZE}
${_} ${S3_OBJECT_KEY} = Split Path ${FILE_S3}
${CID}
... ${BUCKET}
... ${ACCESS_KEY_ID}
... ${SEC_ACCESS_KEY}
... ${OWNER_PRIV_KEY} = Init S3 Credentials ${WALLET}
@{CONTEINERS_LIST} = List Containers ${WALLET}
List Should Contain Value ${CONTEINERS_LIST} ${CID}
${S3_CLIENT} = Config S3 client ${ACCESS_KEY_ID} ${SEC_ACCESS_KEY}
${NEW_BUCKET} = Create Bucket S3 ${S3_CLIENT}
${NEW_BUCKET_EMPTY} = Create Bucket S3 ${S3_CLIENT}
Head bucket S3 ${S3_CLIENT} ${BUCKET}
Head bucket S3 ${S3_CLIENT} ${NEW_BUCKET}
Put object S3 ${S3_CLIENT} ${NEW_BUCKET} ${FILE_S3}
Head object S3 ${S3_CLIENT} ${NEW_BUCKET} ${S3_OBJECT_KEY}
${LIST_S3_OBJECTS} = List objects S3 ${S3_CLIENT} ${NEW_BUCKET}
List Should Contain Value ${LIST_S3_OBJECTS} ${S3_OBJECT_KEY}
Run Keyword and Expect Error *
... Delete Bucket S3 ${S3_CLIENT} ${NEW_BUCKET}
Head bucket S3 ${S3_CLIENT} ${NEW_BUCKET}
Delete Bucket S3 ${S3_CLIENT} ${NEW_BUCKET_EMPTY}
Tick Epoch
Run Keyword And Expect Error *
... Head bucket S3 ${S3_CLIENT} ${NEW_BUCKET_EMPTY}
${BUCKET_LIST} = List Buckets S3 ${S3_CLIENT}
Tick Epoch
List Should Contain Value ${BUCKET_LIST} ${NEW_BUCKET}
List Should Not Contain Value ${BUCKET_LIST} ${NEW_BUCKET_EMPTY}
[Teardown] Teardown s3_gate_bucket

@@ -1,77 +0,0 @@
*** Settings ***
Variables common.py
Library Collections
Library OperatingSystem
Library container.py
Library s3_gate_bucket.py
Library s3_gate_object.py
Library utility_keywords.py
Resource payment_operations.robot
Resource setup_teardown.robot
*** Variables ***
@{INCLUDE_SVC} = s3_gate coredns
*** Test cases ***
Objects in NeoFS S3 Gateway
# TODO: check uploading an s3 object via neofs-cli and a neofs object via s3-gate
[Documentation] Execute operations with objects via S3 Gate
[Timeout] 10 min
[Setup] Setup
Make Up ${INCLUDE_SVC}
${WALLET} ${_} ${_} = Prepare Wallet And Deposit
${FILE_S3} ${FILE_S3_HASH} = Generate file ${COMPLEX_OBJ_SIZE}
${_} ${S3_OBJECT_KEY} = Split Path ${FILE_S3}
${CID}
... ${BUCKET}
... ${ACCESS_KEY_ID}
... ${SEC_ACCESS_KEY}
... ${OWNER_PRIV_KEY} = Init S3 Credentials ${WALLET}
@{CONTAINERS_LIST} = List Containers ${WALLET}
List Should Contain Value ${CONTAINERS_LIST} ${CID}
${S3_CLIENT} = Config S3 client ${ACCESS_KEY_ID} ${SEC_ACCESS_KEY}
${NEW_BUCKET} = Create Bucket S3 ${S3_CLIENT}
${NEW_BUCKET_2} = Create Bucket S3 ${S3_CLIENT}
Put object S3 ${S3_CLIENT} ${NEW_BUCKET} ${FILE_S3}
Head object S3 ${S3_CLIENT} ${NEW_BUCKET} ${S3_OBJECT_KEY}
Put object S3 ${S3_CLIENT} ${NEW_BUCKET_2} ${FILE_S3}
Head object S3 ${S3_CLIENT} ${NEW_BUCKET_2} ${S3_OBJECT_KEY}
${LIST_S3_OBJECTS} = List objects S3 ${S3_CLIENT} ${NEW_BUCKET}
List Should Contain Value ${LIST_S3_OBJECTS} ${S3_OBJECT_KEY}
${LIST_S3_OBJECTS_2} = List objects S3 ${S3_CLIENT} ${NEW_BUCKET_2}
List Should Contain Value ${LIST_S3_OBJECTS_2} ${S3_OBJECT_KEY}
${LIST_V2_S3_OBJECTS} = List objects S3 v2 ${S3_CLIENT} ${NEW_BUCKET}
List Should Contain Value ${LIST_V2_S3_OBJECTS} ${S3_OBJECT_KEY}
${OBJ_PATH} = Get object S3 ${S3_CLIENT} ${NEW_BUCKET} ${S3_OBJECT_KEY}
Verify file hash ${OBJ_PATH} ${FILE_S3_HASH}
${HASH} = Get file hash ${OBJ_PATH}
Should Be Equal ${FILE_S3_HASH} ${HASH}
#TODO: Solve the issue on CopyObject #260 https://github.com/nspcc-dev/neofs-s3-gw/issues/260
${COPIED_OBJ_PATH} = Copy object S3 ${S3_CLIENT} ${NEW_BUCKET} ${S3_OBJECT_KEY}
${LIST_S3_OBJECTS} = List objects S3 ${S3_CLIENT} ${NEW_BUCKET}
List Should Contain Value ${LIST_S3_OBJECTS} ${COPIED_OBJ_PATH}
${COPIED_OBJ_PATH_2} = Copy object S3 ${S3_CLIENT} ${NEW_BUCKET_2} ${S3_OBJECT_KEY}
${LIST_S3_OBJECTS_2} = List objects S3 ${S3_CLIENT} ${NEW_BUCKET_2}
List Should Contain Value ${LIST_S3_OBJECTS_2} ${COPIED_OBJ_PATH_2}
Delete object S3 ${S3_CLIENT} ${NEW_BUCKET} ${S3_OBJECT_KEY}
${LIST_S3_OBJECTS} = List objects S3 ${S3_CLIENT} ${NEW_BUCKET}
List Should Not Contain Value ${LIST_S3_OBJECTS} ${S3_OBJECT_KEY}
[Teardown] Teardown s3_gate_object

@@ -75,3 +75,5 @@ STORAGE_WALLET_PATH = f"{DEVENV_SERVICES_PATH}/storage/wallet01.json"
CONTROL_NODE_USER = os.getenv('CONTROL_NODE_USER', 'root') CONTROL_NODE_USER = os.getenv('CONTROL_NODE_USER', 'root')
CONTROL_NODE_PWD = os.getenv('CONTROL_NODE_PWD') CONTROL_NODE_PWD = os.getenv('CONTROL_NODE_PWD')
REMOTE_HOST = os.getenv('REMOTE_HOST') REMOTE_HOST = os.getenv('REMOTE_HOST')
FREE_STORAGE = os.getenv('FREE_STORAGE', "false").lower() == "true"
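
For reference, the deposit fixture in conftest.py (hunk earlier in this commit) consumes this flag roughly as follows. This is a sketch only: the wrapper name make_deposit and its parameters are invented for illustration, the transfer_mainnet_gas and neofs_deposit calls are taken from the diff, and the deposit call is assumed to be skipped together with the transfer when storage is free.

from typing import Optional

from common import FREE_STORAGE, MAINNET_WALLET_PATH
from payment_neogo import neofs_deposit, transfer_mainnet_gas


def make_deposit(wallet: str, local_wallet_path: Optional[str] = None) -> None:
    # Skip the GAS transfer and deposit entirely when the devenv grants free storage.
    if not FREE_STORAGE:
        deposit = 30
        transfer_mainnet_gas(wallet, deposit + 1,
                             wallet_path=local_wallet_path or MAINNET_WALLET_PATH)
        neofs_deposit(wallet, deposit)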