Add S3-gate testcase (#4)

* Add S3-gate testcase
This commit is contained in:
anatoly-bogatyrev 2020-12-11 14:35:02 +03:00 committed by GitHub
parent 14b780257c
commit a20ec58614
11 changed files with 320 additions and 173 deletions

View file

@ -1,79 +0,0 @@
FROM docker:19.03.11-dind
ENV WD /
ARG REG_USR
ARG REG_PWD
ARG JF_TOKEN
ARG BUILD_NEOFS_NODE
ENV REG_USR=${REG_USR}
ENV REG_PWD=${REG_PWD}
ENV NEOFSCLI_VERSION=0.9.0
ENV JF_TOKEN=${JF_TOKEN}
ENV BUILD_NEOFS_NODE=${BUILD_NEOFS_NODE}
ENV RF_VERSION 3.2.1
RUN apk add --no-cache openssh
RUN apk add --no-cache libressl-dev
RUN apk add --no-cache curl
RUN apk add --no-cache bash bash-doc bash-completion
RUN apk add --no-cache util-linux pciutils usbutils coreutils binutils findutils grep gcc libffi-dev openssl-dev
RUN apk add --no-cache sudo
RUN apk --no-cache add \
make \
python3 \
py3-pip
RUN apk --no-cache add --virtual \
.build-deps \
build-base \
python3-dev
RUN addgroup nobody root && \
echo "export PYTHONPATH=\$PYTHONPATH:/.local/lib/python3.8/site-packages" > /.profile && \
mkdir -p /tests /reports /.local && \
chgrp -R 0 /reports /.local && \
chmod -R g=u /etc/passwd /reports /.local /.profile
RUN pip3 install wheel
RUN pip3 install robotframework
RUN pip3 install pexpect
RUN pip3 install requests
# Golang
ARG GOLANG_VERSION=1.14.3
#we need the go version installed from apk to bootstrap the custom version built from source
RUN apk update && apk add go gcc bash musl-dev openssl-dev ca-certificates && update-ca-certificates
RUN wget https://dl.google.com/go/go$GOLANG_VERSION.src.tar.gz && tar -C /usr/local -xzf go$GOLANG_VERSION.src.tar.gz
RUN cd /usr/local/go/src && ./make.bash
ENV PATH=$PATH:/usr/local/go/bin
RUN rm go$GOLANG_VERSION.src.tar.gz
#we delete the apk installed version to avoid conflict
RUN apk del go
RUN go version
# Add the keys and set permissions
COPY ./ca/* /root/.ssh/
RUN chmod 600 /root/.ssh/id_rsa && \
chmod 600 /root/.ssh/id_rsa.pub
RUN pip3 install docker-compose
RUN export DOCKER_HOST="${HOSTNAME}-docker"
RUN apk add --no-cache git \
--repository https://alpine.global.ssl.fastly.net/alpine/v3.10/community \
--repository https://alpine.global.ssl.fastly.net/alpine/v3.10/main
RUN mkdir -p /robot/vendor
RUN cd /robot/vendor \
&& git clone https://github.com/nspcc-dev/neofs-dev-env.git
WORKDIR ${WD}
COPY ./ ${WD}
RUN cd ${WD} && chmod +x dockerd.sh

View file

@ -1,46 +1,9 @@
VERSION=0.0.18
PREFIX=
B=\033[0;1m
G=\033[0;92m
R=\033[0m
.DEFAULT_GOAL := help
.PHONY: build-image
DATE = $(shell date +%s)
NAME = "testcases_$(DATE)"
build:
@echo "${B}${G}⇒ Build image ${R}"
@docker build \
--build-arg REG_USR=$(REG_USR) \
--build-arg REG_PWD=$(REG_PWD) \
--build-arg JF_TOKEN=$(JF_TOKEN) \
--build-arg BUILD_NEOFS_NODE=${BUILD_NEOFS_NODE} \
--build-arg BUILD_CLI=${BUILD_CLI} \
-f Dockerfile \
-t robot:$(VERSION)$(PREFIX) .
run_docker:
@echo "${B}${G}⇒ Test Run image $(NAME)${R}"
@mkdir artifacts_$(NAME)
@docker run --privileged=true \
--name $(NAME) \
robot:$(VERSION)$(PREFIX) ./dockerd.sh &
@sleep 10;
@docker wait $(NAME);
@echo "${B}${G}⇒ Testsuite has been completed. ${R}";
@echo "${B}${G}⇒ Copy Logs from container to ./artifacts/ ${R}";
@docker cp $(NAME):/artifacts .
@docker rm $(NAME)
run:
@echo "${B}${G}⇒ Test Run ${R}"
@echo "⇒ Test Run"
@robot --timestampoutputs --outputdir artifacts/ robot/testsuites/integration/*.robot
help:
@echo "${B}${G}⇒ build Build image ${R}"
@echo "${B}${G}⇒ run Run testcases ${R}"
@echo "${B}${G}⇒ run_docker Run in docker ${R}"
@echo "⇒ run Run testcases ${R}"

View file

@ -1,19 +1,35 @@
## Running testcases
### Running testcases locally
### Initial setup
1. Install the dependencies (first run only):
- pip3 install robotframework
- pip3 install pexpect
- pip3 install requests
1. Install neofs-cli
- `git clone git@github.com:nspcc-dev/neofs-node.git`
- `cd neofs-node`
- `make`
- `sudo cp bin/neofs-cli /usr/local/bin/neofs-cli`, or add bin/neofs-cli to your PATH (e.g. via an alias)
2. Install cdn-authmate
- `git clone git@github.com:nspcc-dev/cdn-authmate.git`
- `cd cdn-authmate`
- `make build`
- `sudo cp bin/cdn-authmate /usr/local/bin/cdn-authmate`, or add bin/cdn-authmate to your PATH (e.g. via an alias)
3. Install the dependencies for the testcases
- `pip3 install robotframework`
- `pip3 install pexpect`
- `pip3 install requests`
- `pip3 install boto3`
(replace pip3 with the Python package manager appropriate for your system).
The dev-env with the environment under test must be running.
2. Run `make run`
### Running testcases
1. Run `make run`
2. Logs will be available in the artifacts/ folder after the tests finish with any status.
3. Logs will be available in the artifacts/ folder after the tests finish with any status.
### Running an arbitrary testcase
@ -30,46 +46,38 @@
* withdraw.robot - Deposit and Withdraw operations on a NeoFS account
* netmap_simple.robot - Placement policy checks
* replication.robot - basic object replication testcase
* http_gate.robot - HTTP gate and its NeoFS integration checks
* s3_gate.robot - S3 gate and its NeoFS integration checks
### Running testcases in Docker
1. Set the environment variables for working with dev-env:
```
export REG_USR=<registry_user>
export REG_PWD=<registry_pass>
export JF_TOKEN=<JF_token>
```
2. Run `make build`
3. Run `make run_docker`
4. Logs will be available in the artifacts/ folder after the tests finish with any status.
### Running testcases in Docker with arbitrary commits
At the moment, arbitrary commits are supported for NeoFS Node and NeoFS CLI.
To use them, set the environment variables before running `make build`.
```
export BUILD_NEOFS_NODE=<commit or branch>
```
### Running smoke tests
## Running smoke tests
There is a suite with smoke tests for the CDN gates: `robot/testsuites/smoke/selectelcdn_smoke.robot`.
Unlike the NeoFS suites, which run on dev-env, it requires its own set of variables. For the
keyword libraries to use them, set the environment variable
```
export ROBOT_PROFILE=selectel_smoke
```
By default, the keywords use the variables from `robot/resources/lib/neofs_int_vars.py`; the sketch after the command below shows how this profile switch works.
```
robot --outputdir artifacts/ robot/testsuites/smoke/selectelcdn_smoke.robot
```
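A minimal sketch of how the keyword libraries choose the variables module, mirroring the profile check in `robot/resources/lib/gates.py` (module and variable names as in this repository):
```
import os

if os.getenv('ROBOT_PROFILE') == 'selectel_smoke':
    # smoke-test endpoints (Selectel CDN)
    from selectelcdn_smoke_vars import (NEOGO_CLI_PREFIX, NEO_MAINNET_ENDPOINT,
        NEOFS_NEO_API_ENDPOINT, NEOFS_ENDPOINT, HTTP_GATE, S3_GATE)
else:
    # default: dev-env endpoints
    from neofs_int_vars import (NEOGO_CLI_PREFIX, NEO_MAINNET_ENDPOINT,
        NEOFS_NEO_API_ENDPOINT, NEOFS_ENDPOINT, HTTP_GATE, S3_GATE)
```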
### Initial setup
### Documentation generation
1. Unlike the NeoFS suites, which run on dev-env, it requires its own set of variables. For the
keyword libraries to use them, set the environment variable
```
export ROBOT_PROFILE=selectel_smoke
```
Dev-env is not required, but neo-go has to be installed.
2. Install neo-go
- `git clone git@github.com:nspcc-dev/neo-go.git`
- `cd neo-go`
- `make`
- `sudo cp bin/neo-go /usr/local/bin/neo-go`, or add bin/neo-go to your PATH (e.g. via an alias)
## Documentation generation
To generate the documentation for the steps:
```

View file

@ -1,10 +0,0 @@
#!/bin/bash
dockerd &
sleep 60
export DOCKER_HOST=unix:///var/run/docker.sock
make hosts -B -C /robot/vendor/neofs-dev-env | grep -v make >> /etc/hosts
make rebuild -C /robot/vendor/neofs-dev-env
make up -C /robot/vendor/neofs-dev-env
sleep 60
robot --timestampoutputs --outputdir /artifacts/ /robot/testsuites/integration/*.robot

View file

@ -0,0 +1 @@
(binary key file content, not representable as text)

View file

@ -2,8 +2,13 @@
import logging
import os
import re
import requests
import shutil
import subprocess
import boto3
import uuid
import io
from robot.api.deco import keyword
from robot.api import logger
@ -15,10 +20,139 @@ ROBOT_AUTO_KEYWORDS = False
if os.getenv('ROBOT_PROFILE') == 'selectel_smoke':
from selectelcdn_smoke_vars import (NEOGO_CLI_PREFIX, NEO_MAINNET_ENDPOINT,
NEOFS_NEO_API_ENDPOINT, NEOFS_ENDPOINT, HTTP_GATE)
NEOFS_NEO_API_ENDPOINT, NEOFS_ENDPOINT, HTTP_GATE, S3_GATE)
else:
from neofs_int_vars import (NEOGO_CLI_PREFIX, NEO_MAINNET_ENDPOINT,
NEOFS_NEO_API_ENDPOINT, NEOFS_ENDPOINT, HTTP_GATE)
NEOFS_NEO_API_ENDPOINT, NEOFS_ENDPOINT, HTTP_GATE, S3_GATE)
@keyword('Init S3 Credentials')
def init_s3_credentials(private_key: str, s3_key):
bucket = str(uuid.uuid4())
Cmd = f'cdn-authmate --debug --with-log issue-secret --neofs-key {private_key} --gate-public-key={s3_key} --peer {NEOFS_ENDPOINT} --container-friendly-name {bucket}'
logger.info("Cmd: %s" % Cmd)
try:
complProc = subprocess.run(Cmd, check=True, universal_newlines=True,
stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=150, shell=True)
output = complProc.stdout
logger.info("Output: %s" % output)
m = re.search(r'"cid":\s+"(\w+)"', output)
cid = m.group(1)
logger.info("cid: %s" % cid)
m = re.search(r'"access_key_id":\s+"([\w\/]+)"', output)
access_key_id = m.group(1)
logger.info("access_key_id: %s" % access_key_id)
m = re.search(r'"secret_access_key":\s+"(\w+)"', output)
secret_access_key = m.group(1)
logger.info("secret_access_key: %s" % secret_access_key)
m = re.search(r'"owner_private_key":\s+"(\w+)"', output)
owner_private_key = m.group(1)
logger.info("owner_private_key: %s" % owner_private_key)
return cid, bucket, access_key_id, secret_access_key, owner_private_key
except subprocess.CalledProcessError as e:
raise Exception("command '{}' return with error (code {}): {}".format(e.cmd, e.returncode, e.output))
@keyword('Config S3 client')
def config_s3_client(access_key_id, secret_access_key):
session = boto3.session.Session()
s3_client = session.client(
service_name='s3',
aws_access_key_id=access_key_id,
aws_secret_access_key=secret_access_key,
endpoint_url=S3_GATE, verify=False
)
return s3_client
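# Example (sketch only, not used by any suite): how the S3 keywords above fit together.
# The key string and the file paths are placeholders to be supplied by the caller.
def _example_s3_flow(private_key_hex: str, gate_pub_key_file: str, filepath: str):
    # Issue S3 credentials for a fresh bucket via cdn-authmate, then talk to the gate.
    cid, bucket, access_key_id, secret_access_key, _ = init_s3_credentials(
        private_key_hex, gate_pub_key_file)
    s3_client = config_s3_client(access_key_id, secret_access_key)
    # The new bucket should be listed by the gate, and uploads into it should succeed.
    assert bucket in list_buckets_s3(s3_client)
    put_object_s3(s3_client, bucket, filepath)
    return cid, bucket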
@keyword('List objects S3 v2')
def list_objects_s3_v2(s3_client, bucket):
response = s3_client.list_objects_v2(Bucket=bucket)
logger.info("S3 v2 List objects result: %s" % response['Contents'])
obj_list = []
for obj in response['Contents']:
obj_list.append(obj['Key'])
logger.info("Found s3 objects: %s" % obj_list)
return obj_list
@keyword('List objects S3')
def list_objects_s3(s3_client, bucket):
response = s3_client.list_objects(Bucket=bucket)
logger.info("S3 List objects result: %s" % response['Contents'])
obj_list = []
for obj in response['Contents']:
obj_list.append(obj['Key'])
logger.info("Found s3 objects: %s" % obj_list)
return obj_list
@keyword('List buckets S3')
def list_buckets_s3(s3_client):
found_buckets = []
response = s3_client.list_buckets()
logger.info("S3 List buckets result: %s" % response)
for bucket in response['Buckets']:
found_buckets.append(bucket['Name'])
return found_buckets
@keyword('Put object S3')
def put_object_s3(s3_client, bucket, filepath):
filename = os.path.basename(filepath)
with open(filepath, "rb") as f:
fileContent = f.read()
response = s3_client.put_object(Body=fileContent, Bucket=bucket, Key=filename)
logger.info("S3 Put object result: %s" % response)
return response
@keyword('Head object S3')
def head_object_s3(s3_client, bucket, object_key):
response = s3_client.head_object(Bucket=bucket, Key=object_key)
logger.info("S3 Head object result: %s" % response)
return response
@keyword('Delete object S3')
def delete_object_s3(s3_client, bucket, object_key):
response = s3_client.delete_object(Bucket=bucket, Key=object_key)
logger.info("S3 Put object result: %s" % response)
return response
@keyword('Copy object S3')
def copy_object_s3(s3_client, bucket, object_key, new_object):
response = s3_client.copy_object(Bucket=bucket, CopySource=bucket+"/"+object_key, Key=new_object)
logger.info("S3 Copy object result: %s" % response)
return response
@keyword('Get object S3')
def get_object_s3(s3_client, bucket, object_key, target_file):
response = s3_client.get_object(Bucket=bucket, Key=object_key)
with open(f"{target_file}", 'wb') as f:
chunk = response['Body'].read(1024)
while chunk:
f.write(chunk)
chunk = response['Body'].read(1024)
return target_file
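# Example (sketch only): an S3 download can be verified byte-for-byte against the
# source file; the bucket name and file paths are placeholders.
def _example_s3_get_roundtrip(s3_client, bucket: str, source_file: str, target_file: str):
    import filecmp
    object_key = os.path.basename(source_file)  # objects are keyed by file name in Put object S3
    get_object_s3(s3_client, bucket, object_key, target_file)
    # shallow=False compares file contents, not just os.stat() metadata
    assert filecmp.cmp(source_file, target_file, shallow=False)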
@keyword('Get via HTTP Gate')

View file

@ -544,6 +544,21 @@ def create_container(private_key: str, basic_acl:str="", rule:str="REP 2 IN X CB
return cid
@keyword('Container List')
def container_list(private_key: str):
Cmd = f'neofs-cli --rpc-endpoint {NEOFS_ENDPOINT} --key {private_key} container list'
logger.info("Cmd: %s" % Cmd)
complProc = subprocess.run(Cmd, check=True, universal_newlines=True,
stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=15, shell=True)
logger.info("Output: %s" % complProc.stdout)
container_list = re.findall(r'(\w{43,44})', complProc.stdout)
logger.info("Containers list: %s" % container_list)
return container_list
@keyword('Container Existing')
def container_existing(private_key: str, cid: str):
Cmd = f'neofs-cli --rpc-endpoint {NEOFS_ENDPOINT} --key {private_key} container list'
@ -581,7 +596,6 @@ def search_object(private_key: str, cid: str, keys: str, bearer: str, filters: s
if bearer:
bearer_token = f"--bearer {bearer}"
if filters:
filters = f"--filters {filters}"
@ -593,20 +607,21 @@ def search_object(private_key: str, cid: str, keys: str, bearer: str, filters: s
logger.info("Output: %s" % complProc.stdout)
if expected_objects_list:
found_objects = re.findall(r'(\w{43,44})', complProc.stdout)
found_objects = re.findall(r'(\w{43,44})', complProc.stdout)
if expected_objects_list:
if sorted(found_objects) == sorted(expected_objects_list):
logger.info("Found objects list '{}' is equal for expected list '{}'".format(found_objects, expected_objects_list))
else:
raise Exception("Found object list '{}' is not equal to expected list '{}'".format(found_objects, expected_objects_list))
return found_objects
except subprocess.CalledProcessError as e:
raise Exception("command '{}' return with error (code {}): {}".format(e.cmd, e.returncode, e.output))
'''
@keyword('Verify Head Tombstone')
def verify_head_tombstone(private_key: str, cid: str, oid: str):
@ -809,6 +824,11 @@ def delete_object(private_key: str, cid: str, oid: str, bearer: str):
raise Exception("command '{}' return with error (code {}): {}".format(e.cmd, e.returncode, e.output))
@keyword('Get file name')
def get_file_name(filepath):
filename = os.path.basename(filepath)
return filename
@keyword('Get file hash')
def get_file_hash(filename):
file_hash = _get_file_hash(filename)

View file

@ -7,3 +7,4 @@ NEO_MAINNET_ENDPOINT = "main_chain.neofs.devenv:30333"
NEOFS_NEO_API_ENDPOINT = 'http://main_chain.neofs.devenv:30333'
HTTP_GATE = 'http://http.neofs.devenv'
S3_GATE = 'https://s3.neofs.devenv:8080'

View file

@ -37,6 +37,24 @@ def init_wallet():
logger.info(f"Command completed with output: {out}")
return filename
@keyword('Generate wallet from WIF')
def generate_wallet_from_wif(wallet: str, wif: str):
cmd = ( f"{NEOGO_CLI_PREFIX} wallet import --wallet {wallet} --wif {wif}" )
logger.info(f"Executing command: {cmd}")
p = pexpect.spawn(cmd)
p.expect(".*")
p.sendline('\n')
p.sendline('\n')
p.sendline('\n')
p.wait()
out = p.read()
logger.info(f"Command completed with output: {out}")
@keyword('Generate wallet')
def generate_wallet(wallet: str):
cmd = ( f"{NEOGO_CLI_PREFIX} wallet create -w {wallet}" )

View file

@ -7,3 +7,4 @@ NEO_MAINNET_ENDPOINT = "http://92.53.71.51:20332"
# selectel main chain on lobachevsky-1
NEOFS_NEO_API_ENDPOINT = "http://92.53.71.51:20332"
HTTP_GATE = 'http://92.53.71.51:38080'
S3_GATE = 'https://92.53.71.51:28080'

View file

@ -0,0 +1,90 @@
*** Settings ***
Variables ../../variables/common.py
Library Collections
Library ${RESOURCES}/neofs.py
Library ${RESOURCES}/payment_neogo.py
Library ${RESOURCES}/gates.py
*** Test cases ***
NeoFS S3 Gateway
[Documentation] Execute operations via S3 Gate
[Timeout] 5 min
${PRIV_KEY} = Form WIF from String 1dd37fba80fec4e6a6f13fd708d8dcb3b29def768017052f6c930fa1c5d90bbb
${WALLET} = Init wallet
Generate wallet from WIF ${WALLET} ${PRIV_KEY}
${ADDR} = Dump Address ${WALLET}
Dump PrivKey ${WALLET} ${ADDR}
${SCRIPT_HASH} = Get ScripHash ${PRIV_KEY}
${TX_DEPOSIT} = NeoFS Deposit ${WALLET} ${ADDR} ${SCRIPT_HASH} 50
Wait Until Keyword Succeeds 1 min 15 sec
... Transaction accepted in block ${TX_DEPOSIT}
Get Transaction ${TX_DEPOSIT}
${FILE_S3} = Generate file of bytes 256
${FILE_S3_HASH} = Get file hash ${FILE_S3}
${FILE_S3_NAME} = Get file name ${FILE_S3}
${FILE_FS} = Generate file of bytes 256
${FILE_FS_HASH} = Get file hash ${FILE_FS}
${FILE_FS_NAME} = Get file name ${FILE_FS}
${CID}
... ${BUCKET}
... ${ACCESS_KEY_ID}
... ${SEC_ACCESS_KEY}
... ${OWNER_PRIV_KEY} = Init S3 Credentials ${PRIV_KEY} keys/s3_docker_hcs.pub.key
${CONTAINERS_LIST} =    Container List    ${PRIV_KEY}
List Should Contain Value    ${CONTAINERS_LIST}    ${CID}
${S3_CLIENT} = Config S3 client ${ACCESS_KEY_ID} ${SEC_ACCESS_KEY}
${LIST_S3_BUCKETS} = List buckets S3 ${S3_CLIENT}
List Should Contain Value ${LIST_S3_BUCKETS} ${BUCKET}
Put object S3 ${S3_CLIENT} ${BUCKET} ${FILE_S3}
Head object S3 ${S3_CLIENT} ${BUCKET} ${FILE_S3_NAME}
${OID_FS} = Put object to NeoFS ${PRIV_KEY} ${FILE_FS} ${CID} ${EMPTY} ${EMPTY}
Head object ${PRIV_KEY} ${CID} ${OID_FS} ${EMPTY}
${LIST_S3_OBJECTS} = List objects S3 ${S3_CLIENT} ${BUCKET}
List Should Contain Value ${LIST_S3_OBJECTS} ${FILE_S3_NAME}
List Should Contain Value ${LIST_S3_OBJECTS} ${FILE_FS_NAME}
${LIST_V2_S3_OBJECTS} = List objects S3 v2 ${S3_CLIENT} ${BUCKET}
List Should Contain Value ${LIST_V2_S3_OBJECTS} ${FILE_S3_NAME}
List Should Contain Value    ${LIST_V2_S3_OBJECTS}    ${FILE_FS_NAME}
${OID_LIST_S3} = Search object ${PRIV_KEY} ${CID} ${EMPTY} ${EMPTY} FileName=${FILE_S3_NAME}
${OID_S3} = Get From List ${OID_LIST_S3} 0
Get object S3 ${S3_CLIENT} ${BUCKET} ${FILE_S3_NAME} s3_obj_get_s3
Get object S3 ${S3_CLIENT} ${BUCKET} ${FILE_FS_NAME} fs_obj_get_s3
Verify file hash s3_obj_get_s3 ${FILE_S3_HASH}
Verify file hash fs_obj_get_s3 ${FILE_FS_HASH}
Get object from NeoFS ${PRIV_KEY} ${CID} ${OID_S3} ${EMPTY} s3_obj_get_fs
Get object from NeoFS ${PRIV_KEY} ${CID} ${OID_FS} ${EMPTY} fs_obj_get_fs
Verify file hash s3_obj_get_fs ${FILE_S3_HASH}
Verify file hash fs_obj_get_fs ${FILE_FS_HASH}
Copy object S3 ${S3_CLIENT} ${BUCKET} ${FILE_S3_NAME} NewName
${LIST_S3_OBJECTS} = List objects S3 ${S3_CLIENT} ${BUCKET}
List Should Contain Value ${LIST_S3_OBJECTS} NewName
Delete object S3 ${S3_CLIENT} ${BUCKET} ${FILE_S3_NAME}
${LIST_S3_OBJECTS} = List objects S3 ${S3_CLIENT} ${BUCKET}
List Should Not Contain Value    ${LIST_S3_OBJECTS}    ${FILE_S3_NAME}
[Teardown] Cleanup Files s3_obj_get_fs fs_obj_get_fs s3_obj_get_s3 fs_obj_get_s3
... ${FILE_S3} ${FILE_FS} hcs.pub.key