Fix skipping of S3 tests caused by a Python mistake

Signed-off-by: Yulia Kovshova <y.kovshova@yadro.com>
Юлия Ковшова 2022-09-23 15:09:41 +04:00 committed by Julia Kovshova
parent d28d7c6e6d
commit c53e48d1f8
3 changed files with 18 additions and 13 deletions

View file

@@ -282,7 +282,7 @@ class AwsCliClient:
             f"aws {self.common_flags} s3api get-object-tagging --bucket {Bucket} --key {Key} "
             f"{version} --endpoint {S3_GATE}"
         )
-        output = _cmd_run(cmd)
+        output = _cmd_run(cmd, REGULAR_TIMEOUT)
         return self._to_json(output)

     def delete_object_tagging(self, Bucket: str, Key: str) -> dict:
@@ -365,7 +365,7 @@ class AwsCliClient:
             f"--upload-id {UploadId} --part-number {PartNumber} --body {Body} "
             f"--endpoint-url {S3_GATE}"
         )
-        output = _cmd_run(cmd)
+        output = _cmd_run(cmd, LONG_TIMEOUT)
         return self._to_json(output)

     def list_parts(self, UploadId: str, Bucket: str, Key: str) -> dict:
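
Both hunks only add an explicit timeout argument to _cmd_run: the get-object-tagging call uses REGULAR_TIMEOUT, while upload-part, which has to push the whole part body through the CLI, uses LONG_TIMEOUT. A minimal sketch of the assumed helper side, with illustrative constant values and the timeout taken as the second positional parameter (the real helper and constants are defined elsewhere in the suite):

import subprocess

# Illustrative values only; the real constants are defined by the test suite.
REGULAR_TIMEOUT = 90   # seconds, for ordinary s3api calls
LONG_TIMEOUT = 240     # seconds, for multipart upload-part calls

def _cmd_run(cmd: str, timeout: int = REGULAR_TIMEOUT) -> str:
    """Run a shell command, returning stdout and failing if it exceeds `timeout` seconds."""
    result = subprocess.run(
        cmd, shell=True, capture_output=True, text=True, timeout=timeout, check=True
    )
    return result.stdout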

View file

@@ -11,6 +11,7 @@ import allure
 import urllib3
 from botocore.exceptions import ClientError
 from cli_helpers import log_command_execution
 from steps.aws_cli_client import AwsCliClient
 from steps.s3_gate_bucket import S3_SYNC_WAIT_TIME
@@ -441,7 +442,7 @@ def get_object_attributes(
         bucket_name,
         object_key,
         *attributes,
-        VersionId=version_id,
+        version_id=version_id,
         max_parts=max_parts,
         part_number=part_number,
     )
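
The second file fixes the keyword name used in the call made inside get_object_attributes: the delegated client method takes a snake_case version_id parameter, so passing the boto-style VersionId= keyword makes Python raise a TypeError for an unexpected keyword argument before any request is sent, which is presumably why the dependent tests were skipped. A stripped-down reproduction, using a hypothetical stand-in for the wrapped method (only the parameter names matter here):

# Hypothetical stand-in for the wrapped client method; not the suite's real code.
def get_object_attributes(bucket_name, object_key, *attributes,
                          version_id=None, max_parts=None, part_number=None):
    return {"version_id": version_id, "max_parts": max_parts, "part_number": part_number}

get_object_attributes("bucket", "key", "ObjectParts", version_id="abc")   # ok
get_object_attributes("bucket", "key", "ObjectParts", VersionId="abc")
# TypeError: get_object_attributes() got an unexpected keyword argument 'VersionId'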

View file

@@ -18,10 +18,11 @@ from s3_helper import (
     set_bucket_versioning,
     try_to_get_objects_and_expect_error,
 )
-from utility import create_file_with_content, get_file_content, split_file
 from steps import s3_gate_bucket, s3_gate_object
 from steps.aws_cli_client import AwsCliClient
 from steps.s3_gate_base import TestS3GateBase
+from utility import create_file_with_content, get_file_content, split_file

 logger = logging.getLogger("NeoLogger")
@@ -426,7 +427,7 @@ class TestS3Gate(TestS3GateBase):
             assert set(put_objects).difference(set(objects_to_delete_b1)) == set(
                 bucket_objects
             ), f"Expected all objects {put_objects} in objects list {bucket_objects}"
-            try_to_get_objects_and_expect_error(bucket_1, objects_to_delete_b1)
+            try_to_get_objects_and_expect_error(self.s3_client, bucket_1, objects_to_delete_b1)

         with allure.step("Delete some objects from bucket_2 at once"):
             objects_to_delete_b2 = choices(put_objects, k=max_delete_objects)
@@ -437,7 +438,7 @@ class TestS3Gate(TestS3GateBase):
             assert set(put_objects).difference(set(objects_to_delete_b2)) == set(
                 objects_list
             ), f"Expected all objects {put_objects} in objects list {bucket_objects}"
-            try_to_get_objects_and_expect_error(bucket_2, objects_to_delete_b2)
+            try_to_get_objects_and_expect_error(self.s3_client, bucket_2, objects_to_delete_b2)

     @allure.title("Test S3: Copy object to the same bucket")
     def test_s3_copy_same_bucket(self):
@@ -464,7 +465,7 @@ class TestS3Gate(TestS3GateBase):
             copy_obj_path = s3_gate_object.copy_object_s3(self.s3_client, bucket, file_name_simple)
             bucket_objects.append(copy_obj_path)
-            check_objects_in_bucket(bucket, bucket_objects)
+            check_objects_in_bucket(self.s3_client, bucket, bucket_objects)

         with allure.step("Check copied object has the same content"):
             got_copied_file = s3_gate_object.get_object_s3(self.s3_client, bucket, copy_obj_path)
@@ -477,7 +478,10 @@ class TestS3Gate(TestS3GateBase):
             bucket_objects.remove(file_name_simple)
             check_objects_in_bucket(
-                bucket, expected_objects=bucket_objects, unexpected_objects=[file_name_simple]
+                self.s3_client,
+                bucket,
+                expected_objects=bucket_objects,
+                unexpected_objects=[file_name_simple],
             )

     @allure.title("Test S3: Copy object to another bucket")
@@ -507,8 +511,8 @@ class TestS3Gate(TestS3GateBase):
             copy_obj_path_b2 = s3_gate_object.copy_object_s3(
                 self.s3_client, bucket_1, file_name_large, bucket_dst=bucket_2
             )
-            check_objects_in_bucket(bucket_1, expected_objects=bucket_1_objects)
-            check_objects_in_bucket(bucket_2, expected_objects=[copy_obj_path_b2])
+            check_objects_in_bucket(self.s3_client, bucket_1, expected_objects=bucket_1_objects)
+            check_objects_in_bucket(self.s3_client, bucket_2, expected_objects=[copy_obj_path_b2])

         with allure.step("Check copied object has the same content"):
             got_copied_file_b2 = s3_gate_object.get_object_s3(
@@ -522,12 +526,12 @@ class TestS3Gate(TestS3GateBase):
             s3_gate_object.delete_object_s3(self.s3_client, bucket_1, file_name_simple)
             bucket_1_objects.remove(file_name_simple)
-            check_objects_in_bucket(bucket_1, expected_objects=bucket_1_objects)
-            check_objects_in_bucket(bucket_2, expected_objects=[copy_obj_path_b2])
+            check_objects_in_bucket(self.s3_client, bucket_1, expected_objects=bucket_1_objects)
+            check_objects_in_bucket(self.s3_client, bucket_2, expected_objects=[copy_obj_path_b2])

         with allure.step("Delete one object from second bucket and check it is empty"):
             s3_gate_object.delete_object_s3(self.s3_client, bucket_2, copy_obj_path_b2)
-            check_objects_in_bucket(bucket_2, expected_objects=[])
+            check_objects_in_bucket(self.s3_client, bucket_2, expected_objects=[])

     def check_object_attributes(self, bucket: str, object_key: str, parts_count: int):
         if not isinstance(self.s3_client, AwsCliClient):
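
The remaining hunks pass self.s3_client as the first positional argument to check_objects_in_bucket and try_to_get_objects_and_expect_error. Both helpers take the client first, so the old calls bound the bucket name to the client parameter and left bucket unfilled, and the tests failed or were skipped on a TypeError instead of exercising S3. A sketch of the assumed helper shape, with a hypothetical list_objects call standing in for the real listing code:

# Assumed shape of the fixed helper; the real one is imported by the test module.
def check_objects_in_bucket(s3_client, bucket, expected_objects, unexpected_objects=None):
    listed = s3_client.list_objects(bucket)  # hypothetical client call
    assert set(expected_objects).issubset(listed), f"Expected {expected_objects} in {listed}"
    for key in unexpected_objects or []:
        assert key not in listed, f"Did not expect {key} in bucket {bucket}"

# Old call:   check_objects_in_bucket(bucket_1, expected_objects=bucket_1_objects)
#   -> bucket_1 is bound to s3_client and `bucket` is left missing, so Python raises
#      "TypeError: check_objects_in_bucket() missing 1 required positional argument: 'bucket'".
# Fixed call: check_objects_in_bucket(self.s3_client, bucket_1, expected_objects=bucket_1_objects)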