From f9d1a4dfae2de65ffd18d34ee54cb59f56bc2d5e Mon Sep 17 00:00:00 2001
From: Yulia Kovshova
Date: Tue, 4 Oct 2022 11:22:04 +0300
Subject: [PATCH] [#312] Add new tests for S3 bucket functionality

Signed-off-by: Yulia Kovshova
---
 pytest_tests/steps/aws_cli_client.py            |  12 ++
 pytest_tests/steps/s3_gate_bucket.py            |  20 +-
 .../services/s3_gate/test_s3_bucket.py          | 174 ++++++++++++++++++
 3 files changed, 205 insertions(+), 1 deletion(-)
 create mode 100644 pytest_tests/testsuites/services/s3_gate/test_s3_bucket.py

diff --git a/pytest_tests/steps/aws_cli_client.py b/pytest_tests/steps/aws_cli_client.py
index ff77975..5ab0742 100644
--- a/pytest_tests/steps/aws_cli_client.py
+++ b/pytest_tests/steps/aws_cli_client.py
@@ -23,6 +23,10 @@ class AwsCliClient:
         Bucket: str,
         ObjectLockEnabledForBucket: Optional[bool] = None,
         ACL: Optional[str] = None,
+        GrantFullControl: Optional[str] = None,
+        GrantRead: Optional[str] = None,
+        GrantWrite: Optional[str] = None,
+        CreateBucketConfiguration: Optional[dict] = None,
     ):
         if ObjectLockEnabledForBucket is None:
             object_lock = ""
@@ -36,6 +40,14 @@ class AwsCliClient:
         )
         if ACL:
             cmd += f" --acl {ACL}"
+        if GrantFullControl:
+            cmd += f" --grant-full-control {GrantFullControl}"
+        if GrantWrite:
+            cmd += f" --grant-write {GrantWrite}"
+        if GrantRead:
+            cmd += f" --grant-read {GrantRead}"
+        if CreateBucketConfiguration:
+            cmd += f" --create-bucket-configuration LocationConstraint={CreateBucketConfiguration['LocationConstraint']}"
         _cmd_run(cmd, REGULAR_TIMEOUT)
 
     def list_buckets(self) -> dict:
diff --git a/pytest_tests/steps/s3_gate_bucket.py b/pytest_tests/steps/s3_gate_bucket.py
index 86ef11c..4e3b53f 100644
--- a/pytest_tests/steps/s3_gate_bucket.py
+++ b/pytest_tests/steps/s3_gate_bucket.py
@@ -24,7 +24,13 @@ class VersioningStatus(Enum):
 
 @allure.step("Create bucket S3")
 def create_bucket_s3(
-    s3_client, object_lock_enabled_for_bucket: Optional[bool] = None, acl: Optional[str] = None
+    s3_client,
+    object_lock_enabled_for_bucket: Optional[bool] = None,
+    acl: Optional[str] = None,
+    grant_write: Optional[str] = None,
+    grant_read: Optional[str] = None,
+    grant_full_control: Optional[str] = None,
+    bucket_configuration: Optional[str] = None,
 ) -> str:
     bucket_name = str(uuid.uuid4())
 
@@ -34,6 +40,18 @@ def create_bucket_s3(
         params.update({"ObjectLockEnabledForBucket": object_lock_enabled_for_bucket})
     if acl is not None:
         params.update({"ACL": acl})
+    elif grant_write or grant_read or grant_full_control:
+        if grant_write:
+            params.update({"GrantWrite": grant_write})
+        if grant_read:
+            params.update({"GrantRead": grant_read})
+        if grant_full_control:
+            params.update({"GrantFullControl": grant_full_control})
+    if bucket_configuration:
+        params.update(
+            {"CreateBucketConfiguration": {"LocationConstraint": bucket_configuration}}
+        )
+
     s3_bucket = s3_client.create_bucket(**params)
     log_command_execution(f"Created S3 bucket {bucket_name}", s3_bucket)
     sleep(S3_SYNC_WAIT_TIME)
diff --git a/pytest_tests/testsuites/services/s3_gate/test_s3_bucket.py b/pytest_tests/testsuites/services/s3_gate/test_s3_bucket.py
new file mode 100644
index 0000000..7c278b3
--- /dev/null
+++ b/pytest_tests/testsuites/services/s3_gate/test_s3_bucket.py
@@ -0,0 +1,174 @@
+import os
+from datetime import datetime, timedelta
+
+import allure
+import pytest
+from python_keywords.storage_policy import get_simple_object_copies
+from python_keywords.utility_keywords import generate_file
+from s3_helper import check_objects_in_bucket, object_key_from_file_path, set_bucket_versioning
+
+from steps import s3_gate_bucket, s3_gate_object
+from steps.s3_gate_base import TestS3GateBase
+
+
+def pytest_generate_tests(metafunc):
+    if "s3_client" in metafunc.fixturenames:
+        metafunc.parametrize("s3_client", ["aws cli", "boto3"], indirect=True)
+
+
+@pytest.mark.s3_gate
+class TestS3GateBucket(TestS3GateBase):
+    @allure.title("Test S3: Create Bucket with different ACL")
+    def test_s3_create_bucket_with_ACL(self):
+
+        with allure.step("Create bucket with ACL private"):
+            bucket = s3_gate_bucket.create_bucket_s3(self.s3_client, True, acl="private")
+            bucket_acl = s3_gate_bucket.get_bucket_acl(self.s3_client, bucket)
+            bucket_permission = [permission.get("Permission") for permission in bucket_acl]
+            assert bucket_permission == [
+                "FULL_CONTROL"
+            ], "Permission for CanonicalUser must be FULL_CONTROL"
+
+        with allure.step("Create bucket with ACL = public-read"):
+            bucket_1 = s3_gate_bucket.create_bucket_s3(self.s3_client, True, acl="public-read")
+            bucket_acl_1 = s3_gate_bucket.get_bucket_acl(self.s3_client, bucket_1)
+            bucket_permission_1 = [permission.get("Permission") for permission in bucket_acl_1]
+            assert bucket_permission_1 == [
+                "FULL_CONTROL",
+                "FULL_CONTROL",
+            ], "Permissions for all grantees must be FULL_CONTROL"
+
+        with allure.step("Create bucket with ACL public-read-write"):
+            bucket_2 = s3_gate_bucket.create_bucket_s3(
+                self.s3_client, True, acl="public-read-write"
+            )
+            bucket_acl_2 = s3_gate_bucket.get_bucket_acl(self.s3_client, bucket_2)
+            bucket_permission_2 = [permission.get("Permission") for permission in bucket_acl_2]
+            assert bucket_permission_2 == [
+                "FULL_CONTROL",
+                "FULL_CONTROL",
+            ], "Permissions for all grantees must be FULL_CONTROL"
+
+        with allure.step("Create bucket with ACL = authenticated-read"):
+            bucket_3 = s3_gate_bucket.create_bucket_s3(
+                self.s3_client, True, acl="authenticated-read"
+            )
+            bucket_acl_3 = s3_gate_bucket.get_bucket_acl(self.s3_client, bucket_3)
+            bucket_permission_3 = [permission.get("Permission") for permission in bucket_acl_3]
+            assert bucket_permission_3 == [
+                "FULL_CONTROL",
+                "FULL_CONTROL",
+            ], "Permissions for all grantees must be FULL_CONTROL"
+
+    @allure.title("Test S3: Create Bucket with different ACL by grant")
+    def test_s3_create_bucket_with_grants(self):
+
+        with allure.step("Create bucket with --grant-read"):
+            bucket = s3_gate_bucket.create_bucket_s3(
+                self.s3_client,
+                True,
+                grant_read="uri=http://acs.amazonaws.com/groups/global/AllUsers",
+            )
+            bucket_acl = s3_gate_bucket.get_bucket_acl(self.s3_client, bucket)
+            bucket_permission = [permission.get("Permission") for permission in bucket_acl]
+            assert bucket_permission == [
+                "FULL_CONTROL",
+                "FULL_CONTROL",
+            ], "Permissions for all grantees must be FULL_CONTROL"
+
+        with allure.step("Create bucket with --grant-write"):
+            bucket_1 = s3_gate_bucket.create_bucket_s3(
+                self.s3_client,
+                True,
+                grant_write="uri=http://acs.amazonaws.com/groups/global/AllUsers",
+            )
+            bucket_acl_1 = s3_gate_bucket.get_bucket_acl(self.s3_client, bucket_1)
+            bucket_permission_1 = [permission.get("Permission") for permission in bucket_acl_1]
+            assert bucket_permission_1 == [
+                "FULL_CONTROL",
+                "FULL_CONTROL",
+            ], "Permissions for all grantees must be FULL_CONTROL"
+
+        with allure.step("Create bucket with --grant-full-control"):
+            bucket_2 = s3_gate_bucket.create_bucket_s3(
+                self.s3_client,
+                True,
+                grant_full_control="uri=http://acs.amazonaws.com/groups/global/AllUsers",
+            )
+            bucket_acl_2 = s3_gate_bucket.get_bucket_acl(self.s3_client, bucket_2)
+            bucket_permission_2 = [permission.get("Permission") for permission in bucket_acl_2]
+            assert bucket_permission_2 == [
+                "FULL_CONTROL",
+                "FULL_CONTROL",
+            ], "Permissions for all grantees must be FULL_CONTROL"
+
+    @allure.title("Test S3: create bucket with object lock")
+    def test_s3_bucket_object_lock(self):
+        file_path = generate_file()
+        file_name = object_key_from_file_path(file_path)
+
+        with allure.step("Create bucket with --no-object-lock-enabled-for-bucket"):
+            bucket = s3_gate_bucket.create_bucket_s3(self.s3_client, False)
+            date_obj = datetime.utcnow() + timedelta(days=1)
+            with pytest.raises(
+                Exception, match=r".*Object Lock configuration does not exist for this bucket.*"
+            ):
+                # An error occurred (ObjectLockConfigurationNotFoundError) when calling the PutObject operation (reached max retries: 0):
+                # Object Lock configuration does not exist for this bucket
+                s3_gate_object.put_object_s3(
+                    self.s3_client,
+                    bucket,
+                    file_path,
+                    ObjectLockMode="COMPLIANCE",
+                    ObjectLockRetainUntilDate=date_obj.strftime("%Y-%m-%dT%H:%M:%S"),
+                )
+        with allure.step("Create bucket with --object-lock-enabled-for-bucket"):
+            bucket_1 = s3_gate_bucket.create_bucket_s3(self.s3_client, True)
+            date_obj_1 = datetime.utcnow() + timedelta(days=1)
+            s3_gate_object.put_object_s3(
+                self.s3_client,
+                bucket_1,
+                file_path,
+                ObjectLockMode="COMPLIANCE",
+                ObjectLockRetainUntilDate=date_obj_1.strftime("%Y-%m-%dT%H:%M:%S"),
+                ObjectLockLegalHoldStatus="ON",
+            )
+            object_1 = s3_gate_object.get_object_s3(
+                self.s3_client, bucket_1, file_name, full_output=True
+            )
+            assert (
+                object_1.get("ObjectLockMode") == "COMPLIANCE"
+            ), "Object Lock Mode must be COMPLIANCE"
+            assert date_obj_1.strftime("%Y-%m-%dT%H:%M:%S") in object_1.get(
+                "ObjectLockRetainUntilDate"
+            ), f'Object Lock Retain Until Date must be {date_obj_1.strftime("%Y-%m-%dT%H:%M:%S")}'
+            assert (
+                object_1.get("ObjectLockLegalHoldStatus") == "ON"
+            ), "Object Lock Legal Hold Status must be ON"
+
+    @allure.title("Test S3: delete bucket")
+    def test_s3_delete_bucket(self):
+        file_path_1 = generate_file()
+        file_name_1 = object_key_from_file_path(file_path_1)
+        file_path_2 = generate_file()
+        file_name_2 = object_key_from_file_path(file_path_2)
+        bucket = s3_gate_bucket.create_bucket_s3(self.s3_client)
+
+        with allure.step("Put two objects into bucket"):
+            s3_gate_object.put_object_s3(self.s3_client, bucket, file_path_1)
+            s3_gate_object.put_object_s3(self.s3_client, bucket, file_path_2)
+            check_objects_in_bucket(self.s3_client, bucket, [file_name_1, file_name_2])
+
+        with allure.step("Try to delete non-empty bucket and expect error"):
+            with pytest.raises(Exception, match=r".*The bucket you tried to delete is not empty.*"):
+                s3_gate_bucket.delete_bucket_s3(self.s3_client, bucket)
+
+        with allure.step("Delete objects from bucket"):
+            s3_gate_object.delete_object_s3(self.s3_client, bucket, file_name_1)
+            s3_gate_object.delete_object_s3(self.s3_client, bucket, file_name_2)
+            check_objects_in_bucket(self.s3_client, bucket, [])
+
+        with allure.step("Delete empty bucket"):
+            s3_gate_bucket.delete_bucket_s3(self.s3_client, bucket)
+            with pytest.raises(Exception, match=r".*Not Found.*"):
+                s3_gate_bucket.head_bucket(self.s3_client, bucket)