From 2b08a932ac5a240810e4a77183322db01334998c Mon Sep 17 00:00:00 2001
From: Yulia Kovshova
Date: Mon, 14 Nov 2022 15:04:15 +0300
Subject: [PATCH] [#312] Add new policy test

Signed-off-by: Yulia Kovshova
---
 pytest_tests/steps/aws_cli_client.py          |  48 ++++++
 pytest_tests/steps/s3_gate_base.py            |  12 +-
 pytest_tests/steps/s3_gate_bucket.py          |  79 +++++++++
 .../services/s3_gate/test_s3_policy.py        | 153 ++++++++++++++++++
 robot/resources/files/policy.json             |   4 +
 .../lib/python_keywords/container.py          |  10 ++
 6 files changed, 302 insertions(+), 4 deletions(-)
 create mode 100644 pytest_tests/testsuites/services/s3_gate/test_s3_policy.py
 create mode 100644 robot/resources/files/policy.json

diff --git a/pytest_tests/steps/aws_cli_client.py b/pytest_tests/steps/aws_cli_client.py
index 66da705..5b256b9 100644
--- a/pytest_tests/steps/aws_cli_client.py
+++ b/pytest_tests/steps/aws_cli_client.py
@@ -71,6 +71,14 @@ class AwsCliClient:
         output = _cmd_run(cmd, REGULAR_TIMEOUT)
         return self._to_json(output)
 
+    def get_bucket_location(self, Bucket: str) -> dict:
+        cmd = (
+            f"aws {self.common_flags} s3api get-bucket-location --bucket {Bucket} "
+            f"--endpoint {S3_GATE}"
+        )
+        output = _cmd_run(cmd, REGULAR_TIMEOUT)
+        return self._to_json(output)
+
     def put_bucket_versioning(self, Bucket: str, VersioningConfiguration: dict) -> dict:
         cmd = (
             f"aws {self.common_flags} s3api put-bucket-versioning --bucket {Bucket} "
@@ -310,6 +318,46 @@ class AwsCliClient:
         output = _cmd_run(cmd)
         return self._to_json(output)
 
+    def get_bucket_policy(self, Bucket: str) -> dict:
+        cmd = (
+            f"aws {self.common_flags} s3api get-bucket-policy --bucket {Bucket} "
+            f"--endpoint {S3_GATE}"
+        )
+        output = _cmd_run(cmd)
+        return self._to_json(output)
+
+    def put_bucket_policy(self, Bucket: str, Policy: dict) -> dict:
+        cmd = (
+            f"aws {self.common_flags} s3api put-bucket-policy --bucket {Bucket} "
+            f"--policy {json.dumps(Policy)} --endpoint {S3_GATE}"
+        )
+        output = _cmd_run(cmd)
+        return self._to_json(output)
+
+    def get_bucket_cors(self, Bucket: str) -> dict:
+        cmd = (
+            f"aws {self.common_flags} s3api get-bucket-cors --bucket {Bucket} "
+            f"--endpoint {S3_GATE}"
+        )
+        output = _cmd_run(cmd)
+        return self._to_json(output)
+
+    def put_bucket_cors(self, Bucket: str, CORSConfiguration: dict) -> dict:
+        cmd = (
+            f"aws {self.common_flags} s3api put-bucket-cors --bucket {Bucket} "
+            f"--cors-configuration '{json.dumps(CORSConfiguration)}' --endpoint {S3_GATE}"
+        )
+        output = _cmd_run(cmd)
+        return self._to_json(output)
+
+    def delete_bucket_cors(self, Bucket: str) -> dict:
+        cmd = (
+            f"aws {self.common_flags} s3api delete-bucket-cors --bucket {Bucket} "
+            f"--endpoint {S3_GATE}"
+        )
+        output = _cmd_run(cmd)
+        return self._to_json(output)
+
     def put_bucket_tagging(self, Bucket: str, Tagging: dict) -> dict:
         cmd = (
             f"aws {self.common_flags} s3api put-bucket-tagging --bucket {Bucket} "
diff --git a/pytest_tests/steps/s3_gate_base.py b/pytest_tests/steps/s3_gate_base.py
index 001a3e2..0ae97c8 100644
--- a/pytest_tests/steps/s3_gate_base.py
+++ b/pytest_tests/steps/s3_gate_base.py
@@ -46,18 +46,18 @@ class TestS3GateBase:
     def s3_client(self, prepare_wallet_and_deposit, client_shell: Shell, request):
         wallet = prepare_wallet_and_deposit
         s3_bearer_rules_file = f"{os.getcwd()}/robot/resources/files/s3_bearer_rules.json"
-
+        policy = None if isinstance(request.param, str) else request.param[1]
         (
             cid,
             bucket,
             access_key_id,
             secret_access_key,
             owner_private_key,
-        ) = init_s3_credentials(wallet, s3_bearer_rules_file=s3_bearer_rules_file)
+        ) = init_s3_credentials(wallet, s3_bearer_rules_file=s3_bearer_rules_file, policy=policy)
         containers_list = list_containers(wallet, shell=client_shell)
         assert cid in containers_list, f"Expected cid {cid} in {containers_list}"
 
-        if request.param == "aws cli":
+        if "aws cli" in request.param:
             client = configure_cli_client(access_key_id, secret_access_key)
         else:
             client = configure_boto3_client(access_key_id, secret_access_key)
@@ -66,7 +66,9 @@ class TestS3GateBase:
 
 
 @allure.step("Init S3 Credentials")
-def init_s3_credentials(wallet_path: str, s3_bearer_rules_file: Optional[str] = None):
+def init_s3_credentials(
+    wallet_path: str, s3_bearer_rules_file: Optional[str] = None, policy: Optional[dict] = None
+):
     bucket = str(uuid.uuid4())
     s3_bearer_rules = s3_bearer_rules_file or "robot/resources/files/s3_bearer_rules.json"
     gate_public_key = get_wallet_public_key(S3_GATE_WALLET_PATH, S3_GATE_WALLET_PASS)
@@ -76,6 +78,8 @@ def init_s3_credentials(wallet_path: str, s3_bearer_rules_file: Optional[str] =
         f"--peer {NEOFS_ENDPOINT} --container-friendly-name {bucket} "
         f"--bearer-rules {s3_bearer_rules}"
     )
+    if policy:
+        cmd += f" --container-policy {policy}"
     logger.info(f"Executing command: {cmd}")
 
     try:
diff --git a/pytest_tests/steps/s3_gate_bucket.py b/pytest_tests/steps/s3_gate_bucket.py
index 8f05c53..cd0c92b 100644
--- a/pytest_tests/steps/s3_gate_bucket.py
+++ b/pytest_tests/steps/s3_gate_bucket.py
@@ -1,3 +1,4 @@
+import json
 import logging
 import uuid
 from enum import Enum
@@ -234,3 +235,81 @@ def get_object_lock_configuration(s3_client, bucket: str):
             f'Error Message: {err.response["Error"]["Message"]}\n'
             f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}'
         ) from err
+
+
+def get_bucket_policy(s3_client, bucket: str):
+    params = {"Bucket": bucket}
+    try:
+        response = s3_client.get_bucket_policy(**params)
+        log_command_execution("S3 get_bucket_policy result", response)
+        return response.get("Policy")
+    except ClientError as err:
+        raise Exception(
+            f'Error Message: {err.response["Error"]["Message"]}\n'
+            f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}'
+        ) from err
+
+
+def put_bucket_policy(s3_client, bucket: str, policy: dict):
+    params = {"Bucket": bucket, "Policy": json.dumps(policy)}
+    try:
+        response = s3_client.put_bucket_policy(**params)
+        log_command_execution("S3 put_bucket_policy result", response)
+        return response
+    except ClientError as err:
+        raise Exception(
+            f'Error Message: {err.response["Error"]["Message"]}\n'
+            f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}'
+        ) from err
+
+
+def get_bucket_cors(s3_client, bucket: str):
+    params = {"Bucket": bucket}
+    try:
+        response = s3_client.get_bucket_cors(**params)
+        log_command_execution("S3 get_bucket_cors result", response)
+        return response.get("CORSRules")
+    except ClientError as err:
+        raise Exception(
+            f'Error Message: {err.response["Error"]["Message"]}\n'
+            f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}'
+        ) from err
+
+
+def get_bucket_location(s3_client, bucket: str):
+    params = {"Bucket": bucket}
+    try:
+        response = s3_client.get_bucket_location(**params)
+        log_command_execution("S3 get_bucket_location result", response)
+        return response.get("LocationConstraint")
+    except ClientError as err:
+        raise Exception(
+            f'Error Message: {err.response["Error"]["Message"]}\n'
+            f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}'
+        ) from err
+
+
+def put_bucket_cors(s3_client, bucket: str, cors_configuration: dict):
+    params = {"Bucket": bucket, "CORSConfiguration": cors_configuration}
+    try:
+        response = s3_client.put_bucket_cors(**params)
+        log_command_execution("S3 put_bucket_cors result", response)
+        return response
+    except ClientError as err:
+        raise Exception(
+            f'Error Message: {err.response["Error"]["Message"]}\n'
+            f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}'
+        ) from err
+
+
+def delete_bucket_cors(s3_client, bucket: str):
+    params = {"Bucket": bucket}
+    try:
+        response = s3_client.delete_bucket_cors(**params)
+        log_command_execution("S3 delete_bucket_cors result", response)
+        return response
+    except ClientError as err:
+        raise Exception(
+            f'Error Message: {err.response["Error"]["Message"]}\n'
+            f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}'
+        ) from err
diff --git a/pytest_tests/testsuites/services/s3_gate/test_s3_policy.py b/pytest_tests/testsuites/services/s3_gate/test_s3_policy.py
new file mode 100644
index 0000000..08e9c29
--- /dev/null
+++ b/pytest_tests/testsuites/services/s3_gate/test_s3_policy.py
@@ -0,0 +1,153 @@
+import os
+import time
+from datetime import datetime, timedelta
+from random import choice
+from string import ascii_letters
+from typing import Tuple
+
+import allure
+import pytest
+from file_helper import generate_file, generate_file_with_content
+from python_keywords.container import search_container_by_name
+from python_keywords.storage_policy import get_simple_object_copies
+from s3_helper import (
+    assert_object_lock_mode,
+    check_objects_in_bucket,
+    object_key_from_file_path,
+    set_bucket_versioning,
+)
+
+from steps import s3_gate_bucket, s3_gate_object
+from steps.s3_gate_base import TestS3GateBase
+
+
+def pytest_generate_tests(metafunc):
+    policy = f"{os.getcwd()}/robot/resources/files/policy.json"
+    if "s3_client" in metafunc.fixturenames:
+        metafunc.parametrize(
+            "s3_client",
+            [("aws cli", policy), ("boto3", policy)],
+            indirect=True,
+            ids=["aws cli", "boto3"],
+        )
+
+
+@pytest.mark.s3_gate
+class TestS3GatePolicy(TestS3GateBase):
+    @allure.title("Test S3: Verify bucket creation with placement policy applied")
+    def test_s3_bucket_location(self, client_shell):
+        file_path_1 = generate_file()
+        file_name_1 = object_key_from_file_path(file_path_1)
+        file_path_2 = generate_file()
+        file_name_2 = object_key_from_file_path(file_path_2)
+
+        with allure.step("Create two buckets with different bucket configuration"):
+            bucket_1 = s3_gate_bucket.create_bucket_s3(
+                self.s3_client, bucket_configuration="complex"
+            )
+            set_bucket_versioning(self.s3_client, bucket_1, s3_gate_bucket.VersioningStatus.ENABLED)
+            bucket_2 = s3_gate_bucket.create_bucket_s3(self.s3_client, bucket_configuration="rep-3")
+            set_bucket_versioning(self.s3_client, bucket_2, s3_gate_bucket.VersioningStatus.ENABLED)
+            list_buckets = s3_gate_bucket.list_buckets_s3(self.s3_client)
+            assert (
+                bucket_1 in list_buckets and bucket_2 in list_buckets
+            ), f"Expected two buckets {bucket_1, bucket_2}, got {list_buckets}"
+
+        with allure.step("Check head buckets"):
+            head_1 = s3_gate_bucket.head_bucket(self.s3_client, bucket_1)
+            head_2 = s3_gate_bucket.head_bucket(self.s3_client, bucket_2)
+            assert head_1 == {} or head_1.get("HEAD") is None, "Expected head is empty"
+            assert head_2 == {} or head_2.get("HEAD") is None, "Expected head is empty"
+
+        with allure.step("Put objects into buckets"):
+            version_id_1 = s3_gate_object.put_object_s3(self.s3_client, bucket_1, file_path_1)
+            version_id_2 = s3_gate_object.put_object_s3(self.s3_client, bucket_2, file_path_2)
+            check_objects_in_bucket(self.s3_client, bucket_1, [file_name_1])
+            check_objects_in_bucket(self.s3_client, bucket_2, [file_name_2])
+
+        with allure.step("Check bucket location"):
+            bucket_loc_1 = s3_gate_bucket.get_bucket_location(self.s3_client, bucket_1)
+            bucket_loc_2 = s3_gate_bucket.get_bucket_location(self.s3_client, bucket_2)
+            assert bucket_loc_1 == "complex"
+            assert bucket_loc_2 == "rep-3"
+
+        with allure.step("Check object policy"):
+            cid_1 = search_container_by_name(self.wallet, bucket_1, shell=client_shell)
+            copies_1 = get_simple_object_copies(
+                wallet=self.wallet, cid=cid_1, oid=version_id_1, shell=client_shell
+            )
+            assert copies_1 == 1
+            cid_2 = search_container_by_name(self.wallet, bucket_2, shell=client_shell)
+            copies_2 = get_simple_object_copies(
+                wallet=self.wallet, cid=cid_2, oid=version_id_2, shell=client_shell
+            )
+            assert copies_2 == 3
+
+    @allure.title("Test S3: bucket policy")
+    def test_s3_bucket_policy(self):
+        with allure.step("Create bucket with default policy"):
+            bucket = s3_gate_bucket.create_bucket_s3(self.s3_client)
+            set_bucket_versioning(self.s3_client, bucket, s3_gate_bucket.VersioningStatus.ENABLED)
+
+        with allure.step("GetBucketPolicy"):
+            s3_gate_bucket.get_bucket_policy(self.s3_client, bucket)
+
+        with allure.step("Put new policy"):
+            custom_policy = f"file://{os.getcwd()}/robot/resources/files/bucket_policy.json"
+            custom_policy = {
+                "Version": "2008-10-17",
+                "Id": "aaaa-bbbb-cccc-dddd",
+                "Statement": [
+                    {
+                        "Sid": "AddPerm",
+                        "Effect": "Allow",
+                        "Principal": {"AWS": "*"},
+                        "Action": ["s3:GetObject"],
+                        "Resource": [f"arn:aws:s3:::{bucket}/*"],
+                    }
+                ],
+            }
+
+            s3_gate_bucket.put_bucket_policy(self.s3_client, bucket, custom_policy)
+        with allure.step("GetBucketPolicy"):
+            policy_1 = s3_gate_bucket.get_bucket_policy(self.s3_client, bucket)
+            print(policy_1)
+
+    @allure.title("Test S3: bucket CORS")
+    def test_s3_cors(self):
+        with allure.step("Create bucket without cors"):
+            bucket = s3_gate_bucket.create_bucket_s3(self.s3_client)
+            set_bucket_versioning(self.s3_client, bucket, s3_gate_bucket.VersioningStatus.ENABLED)
+
+        with pytest.raises(Exception):
+            bucket_cors = s3_gate_bucket.get_bucket_cors(self.s3_client, bucket)
+
+        with allure.step("Put bucket cors"):
+            cors = {
+                "CORSRules": [
+                    {
+                        "AllowedOrigins": ["http://www.example.com"],
+                        "AllowedHeaders": ["*"],
+                        "AllowedMethods": ["PUT", "POST", "DELETE"],
+                        "MaxAgeSeconds": 3000,
+                        "ExposeHeaders": ["x-amz-server-side-encryption"],
+                    },
+                    {
+                        "AllowedOrigins": ["*"],
+                        "AllowedHeaders": ["Authorization"],
+                        "AllowedMethods": ["GET"],
+                        "MaxAgeSeconds": 3000,
+                    },
+                ]
+            }
+            s3_gate_bucket.put_bucket_cors(self.s3_client, bucket, cors)
+            bucket_cors = s3_gate_bucket.get_bucket_cors(self.s3_client, bucket)
+            assert bucket_cors == cors.get(
+                "CORSRules"
+            ), f"Expected CORSRules to be {cors.get('CORSRules')}"
+
+        with allure.step("Delete bucket cors"):
+            s3_gate_bucket.delete_bucket_cors(self.s3_client, bucket)
+
+        with pytest.raises(Exception):
+            bucket_cors = s3_gate_bucket.get_bucket_cors(self.s3_client, bucket)
diff --git a/robot/resources/files/policy.json b/robot/resources/files/policy.json
new file mode 100644
index 0000000..11a6983
--- /dev/null
+++ b/robot/resources/files/policy.json
@@ -0,0 +1,4 @@
+{
+    "rep-3": "REP 3",
+    "complex": "REP 1 IN X CBF 1 SELECT 1 FROM * AS X"
+}
\ No newline at end of file
diff --git a/robot/resources/lib/python_keywords/container.py b/robot/resources/lib/python_keywords/container.py
index 1e25b59..4b4a4cf 100644
--- a/robot/resources/lib/python_keywords/container.py
+++ b/robot/resources/lib/python_keywords/container.py
@@ -204,3 +204,13 @@ def _parse_cid(output: str) -> str:
     if len(splitted) != 2:
         raise ValueError(f"no CID was parsed from command output: \t{first_line}")
     return splitted[1]
+
+
+@allure.step("Search container by name")
+def search_container_by_name(wallet: str, name: str, shell: Shell):
+    list_cids = list_containers(wallet, shell)
+    for cid in list_cids:
+        cont_info = get_container(wallet, cid, shell, True)
+        if cont_info.get("attributes").get("Name", None) == name:
+            return cid
+    return None