[#312] Add new Locking test

Signed-off-by: Yulia Kovshova <y.kovshova@yadro.com>
Authored by Юлия Ковшова on 2022-10-27 19:42:05 +03:00, committed by Julia Kovshova
parent d21a89485b
commit c9e42a7a0a
5 changed files with 360 additions and 8 deletions


@@ -1,9 +1,12 @@
+import datetime
 import os
+from datetime import datetime, timedelta
 from typing import Optional

 import allure
 import s3_gate_bucket
 import s3_gate_object
+from dateutil.parser import parse


 @allure.step("Expected all objects are presented in the bucket")
@@ -97,8 +100,9 @@ def assert_object_lock_mode(
     bucket: str,
     file_name: str,
     object_lock_mode: str,
-    retain_untile_date,
-    legal_hold_status: str,
+    retain_untile_date: datetime,
+    legal_hold_status: str = "OFF",
+    retain_period: Optional[int] = None,
 ):
     object_dict = s3_gate_object.get_object_s3(s3_client, bucket, file_name, full_output=True)
     assert (
@@ -109,10 +113,17 @@ def assert_object_lock_mode(
     ), f"Expected Object Lock Legal Hold Status is {legal_hold_status}"
     object_retain_date = object_dict.get("ObjectLockRetainUntilDate")
     retain_date = (
-        object_retain_date
-        if isinstance(object_retain_date, str)
-        else object_retain_date.strftime("%Y-%m-%dT%H:%M:%S")
+        parse(object_retain_date) if isinstance(object_retain_date, str) else object_retain_date
     )
-    assert str(retain_untile_date.strftime("%Y-%m-%dT%H:%M:%S")) in str(
-        retain_date
-    ), f'Expected Object Lock Retain Until Date is {str(retain_untile_date.strftime("%Y-%m-%dT%H:%M:%S"))}'
+    if retain_untile_date:
+        assert retain_date.strftime("%Y-%m-%dT%H:%M:%S") == retain_untile_date.strftime(
+            "%Y-%m-%dT%H:%M:%S"
+        ), f'Expected Object Lock Retain Until Date is {str(retain_untile_date.strftime("%Y-%m-%dT%H:%M:%S"))}'
+    elif retain_period:
+        last_modify_date = object_dict.get("LastModified")
+        last_modify = (
+            parse(last_modify_date) if isinstance(last_modify_date, str) else last_modify_date
+        )
+        assert (
+            retain_date - last_modify + timedelta(seconds=1)
+        ).days == retain_period, f"Expected retention period is {retain_period} days"
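With this change, assert_object_lock_mode can check either an explicit retain-until date set by PutObjectRetention or a day-based retention period derived from the object's LastModified timestamp. A minimal usage sketch (the client fixture, bucket and key names are placeholders):

from datetime import datetime, timedelta

retain_until = datetime.utcnow() + timedelta(minutes=2)

# Verify an explicit retain-until date set via PutObjectRetention:
assert_object_lock_mode(s3_client, "my-bucket", "file.txt", "COMPLIANCE", retain_until, "OFF")

# Verify a day-based default retention (no fixed date), as the bucket-lock test below does:
assert_object_lock_mode(s3_client, "my-bucket", "file.txt", "COMPLIANCE", None, "OFF", 1)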


@@ -348,6 +348,35 @@ class AwsCliClient:
         output = _cmd_run(cmd)
         return self._to_json(output)

+    def put_object_retention(
+        self,
+        Bucket: str,
+        Key: str,
+        Retention: dict,
+        VersionId: Optional[str] = None,
+        BypassGovernanceRetention: Optional[bool] = None,
+    ) -> dict:
+        version = f" --version-id {VersionId}" if VersionId else ""
+        cmd = (
+            f"aws {self.common_flags} s3api put-object-retention --bucket {Bucket} --key {Key} "
+            f"{version} --retention '{json.dumps(Retention, indent=4, sort_keys=True, default=str)}' --endpoint {S3_GATE}"
+        )
+        if BypassGovernanceRetention is not None:
+            cmd += " --bypass-governance-retention"
+        output = _cmd_run(cmd)
+        return self._to_json(output)
+
+    def put_object_legal_hold(
+        self, Bucket: str, Key: str, LegalHold: dict, VersionId: Optional[str] = None
+    ) -> dict:
+        version = f" --version-id {VersionId}" if VersionId else ""
+        cmd = (
+            f"aws {self.common_flags} s3api put-object-legal-hold --bucket {Bucket} --key {Key} "
+            f"{version} --legal-hold '{json.dumps(LegalHold)}' --endpoint {S3_GATE}"
+        )
+        output = _cmd_run(cmd)
+        return self._to_json(output)
+
     def put_object_tagging(self, Bucket: str, Key: str, Tagging: dict) -> dict:
         cmd = (
             f"aws {self.common_flags} s3api put-object-tagging --bucket {Bucket} --key {Key} "

@@ -483,6 +512,22 @@ class AwsCliClient:
         output = _cmd_run(cmd)
         return self._to_json(output)

+    def put_object_lock_configuration(self, Bucket, ObjectLockConfiguration):
+        cmd = (
+            f"aws {self.common_flags} s3api put-object-lock-configuration --bucket {Bucket} "
+            f"--object-lock-configuration '{json.dumps(ObjectLockConfiguration)}' --endpoint-url {S3_GATE}"
+        )
+        output = _cmd_run(cmd)
+        return self._to_json(output)
+
+    def get_object_lock_configuration(self, Bucket):
+        cmd = (
+            f"aws {self.common_flags} s3api get-object-lock-configuration --bucket {Bucket} "
+            f"--endpoint-url {S3_GATE}"
+        )
+        output = _cmd_run(cmd)
+        return self._to_json(output)
+
     @staticmethod
     def _to_json(output: str) -> dict:
         json_output = {}
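These AwsCliClient methods keep the same CamelCase keyword signature as boto3's S3 client, so the tests can drive either implementation through one s3_client fixture. For comparison, a rough boto3 equivalent of put_object_retention (the endpoint URL, bucket and key here are placeholders, not values from this commit):

from datetime import datetime, timedelta, timezone

import boto3

s3 = boto3.client("s3", endpoint_url="http://s3-gate.example.com")  # placeholder endpoint
s3.put_object_retention(
    Bucket="my-bucket",
    Key="file.txt",
    Retention={
        "Mode": "GOVERNANCE",
        "RetainUntilDate": datetime.now(timezone.utc) + timedelta(minutes=5),
    },
    BypassGovernanceRetention=True,
)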


@@ -206,3 +206,31 @@ def put_bucket_acl_s3(
             f'Error Message: {err.response["Error"]["Message"]}\n'
             f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}'
         ) from err
+
+
+@allure.step("Put object lock configuration")
+def put_object_lock_configuration(s3_client, bucket: str, configuration: dict):
+    params = {"Bucket": bucket, "ObjectLockConfiguration": configuration}
+    try:
+        response = s3_client.put_object_lock_configuration(**params)
+        log_command_execution("S3 put_object_lock_configuration result", response)
+        return response
+    except ClientError as err:
+        raise Exception(
+            f'Error Message: {err.response["Error"]["Message"]}\n'
+            f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}'
+        ) from err
+
+
+@allure.step("Get object lock configuration")
+def get_object_lock_configuration(s3_client, bucket: str):
+    params = {"Bucket": bucket}
+    try:
+        response = s3_client.get_object_lock_configuration(**params)
+        log_command_execution("S3 get_object_lock_configuration result", response)
+        return response.get("ObjectLockConfiguration")
+    except ClientError as err:
+        raise Exception(
+            f'Error Message: {err.response["Error"]["Message"]}\n'
+            f'Http status code: {err.response["ResponseMetadata"]["HTTPStatusCode"]}'
+        ) from err
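A short sketch of how these two bucket-level helpers fit together, mirroring the configuration used by the new bucket-lock test below (the bucket name is a placeholder, and the assertion assumes the gateway echoes back the stored rule):

configuration = {
    "ObjectLockEnabled": "Enabled",
    "Rule": {"DefaultRetention": {"Mode": "COMPLIANCE", "Days": 1}},
}
put_object_lock_configuration(s3_client, "my-bucket", configuration)

stored = get_object_lock_configuration(s3_client, "my-bucket")
assert stored["Rule"]["DefaultRetention"]["Mode"] == "COMPLIANCE"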


@@ -440,6 +440,43 @@ def complete_multipart_upload_s3(
         ) from err


+@allure.step("Put object retention")
+def put_object_retention(
+    s3_client,
+    bucket_name: str,
+    object_key: str,
+    retention: dict,
+    version_id: Optional[str] = None,
+    bypass_governance_retention: Optional[bool] = None,
+):
+    try:
+        params = {"Bucket": bucket_name, "Key": object_key, "Retention": retention}
+        if version_id:
+            params.update({"VersionId": version_id})
+        if bypass_governance_retention is not None:
+            params.update({"BypassGovernanceRetention": bypass_governance_retention})
+        s3_client.put_object_retention(**params)
+        log_command_execution("S3 Put object retention ", str(retention))
+    except ClientError as err:
+        raise Exception(f"Got error during put object retention: {err}") from err
@allure.step("Put object legal hold")
def put_object_legal_hold(
s3_client, bucket_name: str, object_key: str, legal_hold: str, version_id: Optional[str] = None
):
try:
params = {"Bucket": bucket_name, "Key": object_key, "LegalHold": {"Status": legal_hold}}
if version_id:
params.update({"VersionId": version_id})
s3_client.put_object_legal_hold(**params)
log_command_execution("S3 Put object legal hold ", str(legal_hold))
except ClientError as err:
+        raise Exception(f"Got error during put object legal hold: {err}") from err
@allure.step("Put object tagging") @allure.step("Put object tagging")
def put_object_tagging(s3_client, bucket_name: str, object_key: str, tags: list): def put_object_tagging(s3_client, bucket_name: str, object_key: str, tags: list):
try: try:
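The bypass_governance_retention flag is what lets the GOVERNANCE-mode test below replace an existing retention period; a minimal sketch of the two object-level wrappers (client, bucket and key are placeholders):

from datetime import datetime, timedelta

retention = {
    "Mode": "GOVERNANCE",
    "RetainUntilDate": datetime.utcnow() + timedelta(minutes=5),
}
# Without bypass_governance_retention=True this call is expected to be rejected
# while an earlier GOVERNANCE lock is still in effect:
put_object_retention(s3_client, "my-bucket", "file.txt", retention, None, True)
put_object_legal_hold(s3_client, "my-bucket", "file.txt", "ON")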


@@ -0,0 +1,231 @@
+import os
+import time
+from datetime import datetime, timedelta
+from random import choice
+from string import ascii_letters
+from typing import Tuple
+
+import allure
+import pytest
+from file_helper import generate_file, generate_file_with_content
+from s3_helper import assert_object_lock_mode, check_objects_in_bucket, object_key_from_file_path
+
+from steps import s3_gate_bucket, s3_gate_object
+from steps.s3_gate_base import TestS3GateBase
+
+
+def pytest_generate_tests(metafunc):
+    if "s3_client" in metafunc.fixturenames:
+        metafunc.parametrize("s3_client", ["aws cli", "boto3"], indirect=True)
+
+
+@pytest.mark.s3_gate
+@pytest.mark.parametrize("version_id", [None, "second"])
+class TestS3GateLocking(TestS3GateBase):
+    @allure.title("Test S3: Checking the operation of retention period & legal lock on the object")
+    def test_s3_object_locking(self, version_id):
+        file_path = generate_file()
+        file_name = object_key_from_file_path(file_path)
+        retention_period = 2
+
+        bucket = s3_gate_bucket.create_bucket_s3(self.s3_client, True)
+
+        with allure.step("Put several versions of object into bucket"):
+            s3_gate_object.put_object_s3(self.s3_client, bucket, file_path)
+            file_name_1 = generate_file_with_content(file_path=file_path)
+            version_id_2 = s3_gate_object.put_object_s3(self.s3_client, bucket, file_name_1)
+            check_objects_in_bucket(self.s3_client, bucket, [file_name])
+            if version_id:
+                version_id = version_id_2
+
+        with allure.step(f"Put retention period {retention_period}min to object {file_name}"):
+            date_obj = datetime.utcnow() + timedelta(minutes=retention_period)
+            retention = {
+                "Mode": "COMPLIANCE",
+                "RetainUntilDate": date_obj,
+            }
+            s3_gate_object.put_object_retention(
+                self.s3_client, bucket, file_name, retention, version_id
+            )
+            assert_object_lock_mode(
+                self.s3_client, bucket, file_name, "COMPLIANCE", date_obj, "OFF"
+            )
+
+        with allure.step(f"Put legal hold to object {file_name}"):
+            s3_gate_object.put_object_legal_hold(
+                self.s3_client, bucket, file_name, "ON", version_id
+            )
+            assert_object_lock_mode(self.s3_client, bucket, file_name, "COMPLIANCE", date_obj, "ON")
+
+        with allure.step(f"Fail with deleting object with legal hold and retention period"):
+            if version_id:
+                with pytest.raises(Exception):
+                    # An error occurred (AccessDenied) when calling the DeleteObject operation (reached max retries: 0): Access Denied.
+                    s3_gate_object.delete_object_s3(self.s3_client, bucket, file_name, version_id)
+
+        with allure.step(f"Check retention period is no longer set on the uploaded object"):
+            time.sleep((retention_period + 1) * 60)
+            assert_object_lock_mode(self.s3_client, bucket, file_name, "COMPLIANCE", date_obj, "ON")
+
+        with allure.step(f"Fail with deleting object with legal hold and retention period"):
+            if version_id:
+                with pytest.raises(Exception):
+                    # An error occurred (AccessDenied) when calling the DeleteObject operation (reached max retries: 0): Access Denied.
+                    s3_gate_object.delete_object_s3(self.s3_client, bucket, file_name, version_id)
+            else:
+                s3_gate_object.delete_object_s3(self.s3_client, bucket, file_name, version_id)
+
+    @allure.title("Test S3: Checking the impossibility to change the retention mode COMPLIANCE")
+    def test_s3_mode_compliance(self, version_id):
+        file_path = generate_file()
+        file_name = object_key_from_file_path(file_path)
+        retention_period = 2
+        retention_period_1 = 1
+
+        bucket = s3_gate_bucket.create_bucket_s3(self.s3_client, True)
+
+        with allure.step("Put object into bucket"):
+            obj_version = s3_gate_object.put_object_s3(self.s3_client, bucket, file_path)
+            if version_id:
+                version_id = obj_version
+            check_objects_in_bucket(self.s3_client, bucket, [file_name])
+
+        with allure.step(f"Put retention period {retention_period}min to object {file_name}"):
+            date_obj = datetime.utcnow() + timedelta(minutes=retention_period)
+            retention = {
+                "Mode": "COMPLIANCE",
+                "RetainUntilDate": date_obj,
+            }
+            s3_gate_object.put_object_retention(
+                self.s3_client, bucket, file_name, retention, version_id
+            )
+            assert_object_lock_mode(
+                self.s3_client, bucket, file_name, "COMPLIANCE", date_obj, "OFF"
+            )
+
+        with allure.step(
+            f"Try to change retention period {retention_period_1}min to object {file_name}"
+        ):
+            date_obj = datetime.utcnow() + timedelta(minutes=retention_period_1)
+            retention = {
+                "Mode": "COMPLIANCE",
+                "RetainUntilDate": date_obj,
+            }
+            with pytest.raises(Exception):
+                s3_gate_object.put_object_retention(
+                    self.s3_client, bucket, file_name, retention, version_id
+                )
+
+    @allure.title("Test S3: Checking the ability to change retention mode GOVERNANCE")
+    def test_s3_mode_governance(self, version_id):
+        file_path = generate_file()
+        file_name = object_key_from_file_path(file_path)
+        retention_period = 3
+        retention_period_1 = 2
+        retention_period_2 = 5
+
+        bucket = s3_gate_bucket.create_bucket_s3(self.s3_client, True)
+
+        with allure.step("Put object into bucket"):
+            obj_version = s3_gate_object.put_object_s3(self.s3_client, bucket, file_path)
+            if version_id:
+                version_id = obj_version
+            check_objects_in_bucket(self.s3_client, bucket, [file_name])
+
+        with allure.step(f"Put retention period {retention_period}min to object {file_name}"):
+            date_obj = datetime.utcnow() + timedelta(minutes=retention_period)
+            retention = {
+                "Mode": "GOVERNANCE",
+                "RetainUntilDate": date_obj,
+            }
+            s3_gate_object.put_object_retention(
+                self.s3_client, bucket, file_name, retention, version_id
+            )
+            assert_object_lock_mode(
+                self.s3_client, bucket, file_name, "GOVERNANCE", date_obj, "OFF"
+            )
+
+        with allure.step(
+            f"Try to change retention period {retention_period_1}min to object {file_name}"
+        ):
+            date_obj = datetime.utcnow() + timedelta(minutes=retention_period_1)
+            retention = {
+                "Mode": "GOVERNANCE",
+                "RetainUntilDate": date_obj,
+            }
+            with pytest.raises(Exception):
+                s3_gate_object.put_object_retention(
+                    self.s3_client, bucket, file_name, retention, version_id
+                )
+
+        with allure.step(
+            f"Try to change retention period {retention_period_1}min to object {file_name}"
+        ):
+            date_obj = datetime.utcnow() + timedelta(minutes=retention_period_1)
+            retention = {
+                "Mode": "GOVERNANCE",
+                "RetainUntilDate": date_obj,
+            }
+            with pytest.raises(Exception):
+                s3_gate_object.put_object_retention(
+                    self.s3_client, bucket, file_name, retention, version_id
+                )
+
+        with allure.step(f"Put new retention period {retention_period_2}min to object {file_name}"):
+            date_obj = datetime.utcnow() + timedelta(minutes=retention_period_2)
+            retention = {
+                "Mode": "GOVERNANCE",
+                "RetainUntilDate": date_obj,
+            }
+            s3_gate_object.put_object_retention(
+                self.s3_client, bucket, file_name, retention, version_id, True
+            )
+            assert_object_lock_mode(
+                self.s3_client, bucket, file_name, "GOVERNANCE", date_obj, "OFF"
+            )
+
+    @allure.title("Test S3: Checking if an Object Cannot Be Locked")
+    def test_s3_legal_hold(self, version_id):
+        file_path = generate_file()
+        file_name = object_key_from_file_path(file_path)
+
+        bucket = s3_gate_bucket.create_bucket_s3(self.s3_client, False)
+
+        with allure.step("Put object into bucket"):
+            obj_version = s3_gate_object.put_object_s3(self.s3_client, bucket, file_path)
+            if version_id:
+                version_id = obj_version
+            check_objects_in_bucket(self.s3_client, bucket, [file_name])
+
+        with allure.step(f"Put legal hold to object {file_name}"):
+            with pytest.raises(Exception):
+                s3_gate_object.put_object_legal_hold(
+                    self.s3_client, bucket, file_name, "ON", version_id
+                )
+
+
+@pytest.mark.s3_gate
+class TestS3GateLockingBucket(TestS3GateBase):
+    @allure.title("Test S3: Bucket Lock")
+    def test_s3_bucket_lock(self):
+        file_path = generate_file()
+        file_name = object_key_from_file_path(file_path)
+        configuration = {"Rule": {"DefaultRetention": {"Mode": "COMPLIANCE", "Days": 1}}}
+
+        bucket = s3_gate_bucket.create_bucket_s3(self.s3_client, True)
+
+        with allure.step("PutObjectLockConfiguration with ObjectLockEnabled=False"):
+            s3_gate_bucket.put_object_lock_configuration(self.s3_client, bucket, configuration)
+
+        with allure.step("PutObjectLockConfiguration with ObjectLockEnabled=True"):
+            configuration["ObjectLockEnabled"] = "Enabled"
+            s3_gate_bucket.put_object_lock_configuration(self.s3_client, bucket, configuration)
+
+        with allure.step("GetObjectLockConfiguration"):
+            config = s3_gate_bucket.get_object_lock_configuration(self.s3_client, bucket)
+            configuration["Rule"]["DefaultRetention"]["Years"] = 0
+            assert config == configuration, f"Configurations must be equal {configuration}"
+
+        with allure.step("Put object into bucket"):
+            s3_gate_object.put_object_s3(self.s3_client, bucket, file_path)
+            assert_object_lock_mode(self.s3_client, bucket, file_name, "COMPLIANCE", None, "OFF", 1)
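The pytest_generate_tests hook at the top of the new module runs every test in it once per client implementation. A self-contained illustration of the same indirect-parametrization pattern (the fixture here is simplified; the real fixture in the test base class is expected to build the corresponding AwsCliClient or boto3 client):

import pytest


@pytest.fixture
def s3_client(request):
    # The real fixture constructs the requested client; this sketch just
    # forwards the parameter so the pattern stays runnable on its own.
    return request.param


def pytest_generate_tests(metafunc):
    if "s3_client" in metafunc.fixturenames:
        metafunc.parametrize("s3_client", ["aws cli", "boto3"], indirect=True)


def test_client_label(s3_client):
    assert s3_client in ("aws cli", "boto3")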