[#350] Move file-related functions to file_helper
Signed-off-by: Vladimir Domnich <v.domnich@yadro.com>
Parent: ce41104d3a
Commit: 5eeb8b4058

6 changed files with 153 additions and 93 deletions
@@ -2,7 +2,7 @@ import hashlib
 import logging
 import os
 import uuid
-from typing import Optional
+from typing import Any, Optional

 import allure
 from common import ASSETS_DIR, SIMPLE_OBJ_SIZE
@@ -27,6 +27,38 @@ def generate_file(size: int = SIMPLE_OBJ_SIZE) -> str:
     return file_path


+def generate_file_with_content(
+    file_path: Optional[str] = None,
+    content: Optional[str] = None,
+) -> str:
+    """Creates a new file with specified content.
+
+    Args:
+        file_path: Path to the file that should be created. If not specified, then random file
+            path will be generated.
+        content: Content that should be stored in the file. If not specified, then random binary
+            content will be generated.
+
+    Returns:
+        Path to the generated file.
+    """
+    mode = "w+"
+    if content is None:
+        content = os.urandom(SIMPLE_OBJ_SIZE)
+        mode = "wb"
+
+    if not file_path:
+        file_path = f"{os.getcwd()}/{ASSETS_DIR}/{str(uuid.uuid4())}"
+    else:
+        if not os.path.exists(os.path.dirname(file_path)):
+            os.makedirs(os.path.dirname(file_path))
+
+    with open(file_path, mode) as file:
+        file.write(content)
+
+    return file_path
+
+
 @allure.step("Get File Hash")
 def get_file_hash(file_path: str, len: Optional[int] = None) -> str:
     """Generates hash for the specified file.
@@ -65,3 +97,54 @@ def concat_files(file_paths: list, resulting_file_path: Optional[str] = None) ->
             with open(file, "rb") as part_file:
                 f.write(part_file.read())
     return resulting_file_path
+
+
+def split_file(file_path: str, parts: int) -> list[str]:
+    """Splits specified file into several specified number of parts.
+
+    Each part is saved under name `{original_file}_part_{i}`.
+
+    Args:
+        file_path: Path to the file that should be split.
+        parts: Number of parts the file should be split into.
+
+    Returns:
+        Paths to the part files.
+    """
+    with open(file_path, "rb") as file:
+        content = file.read()
+
+    content_size = len(content)
+    chunk_size = int((content_size + parts) / parts)
+
+    part_id = 1
+    part_file_paths = []
+    for content_offset in range(0, content_size + 1, chunk_size):
+        part_file_name = f"{file_path}_part_{part_id}"
+        part_file_paths.append(part_file_name)
+        with open(part_file_name, "wb") as out_file:
+            out_file.write(content[content_offset : content_offset + chunk_size])
+        part_id += 1
+
+    return part_file_paths
+
+
+def get_file_content(file_path: str, content_len: Optional[int] = None, mode: str = "r") -> Any:
+    """Returns content of specified file.
+
+    Args:
+        file_path: Path to the file.
+        content_len: Limit of content length. If None, then entire file content is returned;
+            otherwise only the first content_len bytes of the content are returned.
+        mode: Mode of opening the file.
+
+    Returns:
+        Content of the specified file.
+    """
+    with open(file_path, mode) as file:
+        if content_len:
+            content = file.read(content_len)
+        else:
+            content = file.read()
+
+    return content
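For reference, a minimal usage sketch of the helpers added to file_helper above. It is not part of this commit, and it assumes the ASSETS_DIR directory already exists under the current working directory (generate_file_with_content only creates parent directories when an explicit file_path is given).

# Hypothetical usage sketch for the new file_helper functions (not part of this commit).
from file_helper import generate_file_with_content, get_file_content, split_file

# Create a file with known text content at a random path under ASSETS_DIR.
path = generate_file_with_content(content="Version 1")

# Read the content back; default mode "r" returns text, content_len limits the read.
assert get_file_content(path) == "Version 1"

# Split the file into two roughly equal parts named {path}_part_1 and {path}_part_2.
parts = split_file(path, parts=2)
assert len(parts) == 2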
@@ -1,64 +1,33 @@
-import os
 import time
-import uuid

 import allure
-from common import ASSETS_DIR, SIMPLE_OBJ_SIZE, STORAGE_GC_TIME
+from common import STORAGE_GC_TIME


-def create_file_with_content(file_path: str = None, content: str = None) -> str:
-    mode = "w+"
-    if not content:
-        content = os.urandom(SIMPLE_OBJ_SIZE)
-        mode = "wb"
-
-    if not file_path:
-        file_path = f"{os.getcwd()}/{ASSETS_DIR}/{str(uuid.uuid4())}"
-    else:
-        if not os.path.exists(os.path.dirname(file_path)):
-            os.makedirs(os.path.dirname(file_path))
-
-    with open(file_path, mode) as out_file:
-        out_file.write(content)
-
-    return file_path
-
-
-def get_file_content(file_path: str, content_len: int = None, mode="r") -> str:
-    with open(file_path, mode) as out_file:
-        if content_len:
-            content = out_file.read(content_len)
-        else:
-            content = out_file.read()
-
-    return content
-
-
-def split_file(file_path: str, parts: int) -> list[str]:
-    files = []
-    with open(file_path, "rb") as in_file:
-        data = in_file.read()
-
-    content_size = len(data)
-
-    chunk_size = int((content_size + parts) / parts)
-    part_id = 1
-    for start_position in range(0, content_size + 1, chunk_size):
-        part_file_name = f"{file_path}_part_{part_id}"
-        files.append(part_file_name)
-        with open(part_file_name, "wb") as out_file:
-            out_file.write(data[start_position : start_position + chunk_size])
-        part_id += 1
-
-    return files
-
-
 def parse_time(value: str) -> int:
-    if value.endswith("s"):
-        return int(value[:-1])
-
-    if value.endswith("m"):
-        return int(value[:-1]) * 60
+    """Converts time interval in text form into time interval as number of seconds.
+
+    Args:
+        value: time interval as text.
+
+    Returns:
+        Number of seconds in the parsed time interval.
+    """
+    value = value.lower()
+
+    for suffix in ["s", "sec"]:
+        if value.endswith(suffix):
+            return int(value[: -len(suffix)])
+
+    for suffix in ["m", "min"]:
+        if value.endswith(suffix):
+            return int(value[: -len(suffix)]) * 60
+
+    for suffix in ["h", "hr", "hour"]:
+        if value.endswith(suffix):
+            return int(value[: -len(suffix)]) * 60 * 60
+
+    raise ValueError(f"Unknown units in time value '{value}'")


 def placement_policy_from_container(container_info: str) -> str:
@@ -88,7 +57,6 @@ def placement_policy_from_container(container_info: str) -> str:


 def wait_for_gc_pass_on_storage_nodes() -> None:
-    # We add 15 seconds to allow some time for GC process itself
     wait_time = parse_time(STORAGE_GC_TIME)
     with allure.step(f"Wait {wait_time}s until GC completes on storage nodes"):
         time.sleep(wait_time)
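Likewise, a few illustrative checks for the rewritten parse_time in utility (a sketch, not part of this commit; the example values are assumptions, not taken from the repository):

# Illustrative checks for the rewritten parse_time (not part of this commit).
from utility import parse_time

assert parse_time("30s") == 30            # short seconds suffix
assert parse_time("90sec") == 90          # long seconds suffix
assert parse_time("5min") == 5 * 60       # minutes
assert parse_time("2H") == 2 * 60 * 60    # value is lowercased before matching

# A value with an unrecognized unit now raises ValueError instead of silently returning None.
try:
    parse_time("10d")
except ValueError:
    pass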
@@ -6,7 +6,7 @@ import pytest
 from common import COMPLEX_OBJ_SIZE, SIMPLE_OBJ_SIZE
 from container import create_container
 from epoch import get_epoch, tick_epoch
-from file_helper import generate_file, get_file_hash
+from file_helper import generate_file, get_file_content, get_file_hash
 from grpc_responses import OBJECT_ALREADY_REMOVED, OBJECT_NOT_FOUND, error_matches_status
 from python_keywords.neofs_verbs import (
     delete_object,
@@ -19,7 +19,7 @@ from python_keywords.neofs_verbs import (
 )
 from python_keywords.storage_policy import get_complex_object_copies, get_simple_object_copies
 from tombstone import verify_head_tombstone
-from utility import get_file_content, wait_for_gc_pass_on_storage_nodes
+from utility import wait_for_gc_pass_on_storage_nodes

 logger = logging.getLogger("NeoLogger")
@@ -6,7 +6,13 @@ import allure
 import pytest
 from common import ASSETS_DIR, COMPLEX_OBJ_SIZE, SIMPLE_OBJ_SIZE
 from epoch import tick_epoch
-from file_helper import generate_file, get_file_hash
+from file_helper import (
+    generate_file,
+    generate_file_with_content,
+    get_file_content,
+    get_file_hash,
+    split_file,
+)
 from s3_helper import (
     check_objects_in_bucket,
     check_tags_by_bucket,
@@ -14,7 +20,6 @@ from s3_helper import (
     set_bucket_versioning,
     try_to_get_objects_and_expect_error,
 )
-from utility import create_file_with_content, get_file_content, split_file

 from steps import s3_gate_bucket, s3_gate_object
 from steps.aws_cli_client import AwsCliClient
@@ -171,8 +176,8 @@ class TestS3Gate(TestS3GateBase):
         if not isinstance(self.s3_client, AwsCliClient):
             pytest.skip("This test is not supported with boto3 client")

-        create_file_with_content(file_path=file_path_1)
-        create_file_with_content(file_path=file_path_2)
+        generate_file_with_content(file_path=file_path_1)
+        generate_file_with_content(file_path=file_path_2)

         self.s3_client.sync(bucket_name=bucket, dir_path=os.path.dirname(file_path_1))

@@ -182,7 +187,7 @@ class TestS3Gate(TestS3GateBase):
         with allure.step("Check these are the same objects"):
             assert set(key_to_path.keys()) == set(
                 objects
-            ), f"Expected all abjects saved. Got {objects}"
+            ), f"Expected all objects saved. Got {objects}"
             for obj_key in objects:
                 got_object = s3_gate_object.get_object_s3(self.s3_client, bucket, obj_key)
                 assert get_file_hash(got_object) == get_file_hash(
@@ -196,13 +201,13 @@ class TestS3Gate(TestS3GateBase):
         """
         version_1_content = "Version 1"
         version_2_content = "Version 2"
-        file_name_simple = create_file_with_content(content=version_1_content)
+        file_name_simple = generate_file_with_content(content=version_1_content)
         obj_key = os.path.basename(file_name_simple)
         set_bucket_versioning(self.s3_client, bucket, s3_gate_bucket.VersioningStatus.ENABLED)

         with allure.step("Put several versions of object into bucket"):
             version_id_1 = s3_gate_object.put_object_s3(self.s3_client, bucket, file_name_simple)
-            create_file_with_content(file_path=file_name_simple, content=version_2_content)
+            generate_file_with_content(file_path=file_name_simple, content=version_2_content)
             version_id_2 = s3_gate_object.put_object_s3(self.s3_client, bucket, file_name_simple)

         with allure.step("Check bucket shows all versions"):
@@ -7,10 +7,9 @@ import allure
 import pytest
 from common import ASSETS_DIR, COMPLEX_OBJ_SIZE, FREE_STORAGE, SIMPLE_OBJ_SIZE
 from data_formatters import get_wallet_public_key
-from file_helper import concat_files, generate_file, get_file_hash
+from file_helper import concat_files, generate_file, generate_file_with_content, get_file_hash
 from python_keywords.payment_neogo import neofs_deposit, transfer_mainnet_gas
 from s3_helper import check_objects_in_bucket, set_bucket_versioning
-from utility import create_file_with_content
 from wallet import init_wallet

 from steps import s3_gate_bucket, s3_gate_object
@@ -79,7 +78,7 @@ class TestS3GateObject(TestS3GateBase):
     @allure.title("Test S3: Copy version of object")
     def test_s3_copy_version_object(self):
         version_1_content = "Version 1"
-        file_name_simple = create_file_with_content(content=version_1_content)
+        file_name_simple = generate_file_with_content(content=version_1_content)
         obj_key = os.path.basename(file_name_simple)

         bucket_1 = s3_gate_bucket.create_bucket_s3(self.s3_client)
@@ -116,7 +115,7 @@ class TestS3GateObject(TestS3GateBase):
     @allure.title("Test S3: Checking copy with acl")
     def test_s3_copy_acl(self):
         version_1_content = "Version 1"
-        file_name_simple = create_file_with_content(content=version_1_content)
+        file_name_simple = generate_file_with_content(content=version_1_content)
         obj_key = os.path.basename(file_name_simple)

         bucket_1 = s3_gate_bucket.create_bucket_s3(self.s3_client)
@@ -246,7 +245,7 @@ class TestS3GateObject(TestS3GateBase):
     def test_s3_delete_versioning(self):
         version_1_content = "Version 1"
         version_2_content = "Version 2"
-        file_name_simple = create_file_with_content(content=version_1_content)
+        file_name_simple = generate_file_with_content(content=version_1_content)

         obj_key = os.path.basename(file_name_simple)
         bucket = s3_gate_bucket.create_bucket_s3(self.s3_client)
@@ -254,7 +253,7 @@ class TestS3GateObject(TestS3GateBase):

         with allure.step("Put several versions of object into bucket"):
             version_id_1 = s3_gate_object.put_object_s3(self.s3_client, bucket, file_name_simple)
-            file_name_1 = create_file_with_content(
+            file_name_1 = generate_file_with_content(
                 file_path=file_name_simple, content=version_2_content
             )
             version_id_2 = s3_gate_object.put_object_s3(self.s3_client, bucket, file_name_1)
@@ -308,7 +307,7 @@ class TestS3GateObject(TestS3GateBase):
         version_2_content = "Version 2"
         version_3_content = "Version 3"
         version_4_content = "Version 4"
-        file_name_1 = create_file_with_content(content=version_1_content)
+        file_name_1 = generate_file_with_content(content=version_1_content)

         obj_key = os.path.basename(file_name_1)
         bucket = s3_gate_bucket.create_bucket_s3(self.s3_client)
@@ -316,11 +315,17 @@ class TestS3GateObject(TestS3GateBase):

         with allure.step("Put several versions of object into bucket"):
             version_id_1 = s3_gate_object.put_object_s3(self.s3_client, bucket, file_name_1)
-            file_name_2 = create_file_with_content(file_path=file_name_1, content=version_2_content)
+            file_name_2 = generate_file_with_content(
+                file_path=file_name_1, content=version_2_content
+            )
             version_id_2 = s3_gate_object.put_object_s3(self.s3_client, bucket, file_name_2)
-            file_name_3 = create_file_with_content(file_path=file_name_1, content=version_3_content)
+            file_name_3 = generate_file_with_content(
+                file_path=file_name_1, content=version_3_content
+            )
             version_id_3 = s3_gate_object.put_object_s3(self.s3_client, bucket, file_name_3)
-            file_name_4 = create_file_with_content(file_path=file_name_1, content=version_4_content)
+            file_name_4 = generate_file_with_content(
+                file_path=file_name_1, content=version_4_content
+            )
             version_id_4 = s3_gate_object.put_object_s3(self.s3_client, bucket, file_name_4)
             version_ids = {version_id_1, version_id_2, version_id_3, version_id_4}

@@ -352,14 +357,14 @@ class TestS3GateObject(TestS3GateBase):
     def test_s3_get_versioning(self):
         version_1_content = "Version 1"
         version_2_content = "Version 2"
-        file_name_simple = create_file_with_content(content=version_1_content)
+        file_name_simple = generate_file_with_content(content=version_1_content)

         obj_key = os.path.basename(file_name_simple)
         bucket = s3_gate_bucket.create_bucket_s3(self.s3_client)
         set_bucket_versioning(self.s3_client, bucket, s3_gate_bucket.VersioningStatus.ENABLED)
         with allure.step("Put several versions of object into bucket"):
             version_id_1 = s3_gate_object.put_object_s3(self.s3_client, bucket, file_name_simple)
-            file_name_1 = create_file_with_content(
+            file_name_1 = generate_file_with_content(
                 file_path=file_name_simple, content=version_2_content
             )
             version_id_2 = s3_gate_object.put_object_s3(self.s3_client, bucket, file_name_1)
@@ -397,7 +402,7 @@ class TestS3GateObject(TestS3GateBase):
         set_bucket_versioning(self.s3_client, bucket, s3_gate_bucket.VersioningStatus.ENABLED)
         with allure.step("Put several versions of object into bucket"):
             version_id_1 = s3_gate_object.put_object_s3(self.s3_client, bucket, file_path)
-            file_name_1 = create_file_with_content(file_path=file_path)
+            file_name_1 = generate_file_with_content(file_path=file_path)
             version_id_2 = s3_gate_object.put_object_s3(self.s3_client, bucket, file_name_1)

         with allure.step("Get first version of object"):
@@ -479,7 +484,7 @@ class TestS3GateObject(TestS3GateBase):
             version_id_1 = s3_gate_object.put_object_s3(
                 self.s3_client, bucket, file_path, Metadata=object_metadata
             )
-            file_name_1 = create_file_with_content(file_path=file_path)
+            file_name_1 = generate_file_with_content(file_path=file_path)
             version_id_2 = s3_gate_object.put_object_s3(self.s3_client, bucket, file_name_1)

         with allure.step("Get head of first version of object"):
@@ -573,7 +578,7 @@ class TestS3GateObject(TestS3GateBase):
         ], "Tags must be the same"

         with allure.step("Rewrite file into bucket"):
-            file_path_2 = create_file_with_content(file_path=file_path_1)
+            file_path_2 = generate_file_with_content(file_path=file_path_1)
             s3_gate_object.put_object_s3(
                 self.s3_client, bucket, file_path_2, Metadata=object_2_metadata, Tagging=tag_2
             )
@@ -608,7 +613,7 @@ class TestS3GateObject(TestS3GateBase):
         ], "Tags must be the same"

         with allure.step("Put new version of file into bucket"):
-            file_path_4 = create_file_with_content(file_path=file_path_3)
+            file_path_4 = generate_file_with_content(file_path=file_path_3)
             version_id_2 = s3_gate_object.put_object_s3(self.s3_client, bucket, file_path_4)
         versions = s3_gate_object.list_objects_versions_s3(self.s3_client, bucket)
         obj_versions = {
@@ -685,7 +690,7 @@ class TestS3GateObject(TestS3GateBase):
         assert get_file_hash(file_path_1) == get_file_hash(object_1), "Hashes must be the same"

         with allure.step("Put object with acl public-read"):
-            file_path_2 = create_file_with_content(file_path=file_path_1)
+            file_path_2 = generate_file_with_content(file_path=file_path_1)
             s3_gate_object.put_object_s3(self.s3_client, bucket, file_path_2, ACL="public-read")
             obj_acl = s3_gate_object.get_object_acl_s3(self.s3_client, bucket, file_name)
             obj_permission = [permission.get("Permission") for permission in obj_acl]
@@ -697,7 +702,7 @@ class TestS3GateObject(TestS3GateBase):
         assert get_file_hash(file_path_2) == get_file_hash(object_2), "Hashes must be the same"

         with allure.step("Put object with acl public-read-write"):
-            file_path_3 = create_file_with_content(file_path=file_path_1)
+            file_path_3 = generate_file_with_content(file_path=file_path_1)
             s3_gate_object.put_object_s3(
                 self.s3_client, bucket, file_path_3, ACL="public-read-write"
             )
@@ -711,7 +716,7 @@ class TestS3GateObject(TestS3GateBase):
         assert get_file_hash(file_path_3) == get_file_hash(object_3), "Hashes must be the same"

         with allure.step("Put object with acl authenticated-read"):
-            file_path_4 = create_file_with_content(file_path=file_path_1)
+            file_path_4 = generate_file_with_content(file_path=file_path_1)
             s3_gate_object.put_object_s3(
                 self.s3_client, bucket, file_path_4, ACL="authenticated-read"
             )
@@ -729,7 +734,7 @@ class TestS3GateObject(TestS3GateBase):
         file_name_5 = self.object_key_from_file_path(file_path_5)

         with allure.step("Put object with --grant-full-control id=mycanonicaluserid"):
-            file_path_6 = create_file_with_content(file_path=file_path_5)
+            file_path_6 = generate_file_with_content(file_path=file_path_5)
             s3_gate_object.put_object_s3(
                 self.s3_client,
                 bucket,
@@ -748,7 +753,7 @@ class TestS3GateObject(TestS3GateBase):
         with allure.step(
             "Put object with --grant-read uri=http://acs.amazonaws.com/groups/global/AllUsers"
         ):
-            file_path_7 = create_file_with_content(file_path=file_path_5)
+            file_path_7 = generate_file_with_content(file_path=file_path_5)
             s3_gate_object.put_object_s3(
                 self.s3_client,
                 bucket,
@@ -801,7 +806,7 @@ class TestS3GateObject(TestS3GateBase):
             "Put new version of object with [--object-lock-mode COMPLIANCE] и [--object-lock-retain-until-date +3days]"
         ):
             date_obj = datetime.utcnow() + timedelta(days=2)
-            file_name_1 = create_file_with_content(file_path=file_path_1)
+            file_name_1 = generate_file_with_content(file_path=file_path_1)
             s3_gate_object.put_object_s3(
                 self.s3_client,
                 bucket,
@@ -826,7 +831,7 @@ class TestS3GateObject(TestS3GateBase):
             "Put new version of object with [--object-lock-mode COMPLIANCE] и [--object-lock-retain-until-date +2days]"
         ):
             date_obj = datetime.utcnow() + timedelta(days=3)
-            file_name_1 = create_file_with_content(file_path=file_path_1)
+            file_name_1 = generate_file_with_content(file_path=file_path_1)
             s3_gate_object.put_object_s3(
                 self.s3_client,
                 bucket,
|
@ -884,8 +889,8 @@ class TestS3GateObject(TestS3GateBase):
|
||||||
if not isinstance(self.s3_client, AwsCliClient):
|
if not isinstance(self.s3_client, AwsCliClient):
|
||||||
pytest.skip("This test is not supported with boto3 client")
|
pytest.skip("This test is not supported with boto3 client")
|
||||||
|
|
||||||
create_file_with_content(file_path=file_path_1)
|
generate_file_with_content(file_path=file_path_1)
|
||||||
create_file_with_content(file_path=file_path_2)
|
generate_file_with_content(file_path=file_path_2)
|
||||||
bucket = s3_gate_bucket.create_bucket_s3(self.s3_client)
|
bucket = s3_gate_bucket.create_bucket_s3(self.s3_client)
|
||||||
set_bucket_versioning(self.s3_client, bucket, s3_gate_bucket.VersioningStatus.ENABLED)
|
set_bucket_versioning(self.s3_client, bucket, s3_gate_bucket.VersioningStatus.ENABLED)
|
||||||
if sync_type == "sync":
|
if sync_type == "sync":
|
||||||
|
|
|
@@ -2,9 +2,8 @@ import os

 import allure
 import pytest
-from file_helper import generate_file
+from file_helper import generate_file, generate_file_with_content
 from s3_helper import set_bucket_versioning
-from utility import create_file_with_content

 from steps import s3_gate_bucket, s3_gate_object
 from steps.s3_gate_base import TestS3GateBase
@@ -54,7 +53,7 @@ class TestS3GateVersioning(TestS3GateBase):

         with allure.step("Put several versions of object into bucket"):
             version_id_1 = s3_gate_object.put_object_s3(self.s3_client, bucket, file_path)
-            file_name_1 = create_file_with_content(file_path=file_path)
+            file_name_1 = generate_file_with_content(file_path=file_path)
             version_id_2 = s3_gate_object.put_object_s3(self.s3_client, bucket, file_name_1)

         with allure.step("Check bucket shows all versions"):