mirror of https://github.com/ceph/s3-tests.git (synced 2024-11-21 23:29:47 +00:00)

Merge branch 'master' into using_get_bucket_name

This commit is contained in: fa0ea9afe0
10 changed files with 244 additions and 57 deletions
.gitignore (vendored, 1 change)

@@ -10,5 +10,6 @@
 
 /*.egg-info
 /virtualenv
+/venv
 
 config.yaml
@@ -52,6 +52,15 @@ You can run only the boto3 tests with::
 This section contains some basic tests for the AssumeRole, GetSessionToken and AssumeRoleWithWebIdentity APIs. The test file is located under ``s3tests_boto3/functional``.
 
+To run the STS tests, the vstart cluster should be started with the following parameters (in addition to any parameters already used with it)::
+
+    vstart.sh -o rgw_sts_key=abcdefghijklmnop -o rgw_s3_auth_use_sts=true
+
+Note that ``rgw_sts_key`` can be set to any string that is 128 bits (16 characters) in length.
+
+After the cluster is up, the following command should be executed::
+
+    radosgw-admin caps add --tenant=testx --uid="9876543210abcdef0123456789abcdef0123456789abcdef0123456789abcdef" --caps="roles=*"
 
 You can run only the sts tests (all three APIs) with::
 
     S3TEST_CONF=your.conf tox s3tests_boto3/functional/test_sts.py
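For reference, the example key above is exactly 128 bits: 16 ASCII characters at 8 bits each. A throwaway sanity check::

    key = 'abcdefghijklmnop'
    assert len(key.encode('ascii')) * 8 == 128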
@@ -12,3 +12,4 @@ pytz >=2011k
 httplib2
 lxml
 pytest
+tox
@@ -1,5 +1,5 @@
 import sys
-import collections
+from collections.abc import Container
 import pytest
 import string
 import random
@@ -159,9 +159,9 @@ def _test_website_prep(bucket, xml_template, hardcoded_fields = {}, expect_fail=
     return f
 
 def __website_expected_reponse_status(res, status, reason):
-    if not isinstance(status, collections.Container):
+    if not isinstance(status, Container):
         status = set([status])
-    if not isinstance(reason, collections.Container):
+    if not isinstance(reason, Container):
         reason = set([reason])
 
     if status is not IGNORE_FIELD:
@@ -179,7 +179,7 @@ def _website_expected_default_html(**kwargs):
         v = kwargs[k]
         if isinstance(v, str):
             v = [v]
-        elif not isinstance(v, collections.Container):
+        elif not isinstance(v, Container):
             v = [v]
         for v2 in v:
             s = '<li>%s: %s</li>' % (k,v2)
@@ -199,7 +199,7 @@ def _website_expected_error_response(res, bucket_name, status, reason, code, con
     if code is not IGNORE_FIELD:
         assert errorcode == code
 
-    if not isinstance(content, collections.Container):
+    if not isinstance(content, Container):
         content = set([content])
     for f in content:
         if f is not IGNORE_FIELD and f is not None:
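Background for the import change above: the abstract-base-class aliases that used to live directly in ``collections`` (including ``collections.Container``) were deprecated since Python 3.3 and removed in Python 3.10, so these type checks must import from ``collections.abc`` instead. A minimal sketch of the wrapping idiom the helpers use (names here are illustrative, not from the diff)::

    from collections.abc import Container

    def ensure_container(value):
        # scalars (e.g. a bare status code) are wrapped in a set so the
        # caller can always test membership with ``in``
        if not isinstance(value, Container):
            return set([value])
        return value

    assert ensure_container(200) == {200}
    assert ensure_container({200, 301}) == {200, 301}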
@@ -82,18 +82,13 @@ def get_objects_list(bucket, client=None, prefix=None):
 # generator function that returns object listings in batches, where each
 # batch is a list of dicts compatible with delete_objects()
 def list_versions(client, bucket, batch_size):
-    key_marker = ''
-    version_marker = ''
+    kwargs = {'Bucket': bucket, 'MaxKeys': batch_size}
     truncated = True
     while truncated:
-        listing = client.list_object_versions(
-            Bucket=bucket,
-            KeyMarker=key_marker,
-            VersionIdMarker=version_marker,
-            MaxKeys=batch_size)
+        listing = client.list_object_versions(**kwargs)
 
-        key_marker = listing.get('NextKeyMarker')
-        version_marker = listing.get('NextVersionIdMarker')
+        kwargs['KeyMarker'] = listing.get('NextKeyMarker')
+        kwargs['VersionIdMarker'] = listing.get('NextVersionIdMarker')
         truncated = listing['IsTruncated']
 
         objs = listing.get('Versions', []) + listing.get('DeleteMarkers', [])
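A minimal consumer sketch for the rewritten generator (hypothetical bucket name; assumes the generator yields lists of ``{'Key': ..., 'VersionId': ...}`` dicts, per the comment above)::

    import boto3

    client = boto3.client('s3')
    for batch in list_versions(client, 'example-bucket', batch_size=100):
        # each batch is already shaped for the Objects field of delete_objects()
        client.delete_objects(Bucket='example-bucket',
                              Delete={'Objects': batch, 'Quiet': True})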
@@ -477,6 +477,7 @@ def test_allow_bucket_actions_in_user_policy():
 
 @pytest.mark.user_policy
 @pytest.mark.test_of_iam
+@pytest.mark.fails_on_dbstore
 def test_deny_bucket_actions_in_user_policy():
     client = get_iam_client()
     s3_client = get_alt_client()

@@ -551,6 +552,7 @@ def test_allow_object_actions_in_user_policy():
 
 @pytest.mark.user_policy
 @pytest.mark.test_of_iam
+@pytest.mark.fails_on_dbstore
 def test_deny_object_actions_in_user_policy():
     client = get_iam_client()
     s3_client_alt = get_alt_client()

@@ -625,6 +627,7 @@ def test_allow_multipart_actions_in_user_policy():
 
 @pytest.mark.user_policy
 @pytest.mark.test_of_iam
+@pytest.mark.fails_on_dbstore
 def test_deny_multipart_actions_in_user_policy():
     client = get_iam_client()
     s3_client = get_alt_client()

@@ -667,6 +670,7 @@ def test_deny_multipart_actions_in_user_policy():
 
 @pytest.mark.user_policy
 @pytest.mark.test_of_iam
+@pytest.mark.fails_on_dbstore
 def test_allow_tagging_actions_in_user_policy():
     client = get_iam_client()
     s3_client_alt = get_alt_client()

@@ -712,6 +716,7 @@ def test_allow_tagging_actions_in_user_policy():
 
 @pytest.mark.user_policy
 @pytest.mark.test_of_iam
+@pytest.mark.fails_on_dbstore
 def test_deny_tagging_actions_in_user_policy():
     client = get_iam_client()
     s3_client = get_alt_client()

@@ -763,6 +768,7 @@ def test_deny_tagging_actions_in_user_policy():
 
 @pytest.mark.user_policy
 @pytest.mark.test_of_iam
+@pytest.mark.fails_on_dbstore
 def test_verify_conflicting_user_policy_statements():
     s3client = get_alt_client()
     bucket = get_new_bucket(client=s3client)

@@ -794,6 +800,7 @@ def test_verify_conflicting_user_policy_statements():
 
 @pytest.mark.user_policy
 @pytest.mark.test_of_iam
+@pytest.mark.fails_on_dbstore
 def test_verify_conflicting_user_policies():
     s3client = get_alt_client()
     bucket = get_new_bucket(client=s3client)
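A practical note on the ``fails_on_dbstore`` mark added throughout this file: because ``tox.ini`` forwards its positional arguments straight to pytest (see the tox.ini hunk at the end of this diff), the mark can be used to deselect these tests when running against a dbstore backend, e.g.::

    S3TEST_CONF=your.conf tox -- -m 'not fails_on_dbstore'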
@@ -1573,6 +1573,19 @@ def test_object_write_to_nonexist_bucket():
     assert error_code == 'NoSuchBucket'
 
 
+def _ev_add_te_header(request, **kwargs):
+    request.headers.add_header('Transfer-Encoding', 'chunked')
+
+def test_object_write_with_chunked_transfer_encoding():
+    bucket_name = get_new_bucket()
+    client = get_client()
+
+    client.meta.events.register_first('before-sign.*.*', _ev_add_te_header)
+    response = client.put_object(Bucket=bucket_name, Key='foo', Body='bar')
+
+    assert response['ResponseMetadata']['HTTPStatusCode'] == 200
+
+
 def test_bucket_create_delete():
     bucket_name = get_new_bucket()
     client = get_client()
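A note on the event hook used above: boto3 emits ``before-sign.<service>.<operation>`` events, so the wildcard registration fires for every request the client makes and lets the handler mutate the raw request before it is signed. A narrower, hypothetical registration that would patch only PutObject requests::

    client.meta.events.register_first('before-sign.s3.PutObject', _ev_add_te_header)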
@@ -1624,6 +1637,39 @@ def _make_objs_dict(key_names):
     objs_dict = {'Objects': objs_list}
     return objs_dict
 
+def test_versioning_concurrent_multi_object_delete():
+    num_objects = 5
+    num_threads = 5
+    bucket_name = get_new_bucket()
+
+    check_configure_versioning_retry(bucket_name, "Enabled", "Enabled")
+
+    key_names = ["key_{:d}".format(x) for x in range(num_objects)]
+    bucket = _create_objects(bucket_name=bucket_name, keys=key_names)
+
+    client = get_client()
+    versions = client.list_object_versions(Bucket=bucket_name)['Versions']
+    assert len(versions) == num_objects
+    objs_dict = {'Objects': [dict((k, v[k]) for k in ["Key", "VersionId"]) for v in versions]}
+    results = [None] * num_threads
+
+    def do_request(n):
+        results[n] = client.delete_objects(Bucket=bucket_name, Delete=objs_dict)
+
+    t = []
+    for i in range(num_threads):
+        thr = threading.Thread(target = do_request, args=[i])
+        thr.start()
+        t.append(thr)
+    _do_wait_completion(t)
+
+    for response in results:
+        assert len(response['Deleted']) == num_objects
+        assert 'Errors' not in response
+
+    response = client.list_objects(Bucket=bucket_name)
+    assert 'Contents' not in response
+
 def test_multi_object_delete():
     key_names = ['key0', 'key1', 'key2']
     bucket_name = _create_objects(keys=key_names)
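``_do_wait_completion`` is an existing helper in this test module; a plausible sketch of what it does (not taken from this diff) is simply joining the workers::

    def _do_wait_completion(threads):
        # block until every worker thread has finished
        for t in threads:
            t.join()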
@@ -2818,6 +2864,53 @@ def test_post_object_upload_size_below_minimum():
     r = requests.post(url, files=payload, verify=get_config_ssl_verify())
     assert r.status_code == 400
 
+def test_post_object_upload_size_rgw_chunk_size_bug():
+    # Test for https://tracker.ceph.com/issues/58627
+    # TODO: if this value is different in Teuthology runs, this would need tuning
+    # https://github.com/ceph/ceph/blob/main/qa/suites/rgw/verify/striping%24/stripe-greater-than-chunk.yaml
+    _rgw_max_chunk_size = 4 * 2**20  # 4MiB
+    min_size = _rgw_max_chunk_size
+    max_size = _rgw_max_chunk_size * 3
+    # [(chunk),(small)]
+    test_payload_size = _rgw_max_chunk_size + 200  # extra bit to push it over the chunk boundary
+    # it should be valid when we run this test!
+    assert test_payload_size > min_size
+    assert test_payload_size < max_size
+
+    bucket_name = get_new_bucket()
+    client = get_client()
+
+    url = _get_post_url(bucket_name)
+    utc = pytz.utc
+    expires = datetime.datetime.now(utc) + datetime.timedelta(seconds=+6000)
+
+    policy_document = {"expiration": expires.strftime("%Y-%m-%dT%H:%M:%SZ"),
+                       "conditions": [
+                           {"bucket": bucket_name},
+                           ["starts-with", "$key", "foo"],
+                           {"acl": "private"},
+                           ["starts-with", "$Content-Type", "text/plain"],
+                           ["content-length-range", min_size, max_size],
+                       ]
+                       }
+
+    test_payload = 'x' * test_payload_size
+
+    json_policy_document = json.JSONEncoder().encode(policy_document)
+    bytes_json_policy_document = bytes(json_policy_document, 'utf-8')
+    policy = base64.b64encode(bytes_json_policy_document)
+    aws_secret_access_key = get_main_aws_secret_key()
+    aws_access_key_id = get_main_aws_access_key()
+
+    signature = base64.b64encode(hmac.new(bytes(aws_secret_access_key, 'utf-8'), policy, hashlib.sha1).digest())
+
+    payload = OrderedDict([("key", "foo.txt"), ("AWSAccessKeyId", aws_access_key_id),
+                           ("acl", "private"), ("signature", signature), ("policy", policy),
+                           ("Content-Type", "text/plain"), ('file', (test_payload))])
+
+    r = requests.post(url, files=payload, verify=get_config_ssl_verify())
+    assert r.status_code == 204
+
 def test_post_object_empty_conditions():
     bucket_name = get_new_bucket()
     client = get_client()
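The boundary arithmetic behind the payload size, for reference::

    _rgw_max_chunk_size = 4 * 2**20                  # 4194304 bytes (4 MiB)
    test_payload_size = _rgw_max_chunk_size + 200    # 4194504 bytes
    # 200 bytes past the first chunk, and inside the signed
    # content-length-range of [4194304, 12582912]
    assert 4194304 < 4194504 < 3 * 4194304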
@@ -7412,20 +7505,17 @@ def test_versioning_multi_object_delete():
     num_versions = 2
 
     (version_ids, contents) = create_multiple_versions(client, bucket_name, key, num_versions)
+    assert len(version_ids) == 2
 
-    response = client.list_object_versions(Bucket=bucket_name)
-    versions = response['Versions']
-    versions.reverse()
-
-    for version in versions:
-        client.delete_object(Bucket=bucket_name, Key=key, VersionId=version['VersionId'])
+    # delete both versions
+    objects = [{'Key': key, 'VersionId': v} for v in version_ids]
+    client.delete_objects(Bucket=bucket_name, Delete={'Objects': objects})
 
     response = client.list_object_versions(Bucket=bucket_name)
     assert not 'Versions' in response
 
     # now remove again, should all succeed due to idempotency
-    for version in versions:
-        client.delete_object(Bucket=bucket_name, Key=key, VersionId=version['VersionId'])
+    client.delete_objects(Bucket=bucket_name, Delete={'Objects': objects})
 
     response = client.list_object_versions(Bucket=bucket_name)
     assert not 'Versions' in response
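For reference, the batch request the rewritten test sends has this shape (version IDs hypothetical)::

    Delete={'Objects': [
        {'Key': 'key', 'VersionId': 'v1-...'},
        {'Key': 'key', 'VersionId': 'v2-...'},
    ]}

Deleting a specific version is idempotent in S3, which is why the second ``delete_objects`` call with the same list is still expected to succeed.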
@@ -7440,33 +7530,24 @@ def test_versioning_multi_object_delete_with_marker():
     num_versions = 2
 
     (version_ids, contents) = create_multiple_versions(client, bucket_name, key, num_versions)
+    assert len(version_ids) == num_versions
+    objects = [{'Key': key, 'VersionId': v} for v in version_ids]
 
-    client.delete_object(Bucket=bucket_name, Key=key)
-    response = client.list_object_versions(Bucket=bucket_name)
-    versions = response['Versions']
-    delete_markers = response['DeleteMarkers']
-
-    version_ids.append(delete_markers[0]['VersionId'])
-    assert len(version_ids) == 3
-    assert len(delete_markers) == 1
-
-    for version in versions:
-        client.delete_object(Bucket=bucket_name, Key=key, VersionId=version['VersionId'])
-
-    for delete_marker in delete_markers:
-        client.delete_object(Bucket=bucket_name, Key=key, VersionId=delete_marker['VersionId'])
+    # create a delete marker
+    response = client.delete_object(Bucket=bucket_name, Key=key)
+    assert response['DeleteMarker']
+    objects += [{'Key': key, 'VersionId': response['VersionId']}]
+
+    # delete all versions
+    client.delete_objects(Bucket=bucket_name, Delete={'Objects': objects})
 
     response = client.list_object_versions(Bucket=bucket_name)
     assert not 'Versions' in response
     assert not 'DeleteMarkers' in response
 
-    for version in versions:
-        client.delete_object(Bucket=bucket_name, Key=key, VersionId=version['VersionId'])
-
-    for delete_marker in delete_markers:
-        client.delete_object(Bucket=bucket_name, Key=key, VersionId=delete_marker['VersionId'])
-
     # now remove again, should all succeed due to idempotency
+    client.delete_objects(Bucket=bucket_name, Delete={'Objects': objects})
 
     response = client.list_object_versions(Bucket=bucket_name)
     assert not 'Versions' in response
     assert not 'DeleteMarkers' in response
@@ -7480,8 +7561,11 @@ def test_versioning_multi_object_delete_with_marker_create():
 
     key = 'key'
 
-    response = client.delete_object(Bucket=bucket_name, Key=key)
-    delete_marker_version_id = response['VersionId']
+    # use delete_objects() to create a delete marker
+    response = client.delete_objects(Bucket=bucket_name, Delete={'Objects': [{'Key': key}]})
+    assert len(response['Deleted']) == 1
+    assert response['Deleted'][0]['DeleteMarker']
+    delete_marker_version_id = response['Deleted'][0]['DeleteMarkerVersionId']
 
     response = client.list_object_versions(Bucket=bucket_name)
     delete_markers = response['DeleteMarkers']
@@ -8299,10 +8383,11 @@ def test_lifecycle_expiration_header_tags_head():
     # stat the object, check header
     response = client.head_object(Bucket=bucket_name, Key=key1)
     assert response['ResponseMetadata']['HTTPStatusCode'] == 200
-    assert check_lifecycle_expiration_header(response, datetime.datetime.now(None), 'rule1', 1)
+    assert check_lifecycle_expiration_header(response, datetime.datetime.now(None), 'rule1', 1) == False
 
 @pytest.mark.lifecycle
 @pytest.mark.lifecycle_expiration
+@pytest.mark.fails_on_dbstore
 def test_lifecycle_expiration_header_and_tags_head():
     now = datetime.datetime.now(None)
     bucket_name = get_new_bucket()

@@ -8344,7 +8429,7 @@ def test_lifecycle_expiration_header_and_tags_head():
     # stat the object, check header
     response = client.head_object(Bucket=bucket_name, Key=key1)
     assert response['ResponseMetadata']['HTTPStatusCode'] == 200
-    assert check_lifecycle_expiration_header(response, datetime.datetime.now(None), 'rule1', 1)
+    assert check_lifecycle_expiration_header(response, datetime.datetime.now(None), 'rule1', 1) == False
 
 @pytest.mark.lifecycle
 def test_lifecycle_set_noncurrent():
@@ -12594,6 +12679,7 @@ def test_sse_s3_default_multipart_upload():
 
     assert response['Metadata'] == metadata
     assert response['ResponseMetadata']['HTTPHeaders']['content-type'] == content_type
+    assert response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'] == 'AES256'
 
     body = _get_body(response)
     assert body == data
@@ -12738,3 +12824,24 @@ def test_sse_s3_encrypted_upload_1mb():
 @pytest.mark.fails_on_dbstore
 def test_sse_s3_encrypted_upload_8mb():
     _test_sse_s3_encrypted_upload(8*1024*1024)
+
+def test_get_object_torrent():
+    client = get_client()
+    bucket_name = get_new_bucket()
+    key = 'Avatar.mpg'
+
+    file_size = 7 * 1024 * 1024
+    data = 'A' * file_size
+
+    client.put_object(Bucket=bucket_name, Key=key, Body=data)
+
+    response = None
+    try:
+        response = client.get_object_torrent(Bucket=bucket_name, Key=key)
+        # if successful, verify the torrent contents are different from the body
+        assert data != _get_body(response)
+    except ClientError as e:
+        # accept 404 errors - torrent support may not be configured
+        status, error_code = _get_status_and_error_code(e.response)
+        assert status == 404
+        assert error_code == 'NoSuchKey'
@@ -403,6 +403,7 @@ def test_count_operation():
 def test_count_json_operation():
     json_obj_name = get_random_string()
     bucket_name = get_new_bucket_name()
 
     num_of_rows = 1
     obj_to_load = create_random_json_object(num_of_rows,10)
     upload_object(bucket_name,json_obj_name,obj_to_load)

@@ -425,6 +426,7 @@ def test_json_column_sum_min_max():
 
     json_obj_name = get_random_string()
     bucket_name = get_new_bucket_name()
 
     upload_object(bucket_name,json_obj_name,json_obj)
 
     json_obj_name_2 = get_random_string()

@@ -491,6 +493,7 @@ def test_json_nullif_expressions():
 
     json_obj_name = get_random_string()
     bucket_name = get_new_bucket_name()
 
     upload_object(bucket_name,json_obj_name,json_obj)
 
     res_s3select_nullif = remove_xml_tags_from_result( run_s3select_json(bucket_name,json_obj_name,"select count(0) from s3object[*].root where nullif(_1.c1,_1.c2) is null ;") ).replace("\n","")
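The ``nullif`` queries in these hunks rely on standard SQL semantics: ``nullif(a, b)`` returns null exactly when ``a`` equals ``b``, so ``where nullif(_1,_2) is null`` counts the rows whose first two columns are equal. For a hypothetical object containing the rows ``1,1`` and ``1,2``, ``select count(0) from s3object where nullif(_1,_2) is null`` would return 1.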
@@ -530,6 +533,7 @@ def test_column_sum_min_max():
 
     csv_obj_name = get_random_string()
     bucket_name = get_new_bucket_name()
 
     upload_object(bucket_name,csv_obj_name,csv_obj)
 
     csv_obj_name_2 = get_random_string()

@@ -596,6 +600,7 @@ def test_nullif_expressions():
 
     csv_obj_name = get_random_string()
     bucket_name = get_new_bucket_name()
 
     upload_object(bucket_name,csv_obj_name,csv_obj)
 
     res_s3select_nullif = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,"select count(0) from s3object where nullif(_1,_2) is null ;") ).replace("\n","")

@@ -651,6 +656,7 @@ def test_nulliftrue_expressions():
 
     csv_obj_name = get_random_string()
     bucket_name = get_new_bucket_name()
 
     upload_object(bucket_name,csv_obj_name,csv_obj)
 
     res_s3select_nullif = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,"select count(0) from s3object where (nullif(_1,_2) is null) = true ;") ).replace("\n","")

@@ -678,6 +684,7 @@ def test_is_not_null_expressions():
 
     csv_obj_name = get_random_string()
     bucket_name = get_new_bucket_name()
 
     upload_object(bucket_name,csv_obj_name,csv_obj)
 
     res_s3select_null = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,"select count(*) from s3object where nullif(_1,_2) is not null ;") ).replace("\n","")

@@ -699,6 +706,7 @@ def test_lowerupper_expressions():
 
     csv_obj_name = get_random_string()
     bucket_name = get_new_bucket_name()
 
     upload_object(bucket_name,csv_obj_name,csv_obj)
 
     res_s3select = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select lower("AB12cd$$") from s3object ;') ).replace("\n","")

@@ -717,6 +725,7 @@ def test_in_expressions():
 
     csv_obj_name = get_random_string()
     bucket_name = get_new_bucket_name()
 
     upload_object(bucket_name,csv_obj_name,csv_obj)
 
     res_s3select_in = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select int(_1) from s3object where int(_1) in(1);')).replace("\n","")

@@ -786,6 +795,7 @@ def test_true_false_in_expressions():
 
     csv_obj_name = get_random_string()
     bucket_name = get_new_bucket_name()
 
     upload_object(bucket_name,csv_obj_name,csv_obj)
 
     res_s3select_in = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select int(_1) from s3object where (int(_1) in(1)) = true;')).replace("\n","")

@@ -831,6 +841,7 @@ def test_like_expressions():
 
     csv_obj_name = get_random_string()
     bucket_name = get_new_bucket_name()
 
     upload_object(bucket_name,csv_obj_name,csv_obj)
 
     res_s3select_like = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select count(*) from s3object where _1 like "%aeio%";')).replace("\n","")

@@ -918,6 +929,7 @@ def test_truefalselike_expressions():
 
     csv_obj_name = get_random_string()
     bucket_name = get_new_bucket_name()
 
     upload_object(bucket_name,csv_obj_name,csv_obj)
 
     res_s3select_like = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select count(*) from s3object where (_1 like "%aeio%") = true;')).replace("\n","")

@@ -963,6 +975,7 @@ def test_nullif_expressions():
 
     csv_obj_name = get_random_string()
     bucket_name = get_new_bucket_name()
 
     upload_object(bucket_name,csv_obj_name,csv_obj)
 
     res_s3select_nullif = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,"select count(0) from stdin where nullif(_1,_2) is null ;") ).replace("\n","")

@@ -990,6 +1003,7 @@ def test_lowerupper_expressions():
 
     csv_obj_name = get_random_string()
     bucket_name = get_new_bucket_name()
 
     upload_object(bucket_name,csv_obj_name,csv_obj)
 
     res_s3select = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select lower("AB12cd$$") from stdin ;') ).replace("\n","")

@@ -1008,6 +1022,7 @@ def test_in_expressions():
 
     csv_obj_name = get_random_string()
     bucket_name = get_new_bucket_name()
 
     upload_object(bucket_name,csv_obj_name,csv_obj)
 
     res_s3select_in = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select int(_1) from stdin where int(_1) in(1);')).replace("\n","")

@@ -1047,6 +1062,7 @@ def test_like_expressions():
 
     csv_obj_name = get_random_string()
     bucket_name = get_new_bucket_name()
 
     upload_object(bucket_name,csv_obj_name,csv_obj)
 
     res_s3select_in = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select count(*) from stdin where _1 like "%aeio%";')).replace("\n","")

@@ -1094,6 +1110,7 @@ def test_complex_expressions():
 
     csv_obj_name = get_random_string()
     bucket_name = get_new_bucket_name()
 
     upload_object(bucket_name,csv_obj_name,csv_obj)
 
     res_s3select = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,"select min(int(_1)),max(int(_2)),min(int(_3))+1 from s3object;")).replace("\n","")

@@ -1130,6 +1147,7 @@ def test_alias():
 
     csv_obj_name = get_random_string()
     bucket_name = get_new_bucket_name()
 
     upload_object(bucket_name,csv_obj_name,csv_obj)
 
     res_s3select_alias = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,"select int(_1) as a1, int(_2) as a2 , (a1+a2) as a3 from s3object where a3>100 and a3<300;") ).replace(",","")

@@ -1149,6 +1167,7 @@ def test_alias_cyclic_refernce():
 
     csv_obj_name = get_random_string()
     bucket_name = get_new_bucket_name()
 
     upload_object(bucket_name,csv_obj_name,csv_obj)
 
     res_s3select_alias = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,"select int(_1) as a1,int(_2) as a2, a1+a4 as a3, a5+a1 as a4, int(_3)+a3 as a5 from s3object;") )

@@ -1334,6 +1353,7 @@ def test_when_then_else_expressions():
 
     csv_obj_name = get_random_string()
     bucket_name = get_new_bucket_name()
 
     upload_object(bucket_name,csv_obj_name,csv_obj)
 
     res_s3select = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select case when cast(_1 as int)>100 and cast(_1 as int)<200 then "(100-200)" when cast(_1 as int)>200 and cast(_1 as int)<300 then "(200-300)" else "NONE" end from s3object;') ).replace("\n","")

@@ -1363,6 +1383,7 @@ def test_coalesce_expressions():
 
     csv_obj_name = get_random_string()
     bucket_name = get_new_bucket_name()
 
     upload_object(bucket_name,csv_obj_name,csv_obj)
 
     res_s3select = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select count(*) from s3object where char_length(_3)>2 and char_length(_4)>2 and cast(substring(_3,1,2) as int) = cast(substring(_4,1,2) as int);') ).replace("\n","")

@@ -1385,6 +1406,7 @@ def test_cast_expressions():
 
     csv_obj_name = get_random_string()
     bucket_name = get_new_bucket_name()
 
     upload_object(bucket_name,csv_obj_name,csv_obj)
 
     res_s3select = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select count(*) from s3object where cast(_3 as int)>999;') ).replace("\n","")

@@ -1424,6 +1446,7 @@ def test_trim_expressions():
 
     csv_obj_name = get_random_string()
     bucket_name = get_new_bucket_name()
 
     upload_object(bucket_name,csv_obj_name,csv_obj)
 
     res_s3select_trim = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select count(*) from s3object where trim(_1) = "aeiou";')).replace("\n","")

@@ -1463,6 +1486,7 @@ def test_truefalse_trim_expressions():
 
     csv_obj_name = get_random_string()
     bucket_name = get_new_bucket_name()
 
     upload_object(bucket_name,csv_obj_name,csv_obj)
 
     res_s3select_trim = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select count(*) from s3object where trim(_1) = "aeiou" = true;')).replace("\n","")

@@ -1502,6 +1526,7 @@ def test_escape_expressions():
 
     csv_obj_name = get_random_string()
     bucket_name = get_new_bucket_name()
 
     upload_object(bucket_name,csv_obj_name,csv_obj)
 
     res_s3select_escape = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select count(*) from s3object where _1 like "%_ar" escape "%";')).replace("\n","")

@@ -1523,6 +1548,7 @@ def test_case_value_expressions():
 
     csv_obj_name = get_random_string()
     bucket_name = get_new_bucket_name()
 
     upload_object(bucket_name,csv_obj_name,csv_obj)
 
     res_s3select_case = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select case cast(_1 as int) when cast(_2 as int) then "case_1_1" else "case_2_2" end from s3object;')).replace("\n","")

@@ -1538,6 +1564,7 @@ def test_bool_cast_expressions():
 
     csv_obj_name = get_random_string()
     bucket_name = get_new_bucket_name()
 
     upload_object(bucket_name,csv_obj_name,csv_obj)
 
     res_s3select_cast = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select count(*) from s3object where cast(int(_1) as bool) = true ;')).replace("\n","")

@@ -1553,6 +1580,7 @@ def test_progress_expressions():
 
     csv_obj_name = get_random_string()
     bucket_name = get_new_bucket_name()
 
     upload_object(bucket_name,csv_obj_name,csv_obj)
 
     obj_size = len(csv_obj.encode('utf-8'))

@@ -1586,6 +1614,7 @@ def test_output_serial_expressions():
 
     csv_obj_name = get_random_string()
     bucket_name = get_new_bucket_name()
 
     upload_object(bucket_name,csv_obj_name,csv_obj)
 
     res_s3select_1 = remove_xml_tags_from_result( run_s3select_output(bucket_name,csv_obj_name,"select _1, _2 from s3object where nullif(_1,_2) is null ;", "ALWAYS") ).replace("\n",",").replace(",","")
@@ -56,6 +56,7 @@ log = logging.getLogger(__name__)
 
 def create_role(iam_client,path,rolename,policy_document,description,sessionduration,permissionboundary,tag_list=None):
     role_err=None
+    role_response = None
     if rolename is None:
         rolename=get_parameter_name()
     if tag_list is None:

@@ -68,6 +69,7 @@ def create_role(iam_client,path,rolename,policy_document,description,sessiondura
 
 def put_role_policy(iam_client,rolename,policyname,role_policy):
     role_err=None
+    role_response = None
     if policyname is None:
         policyname=get_parameter_name()
     try:

@@ -78,6 +80,7 @@ def put_role_policy(iam_client,rolename,policyname,role_policy):
 
 def put_user_policy(iam_client,username,policyname,policy_document):
     role_err=None
+    role_response = None
     if policyname is None:
         policyname=get_parameter_name()
     try:
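The ``role_response = None`` initializations matter because these helpers assign the response inside a ``try`` block: if the IAM call raises, the name would otherwise be unbound when the function returns it. The calling pattern, sketched generically (``call`` stands in for any of the wrapped IAM operations; ``ClientError`` is ``botocore.exceptions.ClientError``)::

    err, response = None, None
    try:
        response = call()
    except ClientError as e:
        err = e.response
    # exactly one of err/response is set, so callers can branch:
    # if response: assert on it; else: assert False, err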
@@ -222,11 +225,17 @@ def test_assume_role_allow():
 
     policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"AWS\":[\"arn:aws:iam:::user/"+sts_user_id+"\"]},\"Action\":[\"sts:AssumeRole\"]}]}"
     (role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
-    assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+''
+    if role_response:
+        assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+''
+    else:
+        assert False, role_error
 
     role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\"}}"
     (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy)
-    assert response['ResponseMetadata']['HTTPStatusCode'] == 200
+    if response:
+        assert response['ResponseMetadata']['HTTPStatusCode'] == 200
+    else:
+        assert False, role_err
 
     resp=sts_client.assume_role(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name)
     assert resp['ResponseMetadata']['HTTPStatusCode'] == 200

@@ -256,11 +265,17 @@ def test_assume_role_deny():
 
     policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"AWS\":[\"arn:aws:iam:::user/"+sts_user_id+"\"]},\"Action\":[\"sts:AssumeRole\"]}]}"
     (role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
-    assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+''
+    if role_response:
+        assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+''
+    else:
+        assert False, role_error
 
     role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Deny\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\"}}"
     (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy)
-    assert response['ResponseMetadata']['HTTPStatusCode'] == 200
+    if response:
+        assert response['ResponseMetadata']['HTTPStatusCode'] == 200
+    else:
+        assert False, role_err
 
     resp=sts_client.assume_role(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name)
     assert resp['ResponseMetadata']['HTTPStatusCode'] == 200

@@ -290,11 +305,17 @@ def test_assume_role_creds_expiry():
 
     policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"AWS\":[\"arn:aws:iam:::user/"+sts_user_id+"\"]},\"Action\":[\"sts:AssumeRole\"]}]}"
     (role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
-    assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+''
+    if role_response:
+        assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+''
+    else:
+        assert False, role_error
 
     role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\"}}"
     (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy)
-    assert response['ResponseMetadata']['HTTPStatusCode'] == 200
+    if response:
+        assert response['ResponseMetadata']['HTTPStatusCode'] == 200
+    else:
+        assert False, role_err
 
     resp=sts_client.assume_role(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,DurationSeconds=900)
     assert resp['ResponseMetadata']['HTTPStatusCode'] == 200

@@ -329,12 +350,18 @@ def test_assume_role_deny_head_nonexistent():
 
     policy_document = '{"Version":"2012-10-17","Statement":[{"Effect":"Allow","Principal":{"AWS":["arn:aws:iam:::user/'+sts_user_id+'"]},"Action":["sts:AssumeRole"]}]}'
     (role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
-    assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name
+    if role_response:
+        assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name
+    else:
+        assert False, role_error
 
     # allow GetObject but deny ListBucket
     role_policy = '{"Version":"2012-10-17","Statement":{"Effect":"Allow","Action":"s3:GetObject","Principal":"*","Resource":"arn:aws:s3:::*"}}'
     (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy)
-    assert response['ResponseMetadata']['HTTPStatusCode'] == 200
+    if response:
+        assert response['ResponseMetadata']['HTTPStatusCode'] == 200
+    else:
+        assert False, role_err
 
     resp=sts_client.assume_role(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name)
     assert resp['ResponseMetadata']['HTTPStatusCode'] == 200

@@ -367,12 +394,18 @@ def test_assume_role_allow_head_nonexistent():
 
     policy_document = '{"Version":"2012-10-17","Statement":[{"Effect":"Allow","Principal":{"AWS":["arn:aws:iam:::user/'+sts_user_id+'"]},"Action":["sts:AssumeRole"]}]}'
     (role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None)
-    assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name
+    if role_response:
+        assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name
+    else:
+        assert False, role_error
 
     # allow GetObject and ListBucket
     role_policy = '{"Version":"2012-10-17","Statement":{"Effect":"Allow","Action":["s3:GetObject","s3:ListBucket"],"Principal":"*","Resource":"arn:aws:s3:::*"}}'
     (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy)
-    assert response['ResponseMetadata']['HTTPStatusCode'] == 200
+    if response:
+        assert response['ResponseMetadata']['HTTPStatusCode'] == 200
+    else:
+        assert False, role_err
 
     resp=sts_client.assume_role(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name)
     assert resp['ResponseMetadata']['HTTPStatusCode'] == 200

@@ -418,7 +451,10 @@ def test_assume_role_with_web_identity():
 
     role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\"}}"
     (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy)
-    assert response['ResponseMetadata']['HTTPStatusCode'] == 200
+    if response:
+        assert response['ResponseMetadata']['HTTPStatusCode'] == 200
+    else:
+        assert False, role_err
 
     resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=token)
     assert resp['ResponseMetadata']['HTTPStatusCode'] == 200
tox.ini (4 changes)

@@ -3,5 +3,7 @@ envlist = py
 
 [testenv]
 deps = -rrequirements.txt
-passenv = S3TEST_CONF S3_USE_SIGV4
+passenv =
+    S3TEST_CONF
+    S3_USE_SIGV4
 commands = pytest {posargs}
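The multi-line ``passenv`` form tracks tox 4, which initially rejected space-separated ``passenv`` values; newline- or comma-separated entries are the portable spelling, and the change pairs with the ``tox`` entry added to requirements.txt above. Invocation is unchanged, e.g.::

    S3TEST_CONF=your.conf tox -- s3tests_boto3/functional/test_sts.py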