forked from TrueCloudLab/s3-tests
Compare commits
341 commits
ceph-squid
...
master
Author | SHA1 | Date | |
---|---|---|---|
|
08df9352f9 | ||
|
cba8047c7e | ||
|
acc8ef43c9 | ||
|
999d39d4db | ||
|
ac71900ffb | ||
|
aa82bd16ae | ||
|
e8db6c2c16 | ||
|
6a775cb445 | ||
|
0d85ed2dda | ||
|
9444c29674 | ||
|
bc8c14ac12 | ||
|
ecf7a8a7a9 | ||
|
3458971054 | ||
|
2e41494293 | ||
|
f61129e432 | ||
|
218f90063f | ||
|
82fedef5a5 | ||
|
c9aded48e5 | ||
|
87b496f25f | ||
|
a83396cda7 | ||
|
93a3b6c704 | ||
|
474c1404e2 | ||
|
2e395d78ea | ||
|
4eda9c0626 | ||
|
38ab4c5638 | ||
|
36fb297e48 | ||
|
8277a9fb9a | ||
|
c0f0b679db | ||
|
95df503ced | ||
|
9577cde013 | ||
|
a3dbac7115 | ||
|
73ed9121f4 | ||
|
bebdfd1ba7 | ||
|
658fc699a8 | ||
|
27f24ee4d7 | ||
|
00b9a2a291 | ||
|
e9c5cc29e9 | ||
|
77f1334571 | ||
|
c4c5a247eb | ||
|
54c1488a43 | ||
|
88fd867007 | ||
|
a28d46fa2a | ||
|
46f60d3029 | ||
|
d48530a294 | ||
|
dfabbf5a8d | ||
|
7bd4b0ee14 | ||
|
96d658444a | ||
|
a3a16eb66a | ||
|
7ebc530e04 | ||
|
4ca7967ae7 | ||
|
d5791d8da6 | ||
|
ba292fbf59 | ||
|
ed4a8e2244 | ||
|
46217fcf81 | ||
|
cefea0fd26 | ||
|
d4ada317e1 | ||
|
c6e40b4ffa | ||
|
364f29d087 | ||
|
0377466704 | ||
|
db76dfe791 | ||
|
d8becad96a | ||
|
7cd4613883 | ||
|
5f3353e6b5 | ||
|
a35b3c609a | ||
|
83af25722c | ||
|
8e01f2315c | ||
|
ade849b90f | ||
|
aecd282a11 | ||
|
3ef85406f9 | ||
|
12abc78b9b | ||
|
b46d16467c | ||
|
4744808eda | ||
|
a87f0b63e7 | ||
|
3af42312bf | ||
|
3056e6d039 | ||
|
997f78d58a | ||
|
1d5764d569 | ||
|
055451f666 | ||
|
1866f04d81 | ||
|
a2acdbfdda | ||
|
da91ad8bbf | ||
|
6861c3d810 | ||
|
519f8a4b0c | ||
|
d552124680 | ||
|
19c17fa49a | ||
|
40182ce26f | ||
|
e29d6246fc | ||
|
95677d85bc | ||
|
9c50cd1539 | ||
|
e9e3374827 | ||
|
e54f0a4508 | ||
|
b1efd0477a | ||
|
9961af4bd2 | ||
|
c0a1880d4c | ||
|
0e1bf6f652 | ||
|
73b340a0e2 | ||
|
b75b89c94b | ||
|
c252440614 | ||
|
f624165ec9 | ||
|
10f3f7620d | ||
|
188b392131 | ||
|
28009bf7d3 | ||
|
4476773180 | ||
|
928eb7a90f | ||
|
b904ef08bc | ||
|
fa0ea9afe0 | ||
|
2998ea91eb | ||
|
00cdcaf056 | ||
|
741f2cbc9e | ||
|
b05a394738 | ||
|
13e0d736a8 | ||
|
e18ea7fac4 | ||
|
7e35765dd4 | ||
|
2535dd695d | ||
|
008f5025f7 | ||
|
bc2a3b0b70 | ||
|
c445361c2e | ||
|
89bbe654ca | ||
|
b045323900 | ||
|
febbcc12c2 | ||
|
818443e916 | ||
|
992e193d81 | ||
|
787dc6bd43 | ||
|
97c0338adf | ||
|
bb27e04c45 | ||
|
6d2ed19c18 | ||
|
13a9bfc00a | ||
|
29b0e27e49 | ||
|
d158edb201 | ||
|
5b9652caa4 | ||
|
d976f47d74 | ||
|
359bde7e87 | ||
|
3a0f1f0ead | ||
|
42aff3e8fd | ||
|
5219b86db9 | ||
|
43b957792b | ||
|
2087c1ba26 | ||
|
5914eb2005 | ||
|
a536dd0e88 | ||
|
3437cda73d | ||
|
2c710811fa | ||
|
819dd5aa32 | ||
|
b1472019d7 | ||
|
18a41ab63f | ||
|
b8422a2055 | ||
|
7993dd02a5 | ||
|
5e9f6e5ffb | ||
|
d13ed28a5c | ||
|
494379c2ff | ||
|
4c75fba0de | ||
|
f5d0bc9be3 | ||
|
7e7e8d5a42 | ||
|
c80e9d2118 | ||
|
4864dbc340 | ||
|
3652cfe2ec | ||
|
672a123348 | ||
|
9319a41b24 | ||
|
114397c358 | ||
|
6ff8cf27a2 | ||
|
e4953a3b76 | ||
|
60b26f210e | ||
|
64e919a13b | ||
|
defb8eb977 | ||
|
b200013565 | ||
|
89f97ed35c | ||
|
d89ab9d862 | ||
|
774172ad43 | ||
|
ad999de7c4 | ||
|
44069ff062 | ||
|
a8ee732732 | ||
|
79156f3d3d | ||
|
8063cd68c9 | ||
|
c8fc8cd7c8 | ||
|
a3100af70a | ||
|
5d63ebf83d | ||
|
be7ab936cd | ||
|
952beb9ebd | ||
|
bf889041c9 | ||
|
88a8d1c66f | ||
|
4cf38b4138 | ||
|
97be0d44c6 | ||
|
5b08b26453 | ||
|
ef570220f9 | ||
|
33afb4eb88 | ||
|
25d05a194b | ||
|
75e4e4f631 | ||
|
c03108418f | ||
|
9f1f9c9273 | ||
|
16834645a6 | ||
|
8af8f96740 | ||
|
dd7cac25f5 | ||
|
101dfc104a | ||
|
bacab3cadf | ||
|
6eb0e15711 | ||
|
d20d8d2207 | ||
|
76beb672d1 | ||
|
cb830ebae1 | ||
|
cf77d5c560 | ||
|
0f3f35ef01 | ||
|
47292aee17 | ||
|
a38cdb1dd5 | ||
|
13a477d096 | ||
|
c03fd082cc | ||
|
540b28fa20 | ||
|
a4d282c1db | ||
|
f7f0799ceb | ||
|
60593c99dd | ||
|
5f96a32045 | ||
|
6019ec1ef3 | ||
|
a3b849e4db | ||
|
93099c1fb0 | ||
|
9a6a1e9f19 | ||
|
23be1160f5 | ||
|
47ece6e861 | ||
|
eef8d0fa67 | ||
|
f51101d752 | ||
|
490d0a4c4f | ||
|
749e29185b | ||
|
7c07bad930 | ||
|
687ab24e7d | ||
|
d073b991aa | ||
|
99d4b329e2 | ||
|
55d8ef2a7e | ||
|
9ac8aef12b | ||
|
4a89a9a5b2 | ||
|
71266fede9 | ||
|
5dcc3dd689 | ||
|
bf43a4a10a | ||
|
86fecf83b9 | ||
|
64068d7bf9 | ||
|
d466b7bd09 | ||
|
96438f44e4 | ||
|
a6004fe43b | ||
|
b252638369 | ||
|
5476c709c8 | ||
|
ea3caaa76b | ||
|
95fd91df2b | ||
|
7fe0304e9c | ||
|
8662815ebe | ||
|
9c4f15a47e | ||
|
bb995c2aeb | ||
|
41ebef2540 | ||
|
513ecdfdd0 | ||
|
723853fd18 | ||
|
44643af0b0 | ||
|
245a93326e | ||
|
700a04737a | ||
|
d2a7ed88f1 | ||
|
459e3c870a | ||
|
20aa9aa071 | ||
|
d868058d0c | ||
|
e229d1aaf6 | ||
|
64bdc3beec | ||
|
ba9525f425 | ||
|
a3447c50df | ||
|
aaa355f20b | ||
|
a0ef4be7fc | ||
|
7bd3c432fc | ||
|
2851712901 | ||
|
2ce7e15cca | ||
|
cfdf914c4b | ||
|
1572fbc87b | ||
|
b1815c25dc | ||
|
c6a4ab9d12 | ||
|
7276bee050 | ||
|
e7102e8cb0 | ||
|
60dd3444b3 | ||
|
4a86ebbe8b | ||
|
66ced9af1d | ||
|
8893cc49c5 | ||
|
ea7d5fb563 | ||
|
59a3aff3de | ||
|
6a63d0cf91 | ||
|
5d6166bf53 | ||
|
6c885bb39a | ||
|
ef8f65d917 | ||
|
f4f7812efd | ||
|
cd1794f3c7 | ||
|
26b43ccb02 | ||
|
26f06011ee | ||
|
d7c243ba83 | ||
|
c08de72d55 | ||
|
b72bff16d1 | ||
|
bf23251357 | ||
|
f4a052dfcf | ||
|
62395eb872 | ||
|
8638017020 | ||
|
ae3052fa8a | ||
|
a48a9bf6d1 | ||
|
16266d1590 | ||
|
0b2d7f729d | ||
|
daf9062a22 | ||
|
4e3fd5ff41 | ||
|
0eed4a551d | ||
|
30db28e775 | ||
|
f0868651fd | ||
|
6ff497d908 | ||
|
e79dffa731 | ||
|
c2b59fb714 | ||
|
9d526d1a76 | ||
|
979e739eff | ||
|
4948f8b009 | ||
|
f6218fa1de | ||
|
54103207e4 | ||
|
5a8d0b8b0d | ||
|
350fcbb4ec | ||
|
8dbe896f89 | ||
|
982d15c30e | ||
|
fce9a52ef4 | ||
|
0985cc11d7 | ||
|
72e251ed69 | ||
|
fb39ac4829 | ||
|
6d8c0059db | ||
|
b63229b110 | ||
|
b7f47c2a31 | ||
|
e006dd4753 | ||
|
1a9d3677f7 | ||
|
4c8bbbef0a | ||
|
a0c15c80ad | ||
|
b6db7bdd8a | ||
|
7f8a12423f | ||
|
713012c178 | ||
|
5dc8bc75ab | ||
|
94b1986228 | ||
|
4c7c279f70 | ||
|
5925f0fb3f | ||
|
c1bce6ac70 | ||
|
d543619e71 | ||
|
f42872fd53 | ||
|
74daf86fe5 | ||
|
dac38694ef | ||
|
47a3755378 | ||
|
4d675235dd | ||
|
3b1571ace6 | ||
|
b4516725f2 | ||
|
d02c1819f6 | ||
|
4996430709 | ||
|
6d3f574a8e | ||
|
1ad38530e0 | ||
|
3f9d31c6c7 | ||
|
02b1d50ca7 |
7 changed files with 702 additions and 111 deletions
|
@ -22,7 +22,7 @@ Once you have that file copied and edited, you can run the tests with::
|
|||
|
||||
You can specify which directory of tests to run::
|
||||
|
||||
S3TEST_CONF=your.conf tox s3tests_boto3/functional
|
||||
S3TEST_CONF=your.conf tox -- s3tests_boto3/functional
|
||||
|
||||
You can specify which file of tests to run::
|
||||
|
||||
|
@ -44,7 +44,7 @@ located in the ``s3test_boto3`` directory.
|
|||
|
||||
You can run only the boto3 tests with::
|
||||
|
||||
S3TEST_CONF=your.conf tox -- -m 'not fails_on_aws' s3tests_boto3/functional
|
||||
S3TEST_CONF=your.conf tox -- s3tests_boto3/functional
|
||||
|
||||
========================
|
||||
STS compatibility tests
|
||||
|
|
|
@ -7,6 +7,7 @@ markers =
|
|||
auth_common
|
||||
bucket_policy
|
||||
bucket_encryption
|
||||
checksum
|
||||
cloud_transition
|
||||
encryption
|
||||
fails_on_aws
|
||||
|
|
|
@ -117,6 +117,9 @@ secret_key = opqrstuvwxyzabcdefghijklmnopqrstuvwxyzab
|
|||
# tenant email set in vstart.sh
|
||||
email = tenanteduser@example.com
|
||||
|
||||
# tenant name
|
||||
tenant = testx
|
||||
|
||||
#following section needs to be added for all sts-tests
|
||||
[iam]
|
||||
#used for iam operations in sts-tests
|
||||
|
|
|
@ -174,7 +174,7 @@ def configured_storage_classes():
|
|||
|
||||
return sc
|
||||
|
||||
def setup():
|
||||
def configure():
|
||||
cfg = configparser.RawConfigParser()
|
||||
try:
|
||||
path = os.environ['S3TEST_CONF']
|
||||
|
@ -259,6 +259,7 @@ def setup():
|
|||
config.tenant_display_name = cfg.get('s3 tenant',"display_name")
|
||||
config.tenant_user_id = cfg.get('s3 tenant',"user_id")
|
||||
config.tenant_email = cfg.get('s3 tenant',"email")
|
||||
config.tenant_name = cfg.get('s3 tenant',"tenant")
|
||||
|
||||
config.iam_access_key = cfg.get('iam',"access_key")
|
||||
config.iam_secret_key = cfg.get('iam',"secret_key")
|
||||
|
@ -284,17 +285,17 @@ def setup():
|
|||
template = cfg.get('fixtures', "iam path prefix", fallback="/s3-tests/")
|
||||
config.iam_path_prefix = choose_bucket_prefix(template=template)
|
||||
|
||||
alt_client = get_alt_client()
|
||||
tenant_client = get_tenant_client()
|
||||
nuke_prefixed_buckets(prefix=prefix)
|
||||
nuke_prefixed_buckets(prefix=prefix, client=alt_client)
|
||||
nuke_prefixed_buckets(prefix=prefix, client=tenant_client)
|
||||
|
||||
if cfg.has_section("s3 cloud"):
|
||||
get_cloud_config(cfg)
|
||||
else:
|
||||
config.cloud_storage_class = None
|
||||
|
||||
def setup():
|
||||
alt_client = get_alt_client()
|
||||
tenant_client = get_tenant_client()
|
||||
nuke_prefixed_buckets(prefix=prefix)
|
||||
nuke_prefixed_buckets(prefix=prefix, client=alt_client)
|
||||
nuke_prefixed_buckets(prefix=prefix, client=tenant_client)
|
||||
|
||||
def teardown():
|
||||
alt_client = get_alt_client()
|
||||
|
@ -323,11 +324,12 @@ def teardown():
|
|||
|
||||
@pytest.fixture(scope="package")
|
||||
def configfile():
|
||||
setup()
|
||||
configure()
|
||||
return config
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def setup_teardown(configfile):
|
||||
setup()
|
||||
yield
|
||||
teardown()
|
||||
|
||||
|
@ -509,6 +511,17 @@ def get_tenant_client(client_config=None):
|
|||
config=client_config)
|
||||
return client
|
||||
|
||||
def get_v2_tenant_client():
|
||||
client_config = Config(signature_version='s3')
|
||||
client = boto3.client(service_name='s3',
|
||||
aws_access_key_id=config.tenant_access_key,
|
||||
aws_secret_access_key=config.tenant_secret_key,
|
||||
endpoint_url=config.default_endpoint,
|
||||
use_ssl=config.default_is_secure,
|
||||
verify=config.default_ssl_verify,
|
||||
config=client_config)
|
||||
return client
|
||||
|
||||
def get_tenant_iam_client():
|
||||
|
||||
client = boto3.client(service_name='iam',
|
||||
|
@ -693,6 +706,9 @@ def get_tenant_aws_secret_key():
|
|||
def get_tenant_display_name():
|
||||
return config.tenant_display_name
|
||||
|
||||
def get_tenant_name():
|
||||
return config.tenant_name
|
||||
|
||||
def get_tenant_user_id():
|
||||
return config.tenant_user_id
|
||||
|
||||
|
|
|
@ -2445,6 +2445,124 @@ def test_account_oidc_provider(iam_root):
|
|||
iam_root.delete_open_id_connect_provider(OpenIDConnectProviderArn=arn)
|
||||
|
||||
|
||||
@pytest.mark.iam_account
|
||||
def test_verify_add_new_client_id_to_oidc(iam_root):
|
||||
url_host = get_iam_path_prefix()[1:] + 'example.com'
|
||||
url = 'http://' + url_host
|
||||
|
||||
response = iam_root.create_open_id_connect_provider(
|
||||
Url=url,
|
||||
ClientIDList=[
|
||||
'app-jee-jsp',
|
||||
],
|
||||
ThumbprintList=[
|
||||
'3768084dfb3d2b68b7897bf5f565da8efEXAMPLE'
|
||||
]
|
||||
)
|
||||
assert response['ResponseMetadata']['HTTPStatusCode'] == 200
|
||||
get_response = iam_root.get_open_id_connect_provider(
|
||||
OpenIDConnectProviderArn=response['OpenIDConnectProviderArn']
|
||||
)
|
||||
assert get_response['ResponseMetadata']['HTTPStatusCode'] == 200
|
||||
assert len(get_response['ClientIDList']) == 1
|
||||
assert get_response['ClientIDList'][0] == 'app-jee-jsp'
|
||||
assert url == get_response['Url']
|
||||
|
||||
add_response = iam_root.add_client_id_to_open_id_connect_provider(
|
||||
OpenIDConnectProviderArn=response['OpenIDConnectProviderArn'],
|
||||
ClientID='app-profile-jsp'
|
||||
)
|
||||
assert add_response['ResponseMetadata']['HTTPStatusCode'] == 200
|
||||
get_response = iam_root.get_open_id_connect_provider(
|
||||
OpenIDConnectProviderArn=response['OpenIDConnectProviderArn']
|
||||
)
|
||||
assert len(get_response['ClientIDList']) == 2
|
||||
assert get_response['ClientIDList'][0] == 'app-jee-jsp'
|
||||
assert get_response['ClientIDList'][1] == 'app-profile-jsp'
|
||||
assert get_response['ResponseMetadata']['HTTPStatusCode'] == 200
|
||||
del_response = iam_root.delete_open_id_connect_provider(
|
||||
OpenIDConnectProviderArn=response['OpenIDConnectProviderArn']
|
||||
)
|
||||
assert del_response['ResponseMetadata']['HTTPStatusCode'] == 200
|
||||
|
||||
def test_verify_add_existing_client_id_to_oidc(iam_root):
|
||||
url_host = get_iam_path_prefix()[1:] + 'example.com'
|
||||
url = 'http://' + url_host
|
||||
|
||||
response = iam_root.create_open_id_connect_provider(
|
||||
Url=url,
|
||||
ClientIDList=[
|
||||
'app-jee-jsp',
|
||||
'app-profile-jsp'
|
||||
],
|
||||
ThumbprintList=[
|
||||
'3768084dfb3d2b68b7897bf5f565da8efEXAMPLE'
|
||||
]
|
||||
)
|
||||
assert response['ResponseMetadata']['HTTPStatusCode'] == 200
|
||||
get_response = iam_root.get_open_id_connect_provider(
|
||||
OpenIDConnectProviderArn=response['OpenIDConnectProviderArn']
|
||||
)
|
||||
assert get_response['ResponseMetadata']['HTTPStatusCode'] == 200
|
||||
assert len(get_response['ClientIDList']) == 2
|
||||
assert get_response['ClientIDList'][0] == 'app-jee-jsp'
|
||||
assert get_response['ClientIDList'][1] == 'app-profile-jsp'
|
||||
add_response = iam_root.add_client_id_to_open_id_connect_provider(
|
||||
OpenIDConnectProviderArn=response['OpenIDConnectProviderArn'],
|
||||
ClientID='app-profile-jsp'
|
||||
)
|
||||
assert add_response['ResponseMetadata']['HTTPStatusCode'] == 200
|
||||
get_response = iam_root.get_open_id_connect_provider(
|
||||
OpenIDConnectProviderArn=response['OpenIDConnectProviderArn']
|
||||
)
|
||||
assert len(get_response['ClientIDList']) == 2
|
||||
assert get_response['ClientIDList'][0] == 'app-jee-jsp'
|
||||
assert get_response['ClientIDList'][1] == 'app-profile-jsp'
|
||||
assert get_response['ResponseMetadata']['HTTPStatusCode'] == 200
|
||||
del_response = iam_root.delete_open_id_connect_provider(
|
||||
OpenIDConnectProviderArn=response['OpenIDConnectProviderArn']
|
||||
)
|
||||
assert del_response['ResponseMetadata']['HTTPStatusCode'] == 200
|
||||
|
||||
def test_verify_update_thumbprintlist_of_oidc(iam_root):
|
||||
url_host = get_iam_path_prefix()[1:] + 'example.com'
|
||||
url = 'http://' + url_host
|
||||
|
||||
response = iam_root.create_open_id_connect_provider(
|
||||
Url=url,
|
||||
ClientIDList=[
|
||||
'app-jee-jsp',
|
||||
'app-profile-jsp'
|
||||
],
|
||||
ThumbprintList=[
|
||||
'3768084dfb3d2b68b7897bf5f565da8efEXAMPLE'
|
||||
]
|
||||
)
|
||||
assert response['ResponseMetadata']['HTTPStatusCode'] == 200
|
||||
get_response = iam_root.get_open_id_connect_provider(
|
||||
OpenIDConnectProviderArn=response['OpenIDConnectProviderArn']
|
||||
)
|
||||
assert get_response['ResponseMetadata']['HTTPStatusCode'] == 200
|
||||
assert len(get_response['ThumbprintList']) == 1
|
||||
assert get_response['ThumbprintList'][0] == '3768084dfb3d2b68b7897bf5f565da8efEXAMPLE'
|
||||
update_response = iam_root.update_open_id_connect_provider_thumbprint(
|
||||
OpenIDConnectProviderArn=response['OpenIDConnectProviderArn'],
|
||||
ThumbprintList=[
|
||||
'3768084dfb3d2b68b7897bf5f565da8efSAMPLE1'
|
||||
]
|
||||
)
|
||||
assert update_response['ResponseMetadata']['HTTPStatusCode'] == 200
|
||||
get_response = iam_root.get_open_id_connect_provider(
|
||||
OpenIDConnectProviderArn=response['OpenIDConnectProviderArn']
|
||||
)
|
||||
assert get_response['ResponseMetadata']['HTTPStatusCode'] == 200
|
||||
assert len(get_response['ThumbprintList']) == 1
|
||||
assert get_response['ThumbprintList'][0] == '3768084dfb3d2b68b7897bf5f565da8efSAMPLE1'
|
||||
del_response = iam_root.delete_open_id_connect_provider(
|
||||
OpenIDConnectProviderArn=response['OpenIDConnectProviderArn']
|
||||
)
|
||||
assert del_response['ResponseMetadata']['HTTPStatusCode'] == 200
|
||||
|
||||
# test cross-account access, adding user policy before the bucket policy
|
||||
def _test_cross_account_user_bucket_policy(roots3, alt_root, alt_name, alt_arn):
|
||||
# add a user policy that allows s3 actions
|
||||
|
@ -2801,3 +2919,4 @@ def test_cross_account_root_bucket_acl_grant_account_email(iam_root, iam_alt_roo
|
|||
alts3 = get_iam_alt_root_client(service_name='s3')
|
||||
grantee = 'emailAddress=' + get_iam_alt_root_email()
|
||||
_test_cross_account_root_bucket_acl(roots3, alts3, grantee)
|
||||
|
||||
|
|
|
@ -67,7 +67,9 @@ from . import (
|
|||
get_alt_email,
|
||||
get_alt_client,
|
||||
get_tenant_client,
|
||||
get_v2_tenant_client,
|
||||
get_tenant_iam_client,
|
||||
get_tenant_name,
|
||||
get_tenant_user_id,
|
||||
get_buckets_list,
|
||||
get_objects_list,
|
||||
|
@ -5266,11 +5268,15 @@ def test_buckets_list_ctime():
|
|||
before = datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta(days=1)
|
||||
|
||||
client = get_client()
|
||||
buckets = []
|
||||
for i in range(5):
|
||||
client.create_bucket(Bucket=get_new_bucket_name())
|
||||
name = get_new_bucket_name()
|
||||
client.create_bucket(Bucket=name)
|
||||
buckets.append(name)
|
||||
|
||||
response = client.list_buckets()
|
||||
for bucket in response['Buckets']:
|
||||
if bucket['Name'] in buckets:
|
||||
ctime = bucket['CreationDate']
|
||||
assert before <= ctime, '%r > %r' % (before, ctime)
|
||||
|
||||
|
@ -6454,6 +6460,61 @@ def test_multipart_get_part():
|
|||
assert status == 400
|
||||
assert error_code == 'InvalidPart'
|
||||
|
||||
@pytest.mark.encryption
|
||||
@pytest.mark.fails_on_dbstore
|
||||
def test_multipart_sse_c_get_part():
|
||||
bucket_name = get_new_bucket()
|
||||
client = get_client()
|
||||
key = "mymultipart"
|
||||
|
||||
part_size = 5*1024*1024
|
||||
part_sizes = 3 * [part_size] + [1*1024*1024]
|
||||
part_count = len(part_sizes)
|
||||
total_size = sum(part_sizes)
|
||||
|
||||
enc_headers = {
|
||||
'x-amz-server-side-encryption-customer-algorithm': 'AES256',
|
||||
'x-amz-server-side-encryption-customer-key': 'pO3upElrwuEXSoFwCfnZPdSsmt/xWeFa0N9KgDijwVs=',
|
||||
'x-amz-server-side-encryption-customer-key-md5': 'DWygnHRtgiJ77HCm+1rvHw==',
|
||||
}
|
||||
get_args = {
|
||||
'SSECustomerAlgorithm': 'AES256',
|
||||
'SSECustomerKey': 'pO3upElrwuEXSoFwCfnZPdSsmt/xWeFa0N9KgDijwVs=',
|
||||
'SSECustomerKeyMD5': 'DWygnHRtgiJ77HCm+1rvHw==',
|
||||
}
|
||||
|
||||
(upload_id, data, parts) = _multipart_upload_enc(client, bucket_name, key, total_size,
|
||||
part_size, init_headers=enc_headers, part_headers=enc_headers, metadata=None, resend_parts=[2])
|
||||
|
||||
# request part before complete
|
||||
e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key=key, PartNumber=1)
|
||||
status, error_code = _get_status_and_error_code(e.response)
|
||||
assert status == 404
|
||||
assert error_code == 'NoSuchKey'
|
||||
|
||||
client.complete_multipart_upload(Bucket=bucket_name, Key=key, UploadId=upload_id, MultipartUpload={'Parts': parts})
|
||||
assert len(parts) == part_count
|
||||
|
||||
for part, size in zip(parts, part_sizes):
|
||||
response = client.head_object(Bucket=bucket_name, Key=key, PartNumber=part['PartNumber'], **get_args)
|
||||
assert response['PartsCount'] == part_count
|
||||
assert response['ETag'] == '"{}"'.format(part['ETag'])
|
||||
|
||||
response = client.get_object(Bucket=bucket_name, Key=key, PartNumber=part['PartNumber'], **get_args)
|
||||
assert response['PartsCount'] == part_count
|
||||
assert response['ETag'] == '"{}"'.format(part['ETag'])
|
||||
assert response['ContentLength'] == size
|
||||
# compare contents
|
||||
for chunk in response['Body'].iter_chunks():
|
||||
assert chunk.decode() == data[0:len(chunk)]
|
||||
data = data[len(chunk):]
|
||||
|
||||
# request PartNumber out of range
|
||||
e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key=key, PartNumber=5)
|
||||
status, error_code = _get_status_and_error_code(e.response)
|
||||
assert status == 400
|
||||
assert error_code == 'InvalidPart'
|
||||
|
||||
@pytest.mark.fails_on_dbstore
|
||||
def test_multipart_single_get_part():
|
||||
bucket_name = get_new_bucket()
|
||||
|
@ -6516,6 +6577,33 @@ def test_non_multipart_get_part():
|
|||
assert response['ETag'] == etag
|
||||
assert _get_body(response) == 'body'
|
||||
|
||||
@pytest.mark.encryption
|
||||
@pytest.mark.fails_on_dbstore
|
||||
def test_non_multipart_sse_c_get_part():
|
||||
bucket_name = get_new_bucket()
|
||||
client = get_client()
|
||||
key = "singlepart"
|
||||
|
||||
sse_args = {
|
||||
'SSECustomerAlgorithm': 'AES256',
|
||||
'SSECustomerKey': 'pO3upElrwuEXSoFwCfnZPdSsmt/xWeFa0N9KgDijwVs=',
|
||||
'SSECustomerKeyMD5': 'DWygnHRtgiJ77HCm+1rvHw=='
|
||||
}
|
||||
|
||||
response = client.put_object(Bucket=bucket_name, Key=key, Body='body', **sse_args)
|
||||
etag = response['ETag']
|
||||
|
||||
# request for PartNumber > 1 results in InvalidPart
|
||||
e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key=key, PartNumber=2, **sse_args)
|
||||
status, error_code = _get_status_and_error_code(e.response)
|
||||
assert status == 400
|
||||
assert error_code == 'InvalidPart'
|
||||
|
||||
# request for PartNumber = 1 gives back the entire object
|
||||
response = client.get_object(Bucket=bucket_name, Key=key, PartNumber=1, **sse_args)
|
||||
assert response['ETag'] == etag
|
||||
assert _get_body(response) == 'body'
|
||||
|
||||
|
||||
def _simple_http_req_100_cont(host, port, is_secure, method, resource):
|
||||
"""
|
||||
|
@ -6783,6 +6871,18 @@ def test_cors_presigned_get_object_tenant():
|
|||
method='get_object',
|
||||
)
|
||||
|
||||
def test_cors_presigned_get_object_v2():
|
||||
_test_cors_options_presigned_method(
|
||||
client=get_v2_client(),
|
||||
method='get_object',
|
||||
)
|
||||
|
||||
def test_cors_presigned_get_object_tenant_v2():
|
||||
_test_cors_options_presigned_method(
|
||||
client=get_v2_tenant_client(),
|
||||
method='get_object',
|
||||
)
|
||||
|
||||
def test_cors_presigned_put_object():
|
||||
_test_cors_options_presigned_method(
|
||||
client=get_client(),
|
||||
|
@ -6796,6 +6896,18 @@ def test_cors_presigned_put_object_with_acl():
|
|||
cannedACL='private',
|
||||
)
|
||||
|
||||
def test_cors_presigned_put_object_v2():
|
||||
_test_cors_options_presigned_method(
|
||||
client=get_v2_client(),
|
||||
method='put_object',
|
||||
)
|
||||
|
||||
def test_cors_presigned_put_object_tenant_v2():
|
||||
_test_cors_options_presigned_method(
|
||||
client=get_v2_tenant_client(),
|
||||
method='put_object',
|
||||
)
|
||||
|
||||
def test_cors_presigned_put_object_tenant():
|
||||
_test_cors_options_presigned_method(
|
||||
client=get_tenant_client(),
|
||||
|
@ -10073,6 +10185,67 @@ def test_encryption_sse_c_post_object_authenticated_request():
|
|||
body = _get_body(response)
|
||||
assert body == 'bar'
|
||||
|
||||
|
||||
@pytest.mark.encryption
|
||||
@pytest.mark.fails_on_dbstore
|
||||
def test_encryption_sse_c_enforced_with_bucket_policy():
|
||||
bucket_name = get_new_bucket()
|
||||
client = get_client()
|
||||
|
||||
deny_unencrypted_obj = {
|
||||
"Null" : {
|
||||
"s3:x-amz-server-side-encryption-customer-algorithm": "true"
|
||||
}
|
||||
}
|
||||
|
||||
p = Policy()
|
||||
resource = _make_arn_resource("{}/{}".format(bucket_name, "*"))
|
||||
|
||||
s = Statement("s3:PutObject", resource, effect="Deny", condition=deny_unencrypted_obj)
|
||||
policy_document = p.add_statement(s).to_json()
|
||||
|
||||
client.put_bucket_policy(Bucket=bucket_name, Policy=policy_document)
|
||||
|
||||
check_access_denied(client.put_object, Bucket=bucket_name, Key='foo', Body='bar')
|
||||
|
||||
client.put_object(
|
||||
Bucket=bucket_name, Key='foo', Body='bar',
|
||||
SSECustomerAlgorithm='AES256',
|
||||
SSECustomerKey='pO3upElrwuEXSoFwCfnZPdSsmt/xWeFa0N9KgDijwVs=',
|
||||
SSECustomerKeyMD5='DWygnHRtgiJ77HCm+1rvHw=='
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.encryption
|
||||
@pytest.mark.fails_on_dbstore
|
||||
def test_encryption_sse_c_deny_algo_with_bucket_policy():
|
||||
bucket_name = get_new_bucket()
|
||||
client = get_client()
|
||||
|
||||
deny_incorrect_algo = {
|
||||
"StringNotEquals": {
|
||||
"s3:x-amz-server-side-encryption-customer-algorithm": "AES256"
|
||||
}
|
||||
}
|
||||
|
||||
p = Policy()
|
||||
resource = _make_arn_resource("{}/{}".format(bucket_name, "*"))
|
||||
|
||||
s = Statement("s3:PutObject", resource, effect="Deny", condition=deny_incorrect_algo)
|
||||
policy_document = p.add_statement(s).to_json()
|
||||
|
||||
client.put_bucket_policy(Bucket=bucket_name, Policy=policy_document)
|
||||
|
||||
check_access_denied(client.put_object, Bucket=bucket_name, Key='foo', Body='bar', SSECustomerAlgorithm='AES192')
|
||||
|
||||
client.put_object(
|
||||
Bucket=bucket_name, Key='foo', Body='bar',
|
||||
SSECustomerAlgorithm='AES256',
|
||||
SSECustomerKey='pO3upElrwuEXSoFwCfnZPdSsmt/xWeFa0N9KgDijwVs=',
|
||||
SSECustomerKeyMD5='DWygnHRtgiJ77HCm+1rvHw=='
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.encryption
|
||||
@pytest.mark.fails_on_dbstore
|
||||
def _test_sse_kms_customer_write(file_size, key_id = 'testkey-1'):
|
||||
|
@ -10098,10 +10271,6 @@ def _test_sse_kms_customer_write(file_size, key_id = 'testkey-1'):
|
|||
assert body == data
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
@pytest.mark.encryption
|
||||
@pytest.mark.fails_on_dbstore
|
||||
def test_sse_kms_method_head():
|
||||
|
@ -10512,17 +10681,29 @@ def test_bucketv2_policy_acl():
|
|||
client.delete_bucket_policy(Bucket=bucket_name)
|
||||
client.put_bucket_acl(Bucket=bucket_name, ACL='public-read')
|
||||
|
||||
def tenanted_bucket_name(tenant):
|
||||
def change_bucket_name(params, **kwargs):
|
||||
old_name = params['context']['signing']['bucket']
|
||||
new_name = "{}:{}".format(tenant, old_name)
|
||||
params['Bucket'] = new_name
|
||||
params['context']['signing']['bucket'] = new_name
|
||||
|
||||
# the : needs to be url-encoded for urls
|
||||
new_name_url = "{}%3A{}".format(tenant, old_name)
|
||||
params['url'] = params['url'].replace(old_name, new_name_url)
|
||||
params['url_path'] = params['url_path'].replace(old_name, new_name_url)
|
||||
|
||||
return change_bucket_name
|
||||
|
||||
@pytest.mark.bucket_policy
|
||||
# TODO: remove this fails_on_rgw when I fix it
|
||||
@pytest.mark.fails_on_rgw
|
||||
def test_bucket_policy_different_tenant():
|
||||
bucket_name = get_new_bucket()
|
||||
client = get_client()
|
||||
key = 'asdf'
|
||||
client.put_object(Bucket=bucket_name, Key=key, Body='asdf')
|
||||
|
||||
resource1 = "arn:aws:s3::*:" + bucket_name
|
||||
resource2 = "arn:aws:s3::*:" + bucket_name + "/*"
|
||||
resource1 = "arn:aws:s3:::" + bucket_name
|
||||
resource2 = "arn:aws:s3:::" + bucket_name + "/*"
|
||||
policy_document = json.dumps(
|
||||
{
|
||||
"Version": "2012-10-17",
|
||||
|
@ -10539,35 +10720,22 @@ def test_bucket_policy_different_tenant():
|
|||
|
||||
client.put_bucket_policy(Bucket=bucket_name, Policy=policy_document)
|
||||
|
||||
# TODO: figure out how to change the bucketname
|
||||
def change_bucket_name(**kwargs):
|
||||
kwargs['params']['url'] = "http://localhost:8000/:{bucket_name}?encoding-type=url".format(bucket_name=bucket_name)
|
||||
kwargs['params']['url_path'] = "/:{bucket_name}".format(bucket_name=bucket_name)
|
||||
kwargs['params']['context']['signing']['bucket'] = ":{bucket_name}".format(bucket_name=bucket_name)
|
||||
print(kwargs['request_signer'])
|
||||
print(kwargs)
|
||||
|
||||
#bucket_name = ":" + bucket_name
|
||||
# use the tenanted client to list the global tenant's bucket
|
||||
tenant_client = get_tenant_client()
|
||||
tenant_client.meta.events.register('before-call.s3.ListObjects', change_bucket_name)
|
||||
tenant_client.meta.events.register('before-call.s3.ListObjects', tenanted_bucket_name(''))
|
||||
response = tenant_client.list_objects(Bucket=bucket_name)
|
||||
#alt_client = get_alt_client()
|
||||
#response = alt_client.list_objects(Bucket=bucket_name)
|
||||
|
||||
assert len(response['Contents']) == 1
|
||||
|
||||
@pytest.mark.bucket_policy
|
||||
# TODO: remove this fails_on_rgw when I fix it
|
||||
@pytest.mark.fails_on_rgw
|
||||
@pytest.mark.list_objects_v2
|
||||
def test_bucketv2_policy_different_tenant():
|
||||
bucket_name = get_new_bucket()
|
||||
client = get_client()
|
||||
def test_bucket_policy_tenanted_bucket():
|
||||
tenant_client = get_tenant_client()
|
||||
bucket_name = get_new_bucket(tenant_client)
|
||||
key = 'asdf'
|
||||
client.put_object(Bucket=bucket_name, Key=key, Body='asdf')
|
||||
tenant_client.put_object(Bucket=bucket_name, Key=key, Body='asdf')
|
||||
|
||||
resource1 = "arn:aws:s3::*:" + bucket_name
|
||||
resource2 = "arn:aws:s3::*:" + bucket_name + "/*"
|
||||
resource1 = "arn:aws:s3:::" + bucket_name
|
||||
resource2 = "arn:aws:s3:::" + bucket_name + "/*"
|
||||
policy_document = json.dumps(
|
||||
{
|
||||
"Version": "2012-10-17",
|
||||
|
@ -10582,23 +10750,15 @@ def test_bucketv2_policy_different_tenant():
|
|||
}]
|
||||
})
|
||||
|
||||
client.put_bucket_policy(Bucket=bucket_name, Policy=policy_document)
|
||||
tenant_client.put_bucket_policy(Bucket=bucket_name, Policy=policy_document)
|
||||
|
||||
# TODO: figure out how to change the bucketname
|
||||
def change_bucket_name(**kwargs):
|
||||
kwargs['params']['url'] = "http://localhost:8000/:{bucket_name}?encoding-type=url".format(bucket_name=bucket_name)
|
||||
kwargs['params']['url_path'] = "/:{bucket_name}".format(bucket_name=bucket_name)
|
||||
kwargs['params']['context']['signing']['bucket'] = ":{bucket_name}".format(bucket_name=bucket_name)
|
||||
print(kwargs['request_signer'])
|
||||
print(kwargs)
|
||||
tenant = get_tenant_name()
|
||||
|
||||
#bucket_name = ":" + bucket_name
|
||||
tenant_client = get_tenant_client()
|
||||
tenant_client.meta.events.register('before-call.s3.ListObjects', change_bucket_name)
|
||||
response = tenant_client.list_objects_v2(Bucket=bucket_name)
|
||||
#alt_client = get_alt_client()
|
||||
#response = alt_client.list_objects_v2(Bucket=bucket_name)
|
||||
# use the global tenant's client to list the tenanted bucket
|
||||
client = get_client()
|
||||
client.meta.events.register('before-call.s3.ListObjects', tenanted_bucket_name(tenant))
|
||||
|
||||
response = client.list_objects(Bucket=bucket_name)
|
||||
assert len(response['Contents']) == 1
|
||||
|
||||
@pytest.mark.bucket_policy
|
||||
|
@ -11357,6 +11517,63 @@ def test_bucket_policy_put_obj_tagging_existing_tag():
|
|||
assert status == 403
|
||||
|
||||
|
||||
@pytest.mark.bucket_policy
|
||||
@pytest.mark.fails_on_dbstore
|
||||
def test_bucket_policy_upload_part_copy():
|
||||
bucket_name = _create_objects(keys=['public/foo', 'public/bar', 'private/foo'])
|
||||
client = get_client()
|
||||
|
||||
src_resource = _make_arn_resource("{}/{}".format(bucket_name, "public/*"))
|
||||
policy_document = make_json_policy("s3:GetObject", src_resource)
|
||||
|
||||
client.put_bucket_policy(Bucket=bucket_name, Policy=policy_document)
|
||||
|
||||
alt_client = get_alt_client()
|
||||
bucket_name2 = get_new_bucket(alt_client)
|
||||
|
||||
copy_source = {'Bucket': bucket_name, 'Key': 'public/foo'}
|
||||
|
||||
# Create a multipart upload
|
||||
response = alt_client.create_multipart_upload(Bucket=bucket_name2, Key='new_foo')
|
||||
upload_id = response['UploadId']
|
||||
# Upload a part
|
||||
response = alt_client.upload_part_copy(Bucket=bucket_name2, Key='new_foo', PartNumber=1, UploadId=upload_id, CopySource=copy_source)
|
||||
# Complete the multipart upload
|
||||
response = alt_client.complete_multipart_upload(
|
||||
Bucket=bucket_name2, Key='new_foo', UploadId=upload_id,
|
||||
MultipartUpload={'Parts': [{'PartNumber': 1, 'ETag': response['CopyPartResult']['ETag']}]},
|
||||
)
|
||||
|
||||
response = alt_client.get_object(Bucket=bucket_name2, Key='new_foo')
|
||||
body = _get_body(response)
|
||||
assert body == 'public/foo'
|
||||
|
||||
copy_source = {'Bucket': bucket_name, 'Key': 'public/bar'}
|
||||
# Create a multipart upload
|
||||
response = alt_client.create_multipart_upload(Bucket=bucket_name2, Key='new_foo2')
|
||||
upload_id = response['UploadId']
|
||||
# Upload a part
|
||||
response = alt_client.upload_part_copy(Bucket=bucket_name2, Key='new_foo2', PartNumber=1, UploadId=upload_id, CopySource=copy_source)
|
||||
# Complete the multipart upload
|
||||
response = alt_client.complete_multipart_upload(
|
||||
Bucket=bucket_name2, Key='new_foo2', UploadId=upload_id,
|
||||
MultipartUpload={'Parts': [{'PartNumber': 1, 'ETag': response['CopyPartResult']['ETag']}]},
|
||||
)
|
||||
|
||||
response = alt_client.get_object(Bucket=bucket_name2, Key='new_foo2')
|
||||
body = _get_body(response)
|
||||
assert body == 'public/bar'
|
||||
|
||||
copy_source = {'Bucket': bucket_name, 'Key': 'private/foo'}
|
||||
# Create a multipart upload
|
||||
response = alt_client.create_multipart_upload(Bucket=bucket_name2, Key='new_foo2')
|
||||
upload_id = response['UploadId']
|
||||
# Upload a part
|
||||
check_access_denied(alt_client.upload_part_copy, Bucket=bucket_name2, Key='new_foo2', PartNumber=1, UploadId=upload_id, CopySource=copy_source)
|
||||
# Abort the multipart upload
|
||||
alt_client.abort_multipart_upload(Bucket=bucket_name2, Key='new_foo2', UploadId=upload_id)
|
||||
|
||||
|
||||
@pytest.mark.tagging
|
||||
@pytest.mark.bucket_policy
|
||||
@pytest.mark.fails_on_dbstore
|
||||
|
@ -11663,12 +11880,6 @@ def test_bucket_policy_put_obj_s3_noenc():
|
|||
bucket_name = get_new_bucket()
|
||||
client = get_v2_client()
|
||||
|
||||
deny_incorrect_algo = {
|
||||
"StringNotEquals": {
|
||||
"s3:x-amz-server-side-encryption": "AES256"
|
||||
}
|
||||
}
|
||||
|
||||
deny_unencrypted_obj = {
|
||||
"Null" : {
|
||||
"s3:x-amz-server-side-encryption": "true"
|
||||
|
@ -11678,36 +11889,46 @@ def test_bucket_policy_put_obj_s3_noenc():
|
|||
p = Policy()
|
||||
resource = _make_arn_resource("{}/{}".format(bucket_name, "*"))
|
||||
|
||||
s1 = Statement("s3:PutObject", resource, effect="Deny", condition=deny_incorrect_algo)
|
||||
s2 = Statement("s3:PutObject", resource, effect="Deny", condition=deny_unencrypted_obj)
|
||||
policy_document = p.add_statement(s1).add_statement(s2).to_json()
|
||||
|
||||
# boto3.set_stream_logger(name='botocore')
|
||||
s = Statement("s3:PutObject", resource, effect="Deny", condition=deny_unencrypted_obj)
|
||||
policy_document = p.add_statement(s).to_json()
|
||||
|
||||
client.put_bucket_policy(Bucket=bucket_name, Policy=policy_document)
|
||||
key1_str ='testobj'
|
||||
|
||||
#response = client.get_bucket_policy(Bucket=bucket_name)
|
||||
#print response
|
||||
check_access_denied(client.put_object, Bucket=bucket_name, Key=key1_str, Body=key1_str)
|
||||
|
||||
response = client.put_object(Bucket=bucket_name, Key=key1_str, ServerSideEncryption='AES256')
|
||||
assert response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'] == 'AES256'
|
||||
|
||||
|
||||
@pytest.mark.encryption
@pytest.mark.bucket_policy
@pytest.mark.sse_s3
@pytest.mark.fails_on_dbstore
def test_bucket_policy_put_obj_s3_incorrect_algo_sse_s3():
    """Deny PutObject unless s3:x-amz-server-side-encryption equals AES256.

    Verifies that an explicit non-AES256 algorithm and a request with no
    SSE header are both rejected, while an AES256 upload succeeds and the
    response echoes the encryption header.
    """
    bucket_name = get_new_bucket()
    client = get_v2_client()

    # Deny any PutObject whose SSE algorithm header is not exactly AES256
    # (an absent header also fails StringNotEquals, so it is denied too).
    deny_incorrect_algo = {
        "StringNotEquals": {
            "s3:x-amz-server-side-encryption": "AES256"
        }
    }

    p = Policy()
    resource = _make_arn_resource("{}/{}".format(bucket_name, "*"))

    s = Statement("s3:PutObject", resource, effect="Deny", condition=deny_incorrect_algo)
    policy_document = p.add_statement(s).to_json()

    client.put_bucket_policy(Bucket=bucket_name, Policy=policy_document)
    key1_str = 'testobj'

    # an explicitly wrong algorithm is denied by the policy
    check_access_denied(client.put_object, Bucket=bucket_name, Key=key1_str, Body=key1_str, ServerSideEncryption='AES192')

    # first validate that writing a sse-s3 object works
    response = client.put_object(Bucket=bucket_name, Key=key1_str, ServerSideEncryption='AES256')
    assert response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'] == 'AES256'

    # then validate that a non-encrypted object fails.
    # (this also breaks the connection--non-sse bug, probably because the server
    # errors out before it consumes the data...)
    check_access_denied(client.put_object, Bucket=bucket_name, Key=key1_str, Body=key1_str)
|
||||
@pytest.mark.encryption
|
||||
@pytest.mark.bucket_policy
|
||||
|
@ -12789,13 +13010,35 @@ def test_get_nonpublicpolicy_acl_bucket_policy_status():
|
|||
assert resp['PolicyStatus']['IsPublic'] == False
|
||||
|
||||
|
||||
def test_get_nonpublicpolicy_deny_bucket_policy_status():
|
||||
def test_get_nonpublicpolicy_principal_bucket_policy_status():
    """A policy granting access to one specific principal is not public."""
    bucket_name = get_new_bucket()
    client = get_client()

    bucket_arn = "arn:aws:s3:::" + bucket_name
    # Allow ListBucket only to a single tenant root principal (no wildcard),
    # so PolicyStatus must report the bucket as non-public.
    policy_document = json.dumps({
        "Version": "2012-10-17",
        "Statement": [{
            "Effect": "Allow",
            "Principal": {"AWS": "arn:aws:iam::s3tenant1:root"},
            "Action": "s3:ListBucket",
            "Resource": [
                bucket_arn,
                bucket_arn + "/*",
            ],
        }]
    })

    client.put_bucket_policy(Bucket=bucket_name, Policy=policy_document)
    resp = client.get_bucket_policy_status(Bucket=bucket_name)
    assert resp['PolicyStatus']['IsPublic'] == False
|
||||
def test_bucket_policy_allow_notprincipal():
|
||||
bucket_name = get_new_bucket()
|
||||
client = get_client()
|
||||
|
||||
resource1 = "arn:aws:s3:::" + bucket_name
|
||||
resource2 = "arn:aws:s3:::" + bucket_name + "/*"
|
||||
policy_document = json.dumps(
|
||||
|
@ -12812,9 +13055,12 @@ def test_get_nonpublicpolicy_deny_bucket_policy_status():
|
|||
}]
|
||||
})
|
||||
|
||||
client.put_bucket_policy(Bucket=bucket_name, Policy=policy_document)
|
||||
resp = client.get_bucket_policy_status(Bucket=bucket_name)
|
||||
assert resp['PolicyStatus']['IsPublic'] == True
|
||||
e = assert_raises(ClientError,
|
||||
client.put_bucket_policy, Bucket=bucket_name, Policy=policy_document)
|
||||
status, error_code = _get_status_and_error_code(e.response)
|
||||
assert status == 400
|
||||
assert error_code == 'InvalidArgument' or error_code == 'MalformedPolicy'
|
||||
|
||||
|
||||
def test_get_undefined_public_block():
|
||||
bucket_name = get_new_bucket()
|
||||
|
@ -12955,6 +13201,23 @@ def test_block_public_policy():
|
|||
check_access_denied(client.put_bucket_policy, Bucket=bucket_name, Policy=policy_document)
|
||||
|
||||
|
||||
def test_block_public_policy_with_principal():
    """BlockPublicPolicy must still accept a policy scoped to one principal."""
    bucket_name = get_new_bucket()
    client = get_client()

    # enable only BlockPublicPolicy; the other public-access controls stay off
    access_conf = {
        'BlockPublicAcls': False,
        'IgnorePublicAcls': False,
        'BlockPublicPolicy': True,
        'RestrictPublicBuckets': False,
    }
    client.put_public_access_block(Bucket=bucket_name, PublicAccessBlockConfiguration=access_conf)

    # a non-public policy (specific principal, not "*") is not blocked
    resource = _make_arn_resource("{}/{}".format(bucket_name, "*"))
    policy_document = make_json_policy(
        "s3:GetObject", resource,
        principal={"AWS": "arn:aws:iam::s3tenant1:root"})

    client.put_bucket_policy(Bucket=bucket_name, Policy=policy_document)
|
||||
|
||||
def test_ignore_public_acls():
|
||||
bucket_name = get_new_bucket()
|
||||
client = get_client()
|
||||
|
@ -13472,3 +13735,183 @@ def test_get_object_torrent():
|
|||
status, error_code = _get_status_and_error_code(e.response)
|
||||
assert status == 404
|
||||
assert error_code == 'NoSuchKey'
|
||||
|
||||
@pytest.mark.checksum
def test_object_checksum_sha256():
    """PutObject with a SHA256 checksum: accepted when correct, 400 when wrong."""
    bucket = get_new_bucket()
    client = get_client()

    key = "myobj"
    size = 1024
    body = FakeWriteFile(size, 'A')
    sha256sum = 'arcu6553sHVAiX4MjW0j7I7vD4w6R+Gz9Ok0Q9lTa+0='
    response = client.put_object(Bucket=bucket, Key=key, Body=body,
                                 ChecksumAlgorithm='SHA256',
                                 ChecksumSHA256=sha256sum)
    assert sha256sum == response['ChecksumSHA256']

    # the stored checksum is reported only when ChecksumMode is enabled
    response = client.head_object(Bucket=bucket, Key=key)
    assert 'ChecksumSHA256' not in response
    response = client.head_object(Bucket=bucket, Key=key, ChecksumMode='ENABLED')
    assert sha256sum == response['ChecksumSHA256']

    # a mismatched request checksum is rejected as InvalidRequest
    e = assert_raises(ClientError, client.put_object, Bucket=bucket, Key=key,
                      Body=body, ChecksumAlgorithm='SHA256', ChecksumSHA256='bad')
    status, error_code = _get_status_and_error_code(e.response)
    assert status == 400
    assert error_code == 'InvalidRequest'
|
||||
@pytest.mark.checksum
@pytest.mark.fails_on_dbstore
def test_multipart_checksum_sha256():
    """Checksum validation on complete-multipart-upload.

    Three uploads are exercised: a bad request-level checksum, a part entry
    missing its per-part checksum (both must fail with 400 InvalidRequest),
    and a fully-checksummed upload that completes and reports its composite
    checksum via HEAD when ChecksumMode is enabled.
    """
    bucket = get_new_bucket()
    client = get_client()

    size = 1024
    part_sha256sum = 'arcu6553sHVAiX4MjW0j7I7vD4w6R+Gz9Ok0Q9lTa+0='

    def _start_upload(key):
        # create a SHA256 multipart upload and push one 1k part with a
        # known-good checksum; returns (upload_id, upload_part response)
        create = client.create_multipart_upload(Bucket=bucket, Key=key,
                                                ChecksumAlgorithm='SHA256')
        assert 'SHA256' == create['ChecksumAlgorithm']
        upload_id = create['UploadId']
        part = client.upload_part(UploadId=upload_id, Bucket=bucket, Key=key,
                                  PartNumber=1, Body=FakeWriteFile(size, 'A'),
                                  ChecksumAlgorithm='SHA256',
                                  ChecksumSHA256=part_sha256sum)
        return upload_id, part

    # should reject the bad request checksum
    key = "mymultipart"
    upload_id, part = _start_upload(key)
    e = assert_raises(ClientError, client.complete_multipart_upload,
                      Bucket=bucket, Key=key, UploadId=upload_id,
                      ChecksumSHA256='bad',
                      MultipartUpload={'Parts': [
                          {'ETag': part['ETag'].strip('"'),
                           'ChecksumSHA256': part['ChecksumSHA256'],
                           'PartNumber': 1}]})
    status, error_code = _get_status_and_error_code(e.response)
    assert status == 400
    assert error_code == 'InvalidRequest'

    # XXXX re-trying the complete is failing in RGW due to an internal error
    # that appears not caused by checksums;
    # 2024-04-25T17:47:47.991-0400 7f78e3a006c0  0 req 4931907640780566174 0.011000143s s3:complete_multipart check_previously_completed() ERROR: get_obj_attrs() returned ret=-2
    # 2024-04-25T17:47:47.991-0400 7f78e3a006c0  2 req 4931907640780566174 0.011000143s s3:complete_multipart completing
    # 2024-04-25T17:47:47.991-0400 7f78e3a006c0  1 req 4931907640780566174 0.011000143s s3:complete_multipart ERROR: either op_ret is negative (execute failed) or target_obj is null, op_ret: -2200
    # -2200 turns into 500, InternalError
    # ... so each failure case uses a fresh upload instead of retrying.

    # should reject the missing part checksum
    key = "mymultipart2"
    upload_id, part = _start_upload(key)
    e = assert_raises(ClientError, client.complete_multipart_upload,
                      Bucket=bucket, Key=key, UploadId=upload_id,
                      ChecksumSHA256='bad',
                      MultipartUpload={'Parts': [
                          {'ETag': part['ETag'].strip('"'),
                           'PartNumber': 1}]})
    status, error_code = _get_status_and_error_code(e.response)
    assert status == 400
    assert error_code == 'InvalidRequest'

    # a correct composite checksum completes successfully
    key = "mymultipart3"
    upload_id, part = _start_upload(key)
    composite_sha256sum = 'Ok6Cs5b96ux6+MWQkJO7UBT5sKPBeXBLwvj/hK89smg=-1'
    response = client.complete_multipart_upload(
        Bucket=bucket, Key=key, UploadId=upload_id,
        ChecksumSHA256=composite_sha256sum,
        MultipartUpload={'Parts': [
            {'ETag': part['ETag'].strip('"'),
             'ChecksumSHA256': part['ChecksumSHA256'],
             'PartNumber': 1}]})
    assert composite_sha256sum == response['ChecksumSHA256']

    # the composite checksum is reported only when ChecksumMode is enabled
    response = client.head_object(Bucket=bucket, Key=key)
    assert 'ChecksumSHA256' not in response
    response = client.head_object(Bucket=bucket, Key=key, ChecksumMode='ENABLED')
    assert composite_sha256sum == response['ChecksumSHA256']
|
||||
@pytest.mark.checksum
@pytest.mark.fails_on_dbstore
def test_multipart_checksum_3parts():
    """Complete a 3-part upload with per-part SHA256 checksums.

    Verifies the composite (checksum-of-checksums, "...-3") value on
    complete and that HEAD reports it only with ChecksumMode enabled.
    """
    bucket = get_new_bucket()
    client = get_client()

    key = "mymultipart3"
    response = client.create_multipart_upload(Bucket=bucket, Key=key, ChecksumAlgorithm='SHA256')
    assert 'SHA256' == response['ChecksumAlgorithm']
    upload_id = response['UploadId']

    size = 5 * 1024 * 1024 # each part but the last must be at least 5M
    body = FakeWriteFile(size, 'A')
    part1_sha256sum = '275VF5loJr1YYawit0XSHREhkFXYkkPKGuoK0x9VKxI='
    response = client.upload_part(UploadId=upload_id, Bucket=bucket, Key=key, PartNumber=1, Body=body, ChecksumAlgorithm='SHA256', ChecksumSHA256=part1_sha256sum)
    etag1 = response['ETag'].strip('"')

    body = FakeWriteFile(size, 'B')
    part2_sha256sum = 'mrHwOfjTL5Zwfj74F05HOQGLdUb7E5szdCbxgUSq6NM='
    response = client.upload_part(UploadId=upload_id, Bucket=bucket, Key=key, PartNumber=2, Body=body, ChecksumAlgorithm='SHA256', ChecksumSHA256=part2_sha256sum)
    etag2 = response['ETag'].strip('"')

    body = FakeWriteFile(size, 'C')
    part3_sha256sum = 'Vw7oB/nKQ5xWb3hNgbyfkvDiivl+U+/Dft48nfJfDow='
    response = client.upload_part(UploadId=upload_id, Bucket=bucket, Key=key, PartNumber=3, Body=body, ChecksumAlgorithm='SHA256', ChecksumSHA256=part3_sha256sum)
    etag3 = response['ETag'].strip('"')

    composite_sha256sum = 'uWBwpe1dxI4Vw8Gf0X9ynOdw/SS6VBzfWm9giiv1sf4=-3'
    # fix: each CompletedPart must carry its OWN part checksum; the previous
    # code passed response['ChecksumSHA256'] (the part-3 upload response) for
    # all three parts.
    response = client.complete_multipart_upload(Bucket=bucket, Key=key, UploadId=upload_id, ChecksumSHA256=composite_sha256sum, MultipartUpload={'Parts': [
        {'ETag': etag1, 'ChecksumSHA256': part1_sha256sum, 'PartNumber': 1},
        {'ETag': etag2, 'ChecksumSHA256': part2_sha256sum, 'PartNumber': 2},
        {'ETag': etag3, 'ChecksumSHA256': part3_sha256sum, 'PartNumber': 3}]})
    assert composite_sha256sum == response['ChecksumSHA256']

    # the composite checksum is reported only when ChecksumMode is enabled
    response = client.head_object(Bucket=bucket, Key=key)
    assert 'ChecksumSHA256' not in response
    response = client.head_object(Bucket=bucket, Key=key, ChecksumMode='ENABLED')
    assert composite_sha256sum == response['ChecksumSHA256']
|
||||
@pytest.mark.checksum
def test_post_object_upload_checksum():
    """Browser-style POST upload with x-amz-checksum-sha256.

    A matching checksum yields 204; a garbage checksum yields 400.
    """
    megabytes = 1024 * 1024
    min_size = 0
    max_size = 5 * megabytes
    test_payload_size = 2 * megabytes

    bucket_name = get_new_bucket()
    client = get_client()

    url = _get_post_url(bucket_name)
    expires = datetime.datetime.now(pytz.utc) + datetime.timedelta(seconds=+6000)

    policy_document = {
        "expiration": expires.strftime("%Y-%m-%dT%H:%M:%SZ"),
        "conditions": [
            {"bucket": bucket_name},
            ["starts-with", "$key", "foo_cksum_test"],
            {"acl": "private"},
            ["starts-with", "$Content-Type", "text/plain"],
            ["content-length-range", min_size, max_size],
        ],
    }

    test_payload = b'x' * test_payload_size

    # sign the base64-encoded policy with the account's secret key (SigV2 form)
    json_policy_document = json.JSONEncoder().encode(policy_document)
    policy = base64.b64encode(bytes(json_policy_document, 'utf-8'))
    aws_secret_access_key = get_main_aws_secret_key()
    aws_access_key_id = get_main_aws_access_key()
    signature = base64.b64encode(
        hmac.new(bytes(aws_secret_access_key, 'utf-8'), policy, hashlib.sha1).digest())

    def _form_fields(checksum):
        # ordered multipart/form-data fields; 'file' must come last
        return OrderedDict([
            ("key", "foo_cksum_test.txt"),
            ("AWSAccessKeyId", aws_access_key_id),
            ("acl", "private"),
            ("signature", signature),
            ("policy", policy),
            ("Content-Type", "text/plain"),
            ("x-amz-checksum-sha256", checksum),
            ("file", test_payload),
        ])

    # good checksum payload (checked via upload from awscli)
    good_sum = 'aTL9MeXa9HObn6eP93eygxsJlcwdCwCTysgGAZAgE7w='
    r = requests.post(url, files=_form_fields(good_sum), verify=get_config_ssl_verify())
    assert r.status_code == 204

    # bad checksum payload
    r = requests.post(url, files=_form_fields('sailorjerry'), verify=get_config_ssl_verify())
    assert r.status_code == 400
|
|
@ -4,6 +4,7 @@ import string
|
|||
import re
|
||||
import json
|
||||
from botocore.exceptions import ClientError
|
||||
from botocore.exceptions import EventStreamError
|
||||
|
||||
import uuid
|
||||
import warnings
|
||||
|
@ -294,6 +295,7 @@ def run_s3select(bucket,key,query,column_delim=",",row_delim="\n",quot_char='"',
|
|||
s3 = get_client()
|
||||
result = ""
|
||||
result_status = {}
|
||||
|
||||
try:
|
||||
r = s3.select_object_content(
|
||||
Bucket=bucket,
|
||||
|
@ -309,10 +311,17 @@ def run_s3select(bucket,key,query,column_delim=",",row_delim="\n",quot_char='"',
|
|||
return result
|
||||
|
||||
if progress == False:
|
||||
|
||||
try:
|
||||
for event in r['Payload']:
|
||||
if 'Records' in event:
|
||||
records = event['Records']['Payload'].decode('utf-8')
|
||||
result += records
|
||||
|
||||
except EventStreamError as c:
|
||||
result = str(c)
|
||||
return result
|
||||
|
||||
else:
|
||||
result = []
|
||||
max_progress_scanned = 0
|
||||
|
@ -330,6 +339,7 @@ def run_s3select(bucket,key,query,column_delim=",",row_delim="\n",quot_char='"',
|
|||
if 'End' in event:
|
||||
result_status['End'] = event['End']
|
||||
|
||||
|
||||
if progress == False:
|
||||
return result
|
||||
else:
|
||||
|
@ -884,7 +894,7 @@ def test_like_expressions():
|
|||
|
||||
res_s3select_like = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select count(*) from stdin where _1 like "%aeio%" like;')).replace("\n","")
|
||||
|
||||
find_like = res_s3select_like.find("s3select-Syntax-Error")
|
||||
find_like = res_s3select_like.find("UnsupportedSyntax")
|
||||
|
||||
assert int(find_like) >= 0
|
||||
|
||||
|
@ -1349,7 +1359,6 @@ def test_schema_definition():
|
|||
|
||||
# using the scheme on first line, query is using the attach schema
|
||||
res_use = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,"select c1,c3 from s3object;",csv_header_info="USE") ).replace("\n","")
|
||||
|
||||
# result of both queries should be the same
|
||||
s3select_assert_result( res_ignore, res_use)
|
||||
|
||||
|
@ -1358,8 +1367,8 @@ def test_schema_definition():
|
|||
|
||||
assert ((res_multiple_defintion.find("alias {c11} or column not exist in schema")) >= 0)
|
||||
|
||||
#find_processing_error = res_multiple_defintion.find("s3select-ProcessingTime-Error")
|
||||
assert ((res_multiple_defintion.find("s3select-ProcessingTime-Error")) >= 0)
|
||||
#find_processing_error = res_multiple_defintion.find("ProcessingTimeError")
|
||||
assert ((res_multiple_defintion.find("ProcessingTimeError")) >= 0)
|
||||
|
||||
# alias-name is identical to column-name
|
||||
res_multiple_defintion = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,"select int(c1)+int(c2) as c4,c4 from s3object;",csv_header_info="USE") ).replace("\n","")
|
||||
|
|
Loading…
Reference in a new issue