forked from TrueCloudLab/s3-tests
Comparing master...ceph-quinc
114 commits
Commits (SHA1; author and date columns did not survive the export):

0aa44280c1  5ac04c4f26  02542a0353  92bbc4ad05  722c2a90bf  42edac2aa7
1642d548cd  57cf91a748  d8cf627b69  f93722a1e9  852af40fd0  9f1bda4568
cfabb8ede7  b1cfda067e  36fae0e4e0  f81eb9daaf  001b8c14a1  1af1880b7a
0b8e554dd1  70b928269f  cfa805efe9  090fbfbead  4143ec30f3  41006d68c2
ae981dd3a8  5e0cea1c07  d44879544d  7f21baaea6  3c1de4acc3  de683cd665
6f9becf5b9  f4c19c9389  89363e8fef  d1a38df407  e95809024c  93a6d89681
907331d90c  6e426d3291  80763520dc  27364bf5f6  0fa29f6c8e  47fff36c36
95154bf0ce  e00e3a77b0  864902a3ad  ec530d049c  f004493dcc  9eacf29594
8090ea4629  df426ea041  057432b9f5  e22a689a44  f344fd6ca7  97fb5a7ee3
0fef1637ae  a81ad3515e  0d7111ffc2  2ad7f81917  72957ece35  8423389033
aea3f6b4c3  6f74f2af07  6b412b509b  87993f147d  ecc4cbc5c4  c4635f9463
287acbc6e7  68f1939942  a0aa55d4ae  e3d31ef6eb  3fe80dc877  a5108a7d69
3be10d722f  adad16121f  dd163877d4  86bc2a191f  3698d093bf  65b067486e
0e36699571  3dc4ff5da8  c9792cb975  b930f194e4  253b63aa11  6bd75be1d6
61804bcf91  ea9f07a2bf  7d14452035  6ea6cb6467  10c801a2e0  9d670846a3
bb801b8625  652619f46f  b1ddeee6eb  ca9cb5cc2c  e010c4cfec  edea887e9c
cd4f7e1a7a  048f9297a1  8bd6158054  aca68a9d39  537431c686  8ca96c4519
34040769ff  8ebb504159  9092d1ac61  7b3df700cc  4fc133b1b5  0a495efc8c
a48cf75391  a20e0d47f2  19947bd541  94168194fd  0e3084c995  1d39198872
12 changed files with 6858 additions and 489 deletions
README.rst (23 changed lines)
@@ -26,15 +26,15 @@ Once you have that file copied and edited, you can run the tests with::

 You can specify which directory of tests to run::

-    S3TEST_CONF=your.conf ./virtualenv/bin/nosetests s3tests.functional
+    S3TEST_CONF=your.conf ./virtualenv/bin/nosetests s3tests_boto3.functional

 You can specify which file of tests to run::

-    S3TEST_CONF=your.conf ./virtualenv/bin/nosetests s3tests.functional.test_s3
+    S3TEST_CONF=your.conf ./virtualenv/bin/nosetests s3tests_boto3.functional.test_s3

 You can specify which test to run::

-    S3TEST_CONF=your.conf ./virtualenv/bin/nosetests s3tests.functional.test_s3:test_bucket_list_empty
+    S3TEST_CONF=your.conf ./virtualenv/bin/nosetests s3tests_boto3.functional.test_s3:test_bucket_list_empty

 To gather a list of tests being run, use the flags::

@@ -54,3 +54,20 @@ You can run only the boto3 tests with::

     S3TEST_CONF=your.conf ./virtualenv/bin/nosetests -v -s -A 'not fails_on_rgw' s3tests_boto3.functional
+
+========================
+STS compatibility tests
+========================
+
+This section contains some basic tests for the AssumeRole, GetSessionToken and AssumeRoleWithWebIdentity APIs. The test file is located under ``s3tests_boto3/functional``.
+
+You can run only the sts tests (all three APIs) with::
+
+    S3TEST_CONF=your.conf ./virtualenv/bin/nosetests s3tests_boto3.functional.test_sts
+
+You can filter tests based on attributes. There is an attribute named ``test_of_sts`` to run the AssumeRole and GetSessionToken tests, and ``webidentity_test`` to run the AssumeRoleWithWebIdentity tests. If you want to execute only the ``test_of_sts`` tests, you can apply that filter as below::
+
+    S3TEST_CONF=your.conf ./virtualenv/bin/nosetests -v -s -A 'test_of_sts' s3tests_boto3.functional.test_sts
+
+For running ``webidentity_test`` you'll need to have Keycloak running.
+
+In order to run any STS test you'll need to add an "iam" section to the config file. For further reference on how your config file should look, check ``s3tests.conf.SAMPLE``.
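
Note: the bulk of this changeset tags tests with a new ``fails_on_dbstore`` nose attribute (see the test file diffs below). Assuming the same attribute-filter syntax the README already uses for ``fails_on_rgw``, those tests can be skipped when running against the dbstore backend with::

    S3TEST_CONF=your.conf ./virtualenv/bin/nosetests -v -s -A 'not fails_on_dbstore' s3tests_boto3.functional
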
bootstrap (12 changed lines)
@@ -22,12 +22,8 @@ case "$ID" in
         ;;
     centos|fedora|rhel|ol|virtuozzo)

-        packages=(which python3-virtualenv python36-devel libevent-devel libffi-devel libxml2-devel libxslt-devel zlib-devel)
+        packages=(which python3-virtualenv python3-devel libevent-devel libffi-devel libxml2-devel libxslt-devel zlib-devel)
         for package in ${packages[@]}; do
-            # When the package is python36-devel we change it to python3-devel on Fedora
-            if [[ ${package} == "python36-devel" && -f /etc/fedora-release ]]; then
-                package=python36
-            fi
             if [ "$(rpm -qa $package 2>/dev/null)" == "" ]; then
                 missing="${missing:+$missing }$package"
             fi
@@ -59,13 +55,13 @@ esac


 # s3-tests only works on python 3.6 not newer versions of python3
-${virtualenv} --python=$(which python3.6) --no-site-packages --distribute virtualenv
+${virtualenv} --python=$(which python3.6) virtualenv

 # avoid pip bugs
 ./virtualenv/bin/pip3 install --upgrade pip

-# slightly old version of setuptools; newer fails w/ requests 0.14.0
-./virtualenv/bin/pip3 install setuptools==32.3.1
+# latest setuptools supporting python 2.7
+./virtualenv/bin/pip install setuptools==44.1.0

 ./virtualenv/bin/pip3 install -r requirements.txt

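
For context, a typical workflow with the updated bootstrap is (a sketch; it assumes python3.6 and virtualenv are already installed, per the comment in the script)::

    ./bootstrap
    S3TEST_CONF=your.conf ./virtualenv/bin/nosetests s3tests_boto3.functional
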
requirements.txt
@@ -2,11 +2,13 @@ PyYAML
 nose >=1.0.0
 boto >=2.6.0
 boto3 >=1.0.0
+# botocore-1.28 broke v2 signatures, see https://tracker.ceph.com/issues/58059
+botocore <1.28.0
 munch >=2.0.0
 # 0.14 switches to libev, that means bootstrap needs to change too
 gevent >=1.0
 isodate >=0.4.4
-requests >=0.14.0
-pytz >=2011k
+requests >=2.23.0
+pytz
 httplib2
 lxml
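
The new ``botocore <1.28.0`` pin matters because pip would otherwise resolve the latest botocore alongside ``boto3 >=1.0.0`` and break the v2-signature tests tracked in the linked issue. After bootstrapping you can confirm what actually got installed (a sketch)::

    ./virtualenv/bin/pip3 freeze | grep -i botocore
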
s3tests.conf.SAMPLE
@@ -10,6 +10,9 @@ port = 8000
 ## say "False" to disable TLS
 is_secure = False

+## say "False" to disable SSL Verify
+ssl_verify = False
+
 [fixtures]
 ## all the buckets created will start with this prefix;
 ## {random} will be filled with random characters to pad
@@ -38,6 +41,12 @@ secret_key = h7GhxuBLTrlhVUyxSPUKUV8r/2EI4ngqJxD7iBdBYLhwluN30JaT3Q==
 ## replace with key id obtained when secret is created, or delete if KMS not tested
 #kms_keyid = 01234567-89ab-cdef-0123-456789abcdef

+## Storage classes
+#storage_classes = "LUKEWARM, FROZEN"
+
+## Lifecycle debug interval (default: 10)
+#lc_debug_interval = 20
+
 [s3 alt]
 # alt display_name set in vstart.sh
 display_name = john.doe
@@ -53,6 +62,37 @@ access_key = NOPQRSTUVWXYZABCDEFG
 # alt AWS secret key set in vstart.sh
 secret_key = nopqrstuvwxyzabcdefghijklmnabcdefghijklm

+#[s3 cloud]
+## to run the testcases with "cloud_transition" attribute.
+## Note: the waiting time may have to be tweaked depending on
+## the I/O latency to the cloud endpoint.
+
+## host set for cloud endpoint
+# host = localhost
+
+## port set for cloud endpoint
+# port = 8001
+
+## say "False" to disable TLS
+# is_secure = False
+
+## cloud endpoint credentials
+# access_key = 0555b35654ad1656d804
+# secret_key = h7GhxuBLTrlhVUyxSPUKUV8r/2EI4ngqJxD7iBdBYLhwluN30JaT3Q==
+
+## storage class configured as cloud tier on local rgw server
+# cloud_storage_class = CLOUDTIER
+
+## Below are optional -
+
+## Above configured cloud storage class config options
+# retain_head_object = false
+# target_storage_class = Target_SC
+# target_path = cloud-bucket
+
+## another regular storage class to test multiple transition rules,
+# storage_class = S1
+
 [s3 tenant]
 # tenant display_name set in vstart.sh
 display_name = testx$tenanteduser
@@ -68,3 +108,39 @@ secret_key = opqrstuvwxyzabcdefghijklmnopqrstuvwxyzab

 # tenant email set in vstart.sh
 email = tenanteduser@example.com
+
+#following section needs to be added for all sts-tests
+[iam]
+#used for iam operations in sts-tests
+#email from vstart.sh
+email = s3@example.com
+
+#user_id from vstart.sh
+user_id = 0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef
+
+#access_key from vstart.sh
+access_key = ABCDEFGHIJKLMNOPQRST
+
+#secret_key from vstart.sh
+secret_key = abcdefghijklmnopqrstuvwxyzabcdefghijklmn
+
+#display_name from vstart.sh
+display_name = youruseridhere
+
+#following section needs to be added when you want to run Assume Role With Webidentity test
+[webidentity]
+#used for assume role with web identity test in sts-tests
+#all parameters will be obtained from ceph/qa/tasks/keycloak.py
+token=<access_token>

+aud=<obtained after introspecting token>
+
+sub=<obtained after introspecting token>
+
+azp=<obtained after introspecting token>
+
+user_token=<access token for a user, with attribute Department=[Engineering, Marketing]>
+
+thumbprint=<obtained from x509 certificate>
+
+KC_REALM=<name of the realm>
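
To illustrate how the new ``[iam]`` values are consumed, here is a minimal sketch (not part of the suite; the endpoint and region are assumptions based on the ``[DEFAULT]`` host/port above) of a GetSessionToken call against RGW::

    import boto3

    # credentials mirror the [iam] section; endpoint mirrors [DEFAULT] host/port
    sts = boto3.client('sts',
                       aws_access_key_id='ABCDEFGHIJKLMNOPQRST',
                       aws_secret_access_key='abcdefghijklmnopqrstuvwxyzabcdefghijklmn',
                       endpoint_url='http://localhost:8000',
                       region_name='us-east-1')  # placeholder region; RGW does not route by it
    creds = sts.get_session_token(DurationSeconds=3600)['Credentials']
    print(creds['AccessKeyId'])
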
s3tests/functional/test_headers.py
@@ -182,6 +182,7 @@ def tag(*tags):
 @attr(method='put')
 @attr(operation='create w/no content length')
 @attr(assertion='fails 411')
+@attr('fails_on_dbstore')
 @nose.with_setup(teardown=_clear_custom_headers)
 def test_object_create_bad_contentlength_none():
     key = _setup_bad_object(remove=('Content-Length',))
@@ -221,6 +222,7 @@ def test_object_create_bad_contentlength_mismatch_above():
 @attr(method='put')
 @attr(operation='create w/empty authorization')
 @attr(assertion='fails 403')
+@attr('fails_on_dbstore')
 @nose.with_setup(teardown=_clear_custom_headers)
 def test_object_create_bad_authorization_empty():
     key = _setup_bad_object({'Authorization': ''})
@@ -235,6 +237,7 @@ def test_object_create_bad_authorization_empty():
 @attr(method='put')
 @attr(operation='create w/date and x-amz-date')
 @attr(assertion='succeeds')
+@attr('fails_on_dbstore')
 @nose.with_setup(teardown=_clear_custom_headers)
 def test_object_create_date_and_amz_date():
     date = formatdate(usegmt=True)
@@ -246,6 +249,7 @@ def test_object_create_date_and_amz_date():
 @attr(method='put')
 @attr(operation='create w/x-amz-date and no date')
 @attr(assertion='succeeds')
+@attr('fails_on_dbstore')
 @nose.with_setup(teardown=_clear_custom_headers)
 def test_object_create_amz_date_and_no_date():
     date = formatdate(usegmt=True)
@@ -259,6 +263,7 @@ def test_object_create_amz_date_and_no_date():
 @attr(method='put')
 @attr(operation='create w/no authorization')
 @attr(assertion='fails 403')
+@attr('fails_on_dbstore')
 @nose.with_setup(teardown=_clear_custom_headers)
 def test_object_create_bad_authorization_none():
     key = _setup_bad_object(remove=('Authorization',))
@@ -274,6 +279,7 @@ def test_object_create_bad_authorization_none():
 @attr(method='put')
 @attr(operation='create w/no content length')
 @attr(assertion='succeeds')
+@attr('fails_on_dbstore')
 @nose.with_setup(teardown=_clear_custom_headers)
 def test_bucket_create_contentlength_none():
     _add_custom_headers(remove=('Content-Length',))
@@ -285,6 +291,7 @@ def test_bucket_create_contentlength_none():
 @attr(method='acls')
 @attr(operation='set w/no content length')
 @attr(assertion='succeeds')
+@attr('fails_on_dbstore')
 @nose.with_setup(teardown=_clear_custom_headers)
 def test_object_acl_create_contentlength_none():
     bucket = get_new_bucket()
@@ -328,6 +335,7 @@ def test_bucket_create_bad_contentlength_empty():
 @attr(method='put')
 @attr(operation='create w/no content length')
 @attr(assertion='succeeds')
+@attr('fails_on_dbstore')
 @nose.with_setup(teardown=_clear_custom_headers)
 def test_bucket_create_bad_contentlength_none():
     _add_custom_headers(remove=('Content-Length',))
@@ -339,6 +347,7 @@ def test_bucket_create_bad_contentlength_none():
 @attr(method='put')
 @attr(operation='create w/empty authorization')
 @attr(assertion='fails 403')
+@attr('fails_on_dbstore')
 @nose.with_setup(teardown=_clear_custom_headers)
 def test_bucket_create_bad_authorization_empty():
     _add_custom_headers({'Authorization': ''})
@@ -354,6 +363,7 @@ def test_bucket_create_bad_authorization_empty():
 @attr(method='put')
 @attr(operation='create w/no authorization')
 @attr(assertion='fails 403')
+@attr('fails_on_dbstore')
 @nose.with_setup(teardown=_clear_custom_headers)
 def test_bucket_create_bad_authorization_none():
     _add_custom_headers(remove=('Authorization',))
@@ -371,6 +381,7 @@ def test_bucket_create_bad_authorization_none():
 @attr(method='put')
 @attr(operation='create w/content length too short')
 @attr(assertion='fails 400')
+@attr('fails_on_dbstore')
 @nose.with_setup(teardown=_clear_custom_headers)
 def test_object_create_bad_contentlength_mismatch_below_aws2():
     check_aws2_support()
@@ -388,6 +399,7 @@ def test_object_create_bad_contentlength_mismatch_below_aws2():
 @attr(method='put')
 @attr(operation='create w/incorrect authorization')
 @attr(assertion='fails 403')
+@attr('fails_on_dbstore')
 @nose.with_setup(teardown=_clear_custom_headers)
 def test_object_create_bad_authorization_incorrect_aws2():
     check_aws2_support()
@@ -404,6 +416,7 @@ def test_object_create_bad_authorization_incorrect_aws2():
 @attr(method='put')
 @attr(operation='create w/invalid authorization')
 @attr(assertion='fails 400')
+@attr('fails_on_dbstore')
 def test_object_create_bad_authorization_invalid_aws2():
     check_aws2_support()
     key = _setup_bad_object({'Authorization': 'AWS HAHAHA'})
@@ -417,6 +430,7 @@ def test_object_create_bad_authorization_invalid_aws2():
 @attr(method='put')
 @attr(operation='create w/no date')
 @attr(assertion='fails 403')
+@attr('fails_on_dbstore')
 @nose.with_setup(teardown=_clear_custom_headers)
 def test_object_create_bad_date_none_aws2():
     check_aws2_support()
@@ -446,6 +460,7 @@ def test_bucket_create_bad_authorization_invalid_aws2():
 @attr(method='put')
 @attr(operation='create w/no date')
 @attr(assertion='fails 403')
+@attr('fails_on_dbstore')
 @nose.with_setup(teardown=_clear_custom_headers)
 def test_bucket_create_bad_date_none_aws2():
     check_aws2_support()
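
Every hunk above follows the same pattern: a one-line ``@attr('fails_on_dbstore')`` decorator added ahead of each affected test. For reference, a minimal sketch of how nose attribute tagging and selection fit together (the test name here is illustrative)::

    from nose.plugins.attrib import attr

    @attr('fails_on_dbstore')
    def test_example():
        pass

    # excluded at run time with:  nosetests -A 'not fails_on_dbstore'
    # selected at run time with:  nosetests -A 'fails_on_dbstore'
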
s3tests/functional/test_s3.py
@@ -133,6 +133,7 @@ def check_configure_versioning_retry(bucket, status, expected_string):
 @attr(operation='create versioned object, read not exist null version')
 @attr(assertion='read null version behaves correctly')
 @attr('versioning')
+@attr('fails_on_dbstore')
 def test_versioning_obj_read_not_exist_null():
     bucket = get_new_bucket()
     check_versioning(bucket, None)
@@ -155,6 +156,7 @@ def test_versioning_obj_read_not_exist_null():
 @attr('fails_on_aws')
 @attr('fails_with_subdomain')
 @attr('appendobject')
+@attr('fails_on_dbstore')
 def test_append_object():
     bucket = get_new_bucket()
     key = bucket.new_key('foo')
@@ -179,6 +181,7 @@ def test_append_object():
 @attr('fails_on_aws')
 @attr('fails_with_subdomain')
 @attr('appendobject')
+@attr('fails_on_dbstore')
 def test_append_normal_object():
     bucket = get_new_bucket()
     key = bucket.new_key('foo')
@@ -199,6 +202,7 @@ def test_append_normal_object():
 @attr('fails_on_aws')
 @attr('fails_with_subdomain')
 @attr('appendobject')
+@attr('fails_on_dbstore')
 def test_append_object_position_wrong():
     bucket = get_new_bucket()
     key = bucket.new_key('foo')
@@ -236,242 +240,6 @@ def list_bucket_storage_class(bucket):

     return result

-# The test harness for lifecycle is configured to treat days as 10 second intervals.
-@attr(resource='bucket')
-@attr(method='put')
-@attr(operation='test lifecycle expiration')
-@attr('lifecycle')
-@attr('lifecycle_transition')
-@attr('fails_on_aws')
-def test_lifecycle_transition():
-    sc = configured_storage_classes()
-    if len(sc) < 3:
-        raise SkipTest
-
-    bucket = set_lifecycle(rules=[{'id': 'rule1', 'transition': lc_transition(days=1, storage_class=sc[1]), 'prefix': 'expire1/', 'status': 'Enabled'},
-                                  {'id': 'rule2', 'transition': lc_transition(days=4, storage_class=sc[2]), 'prefix': 'expire3/', 'status': 'Enabled'}])
-    _create_keys(bucket=bucket, keys=['expire1/foo', 'expire1/bar', 'keep2/foo',
-                                      'keep2/bar', 'expire3/foo', 'expire3/bar'])
-    # Get list of all keys
-    init_keys = bucket.get_all_keys()
-    eq(len(init_keys), 6)
-
-    # Wait for first expiration (plus fudge to handle the timer window)
-    time.sleep(25)
-    expire1_keys = list_bucket_storage_class(bucket)
-    eq(len(expire1_keys['STANDARD']), 4)
-    eq(len(expire1_keys[sc[1]]), 2)
-    eq(len(expire1_keys[sc[2]]), 0)
-
-    # Wait for next expiration cycle
-    time.sleep(10)
-    keep2_keys = list_bucket_storage_class(bucket)
-    eq(len(keep2_keys['STANDARD']), 4)
-    eq(len(keep2_keys[sc[1]]), 2)
-    eq(len(keep2_keys[sc[2]]), 0)
-
-    # Wait for final expiration cycle
-    time.sleep(20)
-    expire3_keys = list_bucket_storage_class(bucket)
-    eq(len(expire3_keys['STANDARD']), 2)
-    eq(len(expire3_keys[sc[1]]), 2)
-    eq(len(expire3_keys[sc[2]]), 2)
-
-# The test harness for lifecycle is configured to treat days as 10 second intervals.
-@attr(resource='bucket')
-@attr(method='put')
-@attr(operation='test lifecycle expiration')
-@attr('lifecycle')
-@attr('lifecycle_transition')
-@attr('fails_on_aws')
-def test_lifecycle_transition_single_rule_multi_trans():
-    sc = configured_storage_classes()
-    if len(sc) < 3:
-        raise SkipTest
-
-    bucket = set_lifecycle(rules=[
-        {'id': 'rule1',
-         'transition': lc_transitions([
-             lc_transition(days=1, storage_class=sc[1]),
-             lc_transition(days=4, storage_class=sc[2])]),
-         'prefix': 'expire1/',
-         'status': 'Enabled'}])
-
-    _create_keys(bucket=bucket, keys=['expire1/foo', 'expire1/bar', 'keep2/foo',
-                                      'keep2/bar', 'expire3/foo', 'expire3/bar'])
-    # Get list of all keys
-    init_keys = bucket.get_all_keys()
-    eq(len(init_keys), 6)
-
-    # Wait for first expiration (plus fudge to handle the timer window)
-    time.sleep(25)
-    expire1_keys = list_bucket_storage_class(bucket)
-    eq(len(expire1_keys['STANDARD']), 4)
-    eq(len(expire1_keys[sc[1]]), 2)
-    eq(len(expire1_keys[sc[2]]), 0)
-
-    # Wait for next expiration cycle
-    time.sleep(10)
-    keep2_keys = list_bucket_storage_class(bucket)
-    eq(len(keep2_keys['STANDARD']), 4)
-    eq(len(keep2_keys[sc[1]]), 2)
-    eq(len(keep2_keys[sc[2]]), 0)
-
-    # Wait for final expiration cycle
-    time.sleep(20)
-    expire3_keys = list_bucket_storage_class(bucket)
-    eq(len(expire3_keys['STANDARD']), 4)
-    eq(len(expire3_keys[sc[1]]), 0)
-    eq(len(expire3_keys[sc[2]]), 2)
-
-def generate_lifecycle_body(rules):
-    body = '<?xml version="1.0" encoding="UTF-8"?><LifecycleConfiguration>'
-    for rule in rules:
-        body += '<Rule><ID>%s</ID><Status>%s</Status>' % (rule['ID'], rule['Status'])
-        if 'Prefix' in list(rule.keys()):
-            body += '<Prefix>%s</Prefix>' % rule['Prefix']
-        if 'Filter' in list(rule.keys()):
-            prefix_str = ''  # AWS supports empty filters
-            if 'Prefix' in list(rule['Filter'].keys()):
-                prefix_str = '<Prefix>%s</Prefix>' % rule['Filter']['Prefix']
-            body += '<Filter>%s</Filter>' % prefix_str
-
-        if 'Expiration' in list(rule.keys()):
-            if 'ExpiredObjectDeleteMarker' in list(rule['Expiration'].keys()):
-                body += '<Expiration><ExpiredObjectDeleteMarker>%s</ExpiredObjectDeleteMarker></Expiration>' \
-                        % rule['Expiration']['ExpiredObjectDeleteMarker']
-            elif 'Date' in list(rule['Expiration'].keys()):
-                body += '<Expiration><Date>%s</Date></Expiration>' % rule['Expiration']['Date']
-            else:
-                body += '<Expiration><Days>%d</Days></Expiration>' % rule['Expiration']['Days']
-        if 'NoncurrentVersionExpiration' in list(rule.keys()):
-            body += '<NoncurrentVersionExpiration><NoncurrentDays>%d</NoncurrentDays></NoncurrentVersionExpiration>' % \
-                    rule['NoncurrentVersionExpiration']['NoncurrentDays']
-        if 'NoncurrentVersionTransition' in list(rule.keys()):
-            for t in rule['NoncurrentVersionTransition']:
-                body += '<NoncurrentVersionTransition>'
-                body += '<NoncurrentDays>%d</NoncurrentDays>' % \
-                        t['NoncurrentDays']
-                body += '<StorageClass>%s</StorageClass>' % \
-                        t['StorageClass']
-                body += '</NoncurrentVersionTransition>'
-        if 'AbortIncompleteMultipartUpload' in list(rule.keys()):
-            body += '<AbortIncompleteMultipartUpload><DaysAfterInitiation>%d</DaysAfterInitiation>' \
-                    '</AbortIncompleteMultipartUpload>' % rule['AbortIncompleteMultipartUpload']['DaysAfterInitiation']
-        body += '</Rule>'
-    body += '</LifecycleConfiguration>'
-    return body
-
-
-@attr(resource='bucket')
-@attr(method='put')
-@attr(operation='set lifecycle config with noncurrent version expiration')
-@attr('lifecycle')
-@attr('lifecycle_transition')
-def test_lifecycle_set_noncurrent_transition():
-    sc = configured_storage_classes()
-    if len(sc) < 3:
-        raise SkipTest
-
-    bucket = get_new_bucket()
-    rules = [
-        {
-            'ID': 'rule1',
-            'Prefix': 'test1/',
-            'Status': 'Enabled',
-            'NoncurrentVersionTransition': [
-                {
-                    'NoncurrentDays': 2,
-                    'StorageClass': sc[1]
-                },
-                {
-                    'NoncurrentDays': 4,
-                    'StorageClass': sc[2]
-                }
-            ],
-            'NoncurrentVersionExpiration': {
-                'NoncurrentDays': 6
-            }
-        },
-        {'ID': 'rule2', 'Prefix': 'test2/', 'Status': 'Disabled', 'NoncurrentVersionExpiration': {'NoncurrentDays': 3}}
-    ]
-    body = generate_lifecycle_body(rules)
-    fp = StringIO(body)
-    md5 = boto.utils.compute_md5(fp)
-    headers = {'Content-MD5': md5[1], 'Content-Type': 'text/xml'}
-    res = bucket.connection.make_request('PUT', bucket.name, data=fp.getvalue(), query_args='lifecycle',
-                                         headers=headers)
-    eq(res.status, 200)
-    eq(res.reason, 'OK')
-
-
-@attr(resource='bucket')
-@attr(method='put')
-@attr(operation='test lifecycle non-current version expiration')
-@attr('lifecycle')
-@attr('lifecycle_expiration')
-@attr('lifecycle_transition')
-@attr('fails_on_aws')
-def test_lifecycle_noncur_transition():
-    sc = configured_storage_classes()
-    if len(sc) < 3:
-        raise SkipTest
-
-    bucket = get_new_bucket()
-    check_configure_versioning_retry(bucket, True, "Enabled")
-
-    rules = [
-        {
-            'ID': 'rule1',
-            'Prefix': 'test1/',
-            'Status': 'Enabled',
-            'NoncurrentVersionTransition': [
-                {
-                    'NoncurrentDays': 1,
-                    'StorageClass': sc[1]
-                },
-                {
-                    'NoncurrentDays': 3,
-                    'StorageClass': sc[2]
-                }
-            ],
-            'NoncurrentVersionExpiration': {
-                'NoncurrentDays': 5
-            }
-        }
-    ]
-    body = generate_lifecycle_body(rules)
-    fp = StringIO(body)
-    md5 = boto.utils.compute_md5(fp)
-    headers = {'Content-MD5': md5[1], 'Content-Type': 'text/xml'}
-    bucket.connection.make_request('PUT', bucket.name, data=fp.getvalue(), query_args='lifecycle',
-                                   headers=headers)
-
-    create_multiple_versions(bucket, "test1/a", 3)
-    create_multiple_versions(bucket, "test1/b", 3)
-    init_keys = bucket.get_all_versions()
-    eq(len(init_keys), 6)
-
-    time.sleep(25)
-    expire1_keys = list_bucket_storage_class(bucket)
-    eq(len(expire1_keys['STANDARD']), 2)
-    eq(len(expire1_keys[sc[1]]), 4)
-    eq(len(expire1_keys[sc[2]]), 0)
-
-    time.sleep(20)
-    expire1_keys = list_bucket_storage_class(bucket)
-    eq(len(expire1_keys['STANDARD']), 2)
-    eq(len(expire1_keys[sc[1]]), 0)
-    eq(len(expire1_keys[sc[2]]), 4)
-
-    time.sleep(20)
-    expire_keys = bucket.get_all_versions()
-    expire1_keys = list_bucket_storage_class(bucket)
-    eq(len(expire1_keys['STANDARD']), 2)
-    eq(len(expire1_keys[sc[1]]), 0)
-    eq(len(expire1_keys[sc[2]]), 0)
-
-
 def transfer_part(bucket, mp_id, mp_keyname, i, part, headers=None):
     """Transfer a part of a multipart upload. Designed to be run in parallel.
     """
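
The removed ``generate_lifecycle_body()`` helper built the lifecycle XML by plain string concatenation. Tracing the code in the hunk above, a simple expiration rule (the input dict here is an illustrative example) serializes to one unbroken line, wrapped below for readability::

    rules = [{'ID': 'rule1', 'Prefix': 'test1/', 'Status': 'Enabled',
              'Expiration': {'Days': 2}}]
    generate_lifecycle_body(rules)
    # -> '<?xml version="1.0" encoding="UTF-8"?><LifecycleConfiguration>'
    #    '<Rule><ID>rule1</ID><Status>Enabled</Status><Prefix>test1/</Prefix>'
    #    '<Expiration><Days>2</Days></Expiration></Rule></LifecycleConfiguration>'
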
@@ -638,6 +406,7 @@ def lc_transitions(transitions=None):
 @attr(operation='test create object with storage class')
 @attr('storage_class')
 @attr('fails_on_aws')
+@attr('fails_on_dbstore')
 def test_object_storage_class():
     sc = configured_storage_classes()
     if len(sc) < 2:
@@ -656,6 +425,7 @@ def test_object_storage_class():
 @attr(operation='test create multipart object with storage class')
 @attr('storage_class')
 @attr('fails_on_aws')
+@attr('fails_on_dbstore')
 def test_object_storage_class_multipart():
     sc = configured_storage_classes()
     if len(sc) < 2:
@@ -697,6 +467,7 @@ def _do_test_object_modify_storage_class(obj_write_func, size):
 @attr(operation='test changing objects storage class')
 @attr('storage_class')
 @attr('fails_on_aws')
+@attr('fails_on_dbstore')
 def test_object_modify_storage_class():
     _do_test_object_modify_storage_class(_populate_key, size=9*1024*1024)

@@ -706,6 +477,7 @@ def test_object_modify_storage_class():
 @attr(operation='test changing objects storage class')
 @attr('storage_class')
 @attr('fails_on_aws')
+@attr('fails_on_dbstore')
 def test_object_modify_storage_class_multipart():
     _do_test_object_modify_storage_class(_populate_multipart_key, size=11*1024*1024)

@@ -735,6 +507,7 @@ def _do_test_object_storage_class_copy(obj_write_func, size):
 @attr(operation='test copy object to object with different storage class')
 @attr('storage_class')
 @attr('fails_on_aws')
+@attr('fails_on_dbstore')
 def test_object_storage_class_copy():
     _do_test_object_storage_class_copy(_populate_key, size=9*1024*1024)

@@ -743,6 +516,7 @@ def test_object_storage_class_copy():
 @attr(operation='test changing objects storage class')
 @attr('storage_class')
 @attr('fails_on_aws')
+@attr('fails_on_dbstore')
 def test_object_storage_class_copy_multipart():
     _do_test_object_storage_class_copy(_populate_multipart_key, size=9*1024*1024)

@@ -849,6 +623,7 @@ def _test_atomic_dual_conditional_write(file_size):
 @attr(operation='write one or the other')
 @attr(assertion='1MB successful')
 @attr('fails_on_aws')
+@attr('fails_on_dbstore')
 def test_atomic_dual_conditional_write_1mb():
     _test_atomic_dual_conditional_write(1024*1024)

@@ -857,6 +632,7 @@ def test_atomic_dual_conditional_write_1mb():
 @attr(operation='write file in deleted bucket')
 @attr(assertion='fail 404')
 @attr('fails_on_aws')
+@attr('fails_on_dbstore')
 def test_atomic_write_bucket_gone():
     bucket = get_new_bucket()

@@ -901,6 +677,7 @@ def _multipart_upload_enc(bucket, s3_key_name, size, part_size=5*1024*1024,
 @attr(operation='multipart upload with bad key for uploading chunks')
 @attr(assertion='successful')
 @attr('encryption')
+@attr('fails_on_dbstore')
 def test_encryption_sse_c_multipart_invalid_chunks_1():
     bucket = get_new_bucket()
     key = "multipart_enc"
@@ -928,6 +705,7 @@ def test_encryption_sse_c_multipart_invalid_chunks_1():
 @attr(operation='multipart upload with bad md5 for chunks')
 @attr(assertion='successful')
 @attr('encryption')
+@attr('fails_on_dbstore')
 def test_encryption_sse_c_multipart_invalid_chunks_2():
     bucket = get_new_bucket()
     key = "multipart_enc"
@@ -956,6 +734,7 @@ def test_encryption_sse_c_multipart_invalid_chunks_2():
 @attr(assertion='succeeds')
 @attr('fails_with_subdomain')
 @attr('bucket-policy')
+@attr('fails_on_dbstore')
 def test_bucket_policy_different_tenant():
     bucket = get_new_bucket()
     key = bucket.new_key('asdf')
@@ -994,6 +773,7 @@ def test_bucket_policy_different_tenant():
 @attr(method='put')
 @attr(operation='Test put condition operator end with ifExists')
 @attr('bucket-policy')
+@attr('fails_on_dbstore')
 def test_bucket_policy_set_condition_operator_end_with_IfExists():
     bucket = _create_keys(keys=['foo'])
     policy = '''{
@@ -1028,57 +808,13 @@ def test_bucket_policy_set_condition_operator_end_with_IfExists():
 def _make_arn_resource(path="*"):
     return "arn:aws:s3:::{}".format(path)

-@attr(resource='object')
-@attr(method='put')
-@attr(operation='Deny put obj requests without encryption')
-@attr(assertion='success')
-@attr('encryption')
-@attr('bucket-policy')
-def test_bucket_policy_put_obj_enc():
-
-    bucket = get_new_bucket()
-
-    deny_incorrect_algo = {
-        "StringNotEquals": {
-            "s3:x-amz-server-side-encryption": "AES256"
-        }
-    }
-
-    deny_unencrypted_obj = {
-        "Null": {
-            "s3:x-amz-server-side-encryption": "true"
-        }
-    }
-
-    p = Policy()
-    resource = _make_arn_resource("{}/{}".format(bucket.name, "*"))
-
-    s1 = Statement("s3:PutObject", resource, effect="Deny", condition=deny_incorrect_algo)
-    s2 = Statement("s3:PutObject", resource, effect="Deny", condition=deny_unencrypted_obj)
-    policy_document = p.add_statement(s1).add_statement(s2).to_json()
-
-    bucket.set_policy(policy_document)
-
-    key1_str = 'testobj'
-    key1 = bucket.new_key(key1_str)
-    check_access_denied(key1.set_contents_from_string, key1_str)
-
-    sse_client_headers = {
-        'x-amz-server-side-encryption': 'AES256',
-        'x-amz-server-side-encryption-customer-algorithm': 'AES256',
-        'x-amz-server-side-encryption-customer-key': 'pO3upElrwuEXSoFwCfnZPdSsmt/xWeFa0N9KgDijwVs=',
-        'x-amz-server-side-encryption-customer-key-md5': 'DWygnHRtgiJ77HCm+1rvHw=='
-    }
-
-    key1.set_contents_from_string(key1_str, headers=sse_client_headers)
-
 @attr(resource='object')
 @attr(method='put')
 @attr(operation='put obj with RequestObjectTag')
 @attr(assertion='success')
 @attr('tagging')
 @attr('bucket-policy')
+@attr('fails_on_dbstore')
 def test_bucket_policy_put_obj_request_obj_tag():

     bucket = get_new_bucket()
s3tests/functional/test_s3_website.py
@@ -7,6 +7,7 @@ import random
 from pprint import pprint
 import time
 import boto.exception
+import socket

 from urllib.parse import urlparse

@@ -42,6 +43,7 @@ ERRORDOC_TEMPLATE = '<html><h1>ErrorDoc</h1><body>{random}</body></html>'

 CAN_WEBSITE = None

+@attr('fails_on_dbstore')
 def check_can_test_website():
     global CAN_WEBSITE
     # This is a bit expensive, so we cache this
@@ -264,6 +266,7 @@ def test_website_nonexistant_bucket_s3():
 @attr(assertion='non-existant bucket via website endpoint should give NoSuchBucket')
 @attr('s3website')
 @attr('fails_on_s3')
+@attr('fails_on_dbstore')
 @nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
 def test_website_nonexistant_bucket_rgw():
     bucket_name = get_new_bucket_name()
@@ -277,6 +280,7 @@ def test_website_nonexistant_bucket_rgw():
 @attr(operation='list')
 @attr(assertion='non-empty public buckets via s3website return page for /, where page is public')
 @attr('s3website')
+@attr('fails_on_dbstore')
 @nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
 @timed(10)
 def test_website_public_bucket_list_public_index():
@@ -305,6 +309,7 @@ def test_website_public_bucket_list_public_index():
 @attr(operation='list')
 @attr(assertion='non-empty private buckets via s3website return page for /, where page is private')
 @attr('s3website')
+@attr('fails_on_dbstore')
 @nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
 def test_website_private_bucket_list_public_index():
     bucket = get_new_bucket()
@@ -335,6 +340,7 @@ def test_website_private_bucket_list_public_index():
 @attr(operation='list')
 @attr(assertion='empty private buckets via s3website return a 403 for /')
 @attr('s3website')
+@attr('fails_on_dbstore')
 @nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
 def test_website_private_bucket_list_empty():
     bucket = get_new_bucket()
@@ -351,6 +357,7 @@ def test_website_private_bucket_list_empty():
 @attr(operation='list')
 @attr(assertion='empty public buckets via s3website return a 404 for /')
 @attr('s3website')
+@attr('fails_on_dbstore')
 @nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
 def test_website_public_bucket_list_empty():
     bucket = get_new_bucket()
@@ -366,6 +373,7 @@ def test_website_public_bucket_list_empty():
 @attr(operation='list')
 @attr(assertion='non-empty public buckets via s3website return page for /, where page is private')
 @attr('s3website')
+@attr('fails_on_dbstore')
 @nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
 def test_website_public_bucket_list_private_index():
     bucket = get_new_bucket()
@@ -391,6 +399,7 @@ def test_website_public_bucket_list_private_index():
 @attr(operation='list')
 @attr(assertion='non-empty private buckets via s3website return page for /, where page is private')
 @attr('s3website')
+@attr('fails_on_dbstore')
 @nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
 def test_website_private_bucket_list_private_index():
     bucket = get_new_bucket()
@@ -417,6 +426,7 @@ def test_website_private_bucket_list_private_index():
 @attr(operation='list')
 @attr(assertion='empty private buckets via s3website return a 403 for /, missing errordoc')
 @attr('s3website')
+@attr('fails_on_dbstore')
 @nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
 def test_website_private_bucket_list_empty_missingerrordoc():
     bucket = get_new_bucket()
@@ -433,6 +443,7 @@ def test_website_private_bucket_list_empty_missingerrordoc():
 @attr(operation='list')
 @attr(assertion='empty public buckets via s3website return a 404 for /, missing errordoc')
 @attr('s3website')
+@attr('fails_on_dbstore')
 @nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
 def test_website_public_bucket_list_empty_missingerrordoc():
     bucket = get_new_bucket()
@@ -448,6 +459,7 @@ def test_website_public_bucket_list_empty_missingerrordoc():
 @attr(operation='list')
 @attr(assertion='non-empty public buckets via s3website return page for /, where page is private, missing errordoc')
 @attr('s3website')
+@attr('fails_on_dbstore')
 @nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
 def test_website_public_bucket_list_private_index_missingerrordoc():
     bucket = get_new_bucket()
@@ -472,6 +484,7 @@ def test_website_public_bucket_list_private_index_missingerrordoc():
 @attr(operation='list')
 @attr(assertion='non-empty private buckets via s3website return page for /, where page is private, missing errordoc')
 @attr('s3website')
+@attr('fails_on_dbstore')
 @nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
 def test_website_private_bucket_list_private_index_missingerrordoc():
     bucket = get_new_bucket()
@@ -497,6 +510,7 @@ def test_website_private_bucket_list_private_index_missingerrordoc():
 @attr(operation='list')
 @attr(assertion='empty private buckets via s3website return a 403 for /, blocked errordoc')
 @attr('s3website')
+@attr('fails_on_dbstore')
 @nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
 def test_website_private_bucket_list_empty_blockederrordoc():
     bucket = get_new_bucket()
@@ -520,11 +534,64 @@ def test_website_private_bucket_list_empty_blockederrordoc():
     errorhtml.delete()
     bucket.delete()

+@attr(resource='bucket')
+@attr(method='get')
+@attr(operation='list')
+@attr(assertion='check if there is an invalid payload after serving error doc')
+@attr('s3website')
+@attr('fails_on_dbstore')
+@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
+def test_website_public_bucket_list_pubilc_errordoc():
+    bucket = get_new_bucket()
+    f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc'])
+    bucket.make_public()
+    errorhtml = bucket.new_key(f['ErrorDocument_Key'])
+    errorstring = choose_bucket_prefix(template=ERRORDOC_TEMPLATE, max_len=256)
+    errorhtml.set_contents_from_string(errorstring)
+    errorhtml.set_canned_acl('public-read')
+
+    url = get_website_url(proto='http', bucket=bucket.name, path='')
+    o = urlparse(url)
+    host = o.hostname
+    port = s3.main.port
+
+    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+    sock.connect((host, port))
+
+    request = "GET / HTTP/1.1\r\nHost:%s.%s:%s\r\n\r\n" % (bucket.name, host, port)
+    sock.send(request.encode())
+
+    #receive header
+    resp = sock.recv(4096)
+    print(resp)
+
+    #receive body
+    resp = sock.recv(4096)
+    print('payload length=%d' % len(resp))
+    print(resp)
+
+    #check if any additional payload is left
+    resp_len = 0
+    sock.settimeout(2)
+    try:
+        resp = sock.recv(4096)
+        resp_len = len(resp)
+        print('invalid payload length=%d' % resp_len)
+        print(resp)
+    except socket.timeout:
+        print('no invalid payload')
+
+    ok(resp_len == 0, 'invalid payload')
+
+    errorhtml.delete()
+    bucket.delete()
+
 @attr(resource='bucket')
 @attr(method='get')
 @attr(operation='list')
 @attr(assertion='empty public buckets via s3website return a 404 for /, blocked errordoc')
 @attr('s3website')
+@attr('fails_on_dbstore')
 @nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
 def test_website_public_bucket_list_empty_blockederrordoc():
     bucket = get_new_bucket()
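
The new test above (note the ``pubilc`` typo preserved in its name) deliberately bypasses boto and speaks HTTP over a raw socket: after reading the response headers and the error-document body, any further bytes that arrive before the 2-second timeout indicate stray payload beyond the declared body, which is exactly the malformed-response regression being checked. This is also why ``import socket`` was added in the first hunk of this file.
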
@@ -552,6 +619,7 @@ def test_website_public_bucket_list_empty_blockederrordoc():
 @attr(operation='list')
 @attr(assertion='non-empty public buckets via s3website return page for /, where page is private, blocked errordoc')
 @attr('s3website')
+@attr('fails_on_dbstore')
 @nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
 def test_website_public_bucket_list_private_index_blockederrordoc():
     bucket = get_new_bucket()
@@ -585,6 +653,7 @@ def test_website_public_bucket_list_private_index_blockederrordoc():
 @attr(operation='list')
 @attr(assertion='non-empty private buckets via s3website return page for /, where page is private, blocked errordoc')
 @attr('s3website')
+@attr('fails_on_dbstore')
 @nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
 def test_website_private_bucket_list_private_index_blockederrordoc():
     bucket = get_new_bucket()
@@ -620,6 +689,7 @@ def test_website_private_bucket_list_private_index_blockederrordoc():
 @attr(assertion='empty private buckets via s3website return a 403 for /, good errordoc')
 @attr('s3website')
 @nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
+@attr('fails_on_dbstore')
 def test_website_private_bucket_list_empty_gooderrordoc():
     bucket = get_new_bucket()
     f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc'])
@@ -642,6 +712,7 @@ def test_website_private_bucket_list_empty_gooderrordoc():
 @attr(operation='list')
 @attr(assertion='empty public buckets via s3website return a 404 for /, good errordoc')
 @attr('s3website')
+@attr('fails_on_dbstore')
 @nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
 def test_website_public_bucket_list_empty_gooderrordoc():
     bucket = get_new_bucket()
@@ -666,6 +737,7 @@ def test_website_public_bucket_list_empty_gooderrordoc():
 @attr(operation='list')
 @attr(assertion='non-empty public buckets via s3website return page for /, where page is private')
|
||||||
@attr('s3website')
|
@attr('s3website')
|
||||||
|
@attr('fails_on_dbstore')
|
||||||
@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
|
@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
|
||||||
def test_website_public_bucket_list_private_index_gooderrordoc():
|
def test_website_public_bucket_list_private_index_gooderrordoc():
|
||||||
bucket = get_new_bucket()
|
bucket = get_new_bucket()
|
||||||
|
@ -695,6 +767,7 @@ def test_website_public_bucket_list_private_index_gooderrordoc():
|
||||||
@attr(operation='list')
|
@attr(operation='list')
|
||||||
@attr(assertion='non-empty private buckets via s3website return page for /, where page is private')
|
@attr(assertion='non-empty private buckets via s3website return page for /, where page is private')
|
||||||
@attr('s3website')
|
@attr('s3website')
|
||||||
|
@attr('fails_on_dbstore')
|
||||||
@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
|
@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
|
||||||
def test_website_private_bucket_list_private_index_gooderrordoc():
|
def test_website_private_bucket_list_private_index_gooderrordoc():
|
||||||
bucket = get_new_bucket()
|
bucket = get_new_bucket()
|
||||||
|
@ -725,6 +798,7 @@ def test_website_private_bucket_list_private_index_gooderrordoc():
|
||||||
@attr(operation='list')
|
@attr(operation='list')
|
||||||
@attr(assertion='RedirectAllRequestsTo without protocol should TODO')
|
@attr(assertion='RedirectAllRequestsTo without protocol should TODO')
|
||||||
@attr('s3website')
|
@attr('s3website')
|
||||||
|
@attr('fails_on_dbstore')
|
||||||
@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
|
@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
|
||||||
def test_website_bucket_private_redirectall_base():
|
def test_website_bucket_private_redirectall_base():
|
||||||
bucket = get_new_bucket()
|
bucket = get_new_bucket()
|
||||||
|
@ -742,6 +816,7 @@ def test_website_bucket_private_redirectall_base():
|
||||||
@attr(operation='list')
|
@attr(operation='list')
|
||||||
@attr(assertion='RedirectAllRequestsTo without protocol should TODO')
|
@attr(assertion='RedirectAllRequestsTo without protocol should TODO')
|
||||||
@attr('s3website')
|
@attr('s3website')
|
||||||
|
@attr('fails_on_dbstore')
|
||||||
@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
|
@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
|
||||||
def test_website_bucket_private_redirectall_path():
|
def test_website_bucket_private_redirectall_path():
|
||||||
bucket = get_new_bucket()
|
bucket = get_new_bucket()
|
||||||
|
@ -761,6 +836,7 @@ def test_website_bucket_private_redirectall_path():
|
||||||
@attr(operation='list')
|
@attr(operation='list')
|
||||||
@attr(assertion='RedirectAllRequestsTo without protocol should TODO')
|
@attr(assertion='RedirectAllRequestsTo without protocol should TODO')
|
||||||
@attr('s3website')
|
@attr('s3website')
|
||||||
|
@attr('fails_on_dbstore')
|
||||||
@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
|
@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
|
||||||
def test_website_bucket_private_redirectall_path_upgrade():
|
def test_website_bucket_private_redirectall_path_upgrade():
|
||||||
bucket = get_new_bucket()
|
bucket = get_new_bucket()
|
||||||
|
@ -783,6 +859,7 @@ def test_website_bucket_private_redirectall_path_upgrade():
|
||||||
@attr(assertion='x-amz-website-redirect-location should not fire without websiteconf')
|
@attr(assertion='x-amz-website-redirect-location should not fire without websiteconf')
|
||||||
@attr('s3website')
|
@attr('s3website')
|
||||||
@attr('x-amz-website-redirect-location')
|
@attr('x-amz-website-redirect-location')
|
||||||
|
@attr('fails_on_dbstore')
|
||||||
@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
|
@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
|
||||||
def test_website_xredirect_nonwebsite():
|
def test_website_xredirect_nonwebsite():
|
||||||
bucket = get_new_bucket()
|
bucket = get_new_bucket()
|
||||||
|
@ -815,6 +892,7 @@ def test_website_xredirect_nonwebsite():
|
||||||
@attr(assertion='x-amz-website-redirect-location should fire websiteconf, relative path, public key')
|
@attr(assertion='x-amz-website-redirect-location should fire websiteconf, relative path, public key')
|
||||||
@attr('s3website')
|
@attr('s3website')
|
||||||
@attr('x-amz-website-redirect-location')
|
@attr('x-amz-website-redirect-location')
|
||||||
|
@attr('fails_on_dbstore')
|
||||||
@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
|
@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
|
||||||
def test_website_xredirect_public_relative():
|
def test_website_xredirect_public_relative():
|
||||||
bucket = get_new_bucket()
|
bucket = get_new_bucket()
|
||||||
|
@ -842,6 +920,7 @@ def test_website_xredirect_public_relative():
|
||||||
@attr(assertion='x-amz-website-redirect-location should fire websiteconf, absolute, public key')
|
@attr(assertion='x-amz-website-redirect-location should fire websiteconf, absolute, public key')
|
||||||
@attr('s3website')
|
@attr('s3website')
|
||||||
@attr('x-amz-website-redirect-location')
|
@attr('x-amz-website-redirect-location')
|
||||||
|
@attr('fails_on_dbstore')
|
||||||
@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
|
@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
|
||||||
def test_website_xredirect_public_abs():
|
def test_website_xredirect_public_abs():
|
||||||
bucket = get_new_bucket()
|
bucket = get_new_bucket()
|
||||||
|
@ -869,6 +948,7 @@ def test_website_xredirect_public_abs():
|
||||||
@attr(assertion='x-amz-website-redirect-location should fire websiteconf, relative path, private key')
|
@attr(assertion='x-amz-website-redirect-location should fire websiteconf, relative path, private key')
|
||||||
@attr('s3website')
|
@attr('s3website')
|
||||||
@attr('x-amz-website-redirect-location')
|
@attr('x-amz-website-redirect-location')
|
||||||
|
@attr('fails_on_dbstore')
|
||||||
@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
|
@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
|
||||||
def test_website_xredirect_private_relative():
|
def test_website_xredirect_private_relative():
|
||||||
bucket = get_new_bucket()
|
bucket = get_new_bucket()
|
||||||
|
@ -896,6 +976,7 @@ def test_website_xredirect_private_relative():
|
||||||
@attr(assertion='x-amz-website-redirect-location should fire websiteconf, absolute, private key')
|
@attr(assertion='x-amz-website-redirect-location should fire websiteconf, absolute, private key')
|
||||||
@attr('s3website')
|
@attr('s3website')
|
||||||
@attr('x-amz-website-redirect-location')
|
@attr('x-amz-website-redirect-location')
|
||||||
|
@attr('fails_on_dbstore')
|
||||||
@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
|
@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
|
||||||
def test_website_xredirect_private_abs():
|
def test_website_xredirect_private_abs():
|
||||||
bucket = get_new_bucket()
|
bucket = get_new_bucket()
|
||||||
|
@ -1159,6 +1240,7 @@ def routing_check(*args, **kwargs):
|
||||||
|
|
||||||
@attr('s3website_RoutingRules')
|
@attr('s3website_RoutingRules')
|
||||||
@attr('s3website')
|
@attr('s3website')
|
||||||
|
@attr('fails_on_dbstore')
|
||||||
@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
|
@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
|
||||||
def test_routing_generator():
|
def test_routing_generator():
|
||||||
for t in ROUTING_RULES_TESTS:
|
for t in ROUTING_RULES_TESTS:
|
||||||
|
|
|
@@ -4,11 +4,15 @@ from botocore.client import Config
 from botocore.exceptions import ClientError
 from botocore.handlers import disable_signing
 import configparser
+import datetime
+import time
 import os
 import munch
 import random
 import string
 import itertools
+import urllib3
+import re
 
 config = munch.Munch
 
@@ -74,38 +78,69 @@ def get_objects_list(bucket, client=None):
 
     return objects_list
 
-def get_versioned_objects_list(bucket, client=None):
-    if client == None:
-        client = get_client()
-    response = client.list_object_versions(Bucket=bucket)
-    versioned_objects_list = []
-
-    if 'Versions' in response:
-        contents = response['Versions']
-        for obj in contents:
-            key = obj['Key']
-            version_id = obj['VersionId']
-            versioned_obj = (key,version_id)
-            versioned_objects_list.append(versioned_obj)
-
-    return versioned_objects_list
-
-def get_delete_markers_list(bucket, client=None):
-    if client == None:
-        client = get_client()
-    response = client.list_object_versions(Bucket=bucket)
-    delete_markers = []
-
-    if 'DeleteMarkers' in response:
-        contents = response['DeleteMarkers']
-        for obj in contents:
-            key = obj['Key']
-            version_id = obj['VersionId']
-            versioned_obj = (key,version_id)
-            delete_markers.append(versioned_obj)
-
-    return delete_markers
+# generator function that returns object listings in batches, where each
+# batch is a list of dicts compatible with delete_objects()
+def list_versions(client, bucket, batch_size):
+    key_marker = ''
+    version_marker = ''
+    truncated = True
+    while truncated:
+        listing = client.list_object_versions(
+                Bucket=bucket,
+                KeyMarker=key_marker,
+                VersionIdMarker=version_marker,
+                MaxKeys=batch_size)
+
+        key_marker = listing.get('NextKeyMarker')
+        version_marker = listing.get('NextVersionIdMarker')
+        truncated = listing['IsTruncated']
+
+        objs = listing.get('Versions', []) + listing.get('DeleteMarkers', [])
+        if len(objs):
+            yield [{'Key': o['Key'], 'VersionId': o['VersionId']} for o in objs]
+
+def nuke_bucket(client, bucket):
+    batch_size = 128
+    max_retain_date = None
+
+    # list and delete objects in batches
+    for objects in list_versions(client, bucket, batch_size):
+        delete = client.delete_objects(Bucket=bucket,
+                Delete={'Objects': objects, 'Quiet': True},
+                BypassGovernanceRetention=True)
+
+        # check for object locks on 403 AccessDenied errors
+        for err in delete.get('Errors', []):
+            if err.get('Code') != 'AccessDenied':
+                continue
+            try:
+                res = client.get_object_retention(Bucket=bucket,
+                        Key=err['Key'], VersionId=err['VersionId'])
+                retain_date = res['Retention']['RetainUntilDate']
+                if not max_retain_date or max_retain_date < retain_date:
+                    max_retain_date = retain_date
+            except ClientError:
+                pass
+
+    if max_retain_date:
+        # wait out the retention period (up to 60 seconds)
+        now = datetime.datetime.now(max_retain_date.tzinfo)
+        if max_retain_date > now:
+            delta = max_retain_date - now
+            if delta.total_seconds() > 60:
+                raise RuntimeError('bucket {} still has objects \
+locked for {} more seconds, not waiting for \
+bucket cleanup'.format(bucket, delta.total_seconds()))
+            print('nuke_bucket', bucket, 'waiting', delta.total_seconds(),
+                  'seconds for object locks to expire')
+            time.sleep(delta.total_seconds())
+
+    for objects in list_versions(client, bucket, batch_size):
+        client.delete_objects(Bucket=bucket,
+                Delete={'Objects': objects, 'Quiet': True},
+                BypassGovernanceRetention=True)
+
+    client.delete_bucket(Bucket=bucket)
 
 def nuke_prefixed_buckets(prefix, client=None):
     if client == None:
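
Because list_versions() yields batches already shaped for delete_objects(), a caller never assembles the request payload by hand. A hypothetical use (the bucket name is a placeholder):

    client = get_client()
    for batch in list_versions(client, 'my-test-bucket', batch_size=128):
        # each batch is a list of {'Key': ..., 'VersionId': ...} dicts,
        # covering object versions and delete markers alike
        client.delete_objects(Bucket='my-test-bucket',
                              Delete={'Objects': batch, 'Quiet': True},
                              BypassGovernanceRetention=True)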
@@ -113,27 +148,36 @@ def nuke_prefixed_buckets(prefix, client=None):
 
     buckets = get_buckets_list(client, prefix)
 
-    if buckets != []:
-        for bucket_name in buckets:
-            objects_list = get_objects_list(bucket_name, client)
-            for obj in objects_list:
-                response = client.delete_object(Bucket=bucket_name,Key=obj)
-            versioned_objects_list = get_versioned_objects_list(bucket_name, client)
-            for obj in versioned_objects_list:
-                response = client.delete_object(Bucket=bucket_name,Key=obj[0],VersionId=obj[1])
-            delete_markers = get_delete_markers_list(bucket_name, client)
-            for obj in delete_markers:
-                response = client.delete_object(Bucket=bucket_name,Key=obj[0],VersionId=obj[1])
-            try:
-                response = client.delete_bucket(Bucket=bucket_name)
-            except ClientError:
-                # if DELETE times out, the retry may see NoSuchBucket
-                if response['Error']['Code'] != 'NoSuchBucket':
-                    raise ClientError
-                pass
+    err = None
+    for bucket_name in buckets:
+        try:
+            nuke_bucket(client, bucket_name)
+        except Exception as e:
+            # The exception shouldn't be raised when doing cleanup. Pass and continue
+            # the bucket cleanup process. Otherwise left buckets wouldn't be cleared
+            # resulting in some kind of resource leak. err is used to hint user some
+            # exception once occurred.
+            err = e
+            pass
+    if err:
+        raise err
 
     print('Done with cleanup of buckets in tests.')
 
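Note the rewritten loop's error handling: it swallows per-bucket failures until every bucket has been attempted, then re-raises the last one so the run still fails visibly. The same pattern in isolation (items and cleanup are placeholders):

    err = None
    for item in items:
        try:
            cleanup(item)
        except Exception as e:
            err = e        # remember the failure, keep going
    if err:
        raise err          # surface it once everything was attempted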
+def configured_storage_classes():
+    sc = ['STANDARD']
+
+    extra_sc = re.split(r"[\b\W\b]+", config.storage_classes)
+
+    for item in extra_sc:
+        if item != 'STANDARD':
+            sc.append(item)
+
+    sc = [i for i in sc if i]
+    print("storage classes configured: " + str(sc))
+
+    return sc
+
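configured_storage_classes() splits the storage_classes setting on any run of non-word characters, so commas and whitespace both act as separators; for example (values are illustrative):

    import re

    extra_sc = re.split(r"[\b\W\b]+", "LUKEWARM, FROZEN")
    # ['LUKEWARM', 'FROZEN']; merged with the implicit 'STANDARD',
    # the function would report ['STANDARD', 'LUKEWARM', 'FROZEN']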
 def setup():
     cfg = configparser.RawConfigParser()
     try:
@@ -166,6 +210,15 @@ def setup():
     proto = 'https' if config.default_is_secure else 'http'
     config.default_endpoint = "%s://%s:%d" % (proto, config.default_host, config.default_port)
 
+    try:
+        config.default_ssl_verify = cfg.getboolean('DEFAULT', "ssl_verify")
+    except configparser.NoOptionError:
+        config.default_ssl_verify = False
+
+    # Disable InsecureRequestWarning reported by urllib3 when ssl_verify is False
+    if not config.default_ssl_verify:
+        urllib3.disable_warnings()
+
     # vars from the main section
     config.main_access_key = cfg.get('s3 main',"access_key")
     config.main_secret_key = cfg.get('s3 main',"secret_key")
@@ -188,6 +241,17 @@ def setup():
         config.main_api_name = ""
         pass
 
+    try:
+        config.storage_classes = cfg.get('s3 main',"storage_classes")
+    except (configparser.NoSectionError, configparser.NoOptionError):
+        config.storage_classes = ""
+        pass
+
+    try:
+        config.lc_debug_interval = int(cfg.get('s3 main',"lc_debug_interval"))
+    except (configparser.NoSectionError, configparser.NoOptionError):
+        config.lc_debug_interval = 10
+
     config.alt_access_key = cfg.get('s3 alt',"access_key")
     config.alt_secret_key = cfg.get('s3 alt',"secret_key")
     config.alt_display_name = cfg.get('s3 alt',"display_name")
@@ -213,12 +277,94 @@ def setup():
     nuke_prefixed_buckets(prefix=prefix, client=alt_client)
     nuke_prefixed_buckets(prefix=prefix, client=tenant_client)
 
+    if cfg.has_section("s3 cloud"):
+        get_cloud_config(cfg)
+    else:
+        config.cloud_storage_class = None
+
+
 def teardown():
     alt_client = get_alt_client()
     tenant_client = get_tenant_client()
     nuke_prefixed_buckets(prefix=prefix)
     nuke_prefixed_buckets(prefix=prefix, client=alt_client)
     nuke_prefixed_buckets(prefix=prefix, client=tenant_client)
+    try:
+        iam_client = get_iam_client()
+        list_roles_resp = iam_client.list_roles()
+        for role in list_roles_resp['Roles']:
+            list_policies_resp = iam_client.list_role_policies(RoleName=role['RoleName'])
+            for policy in list_policies_resp['PolicyNames']:
+                del_policy_resp = iam_client.delete_role_policy(
+                                    RoleName=role['RoleName'],
+                                    PolicyName=policy
+                                )
+            del_role_resp = iam_client.delete_role(RoleName=role['RoleName'])
+        list_oidc_resp = iam_client.list_open_id_connect_providers()
+        for oidcprovider in list_oidc_resp['OpenIDConnectProviderList']:
+            del_oidc_resp = iam_client.delete_open_id_connect_provider(
+                            OpenIDConnectProviderArn=oidcprovider['Arn']
+                            )
+    except:
+        pass
+
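Ordering matters in the role cleanup above: IAM refuses to delete a role that still has inline policies attached, so teardown() detaches them first. The dependency chain in isolation:

    iam = get_iam_client()
    for role in iam.list_roles()['Roles']:
        name = role['RoleName']
        for policy in iam.list_role_policies(RoleName=name)['PolicyNames']:
            iam.delete_role_policy(RoleName=name, PolicyName=policy)  # policies first
        iam.delete_role(RoleName=name)                                # then the role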
+def check_webidentity():
+    cfg = configparser.RawConfigParser()
+    try:
+        path = os.environ['S3TEST_CONF']
+    except KeyError:
+        raise RuntimeError(
+            'To run tests, point environment '
+            + 'variable S3TEST_CONF to a config file.',
+        )
+    cfg.read(path)
+    if not cfg.has_section("webidentity"):
+        raise RuntimeError('Your config file is missing the "webidentity" section!')
+
+    config.webidentity_thumbprint = cfg.get('webidentity', "thumbprint")
+    config.webidentity_aud = cfg.get('webidentity', "aud")
+    config.webidentity_token = cfg.get('webidentity', "token")
+    config.webidentity_realm = cfg.get('webidentity', "KC_REALM")
+    config.webidentity_sub = cfg.get('webidentity', "sub")
+    config.webidentity_azp = cfg.get('webidentity', "azp")
+    config.webidentity_user_token = cfg.get('webidentity', "user_token")
+
+def get_cloud_config(cfg):
+    config.cloud_host = cfg.get('s3 cloud',"host")
+    config.cloud_port = int(cfg.get('s3 cloud',"port"))
+    config.cloud_is_secure = cfg.getboolean('s3 cloud', "is_secure")
+
+    proto = 'https' if config.cloud_is_secure else 'http'
+    config.cloud_endpoint = "%s://%s:%d" % (proto, config.cloud_host, config.cloud_port)
+
+    config.cloud_access_key = cfg.get('s3 cloud',"access_key")
+    config.cloud_secret_key = cfg.get('s3 cloud',"secret_key")
+
+    try:
+        config.cloud_storage_class = cfg.get('s3 cloud', "cloud_storage_class")
+    except (configparser.NoSectionError, configparser.NoOptionError):
+        config.cloud_storage_class = None
+
+    try:
+        config.cloud_retain_head_object = cfg.get('s3 cloud',"retain_head_object")
+    except (configparser.NoSectionError, configparser.NoOptionError):
+        config.cloud_retain_head_object = None
+
+    try:
+        config.cloud_target_path = cfg.get('s3 cloud',"target_path")
+    except (configparser.NoSectionError, configparser.NoOptionError):
+        config.cloud_target_path = None
+
+    try:
+        config.cloud_target_storage_class = cfg.get('s3 cloud',"target_storage_class")
+    except (configparser.NoSectionError, configparser.NoOptionError):
+        config.cloud_target_storage_class = 'STANDARD'
+
+    try:
+        config.cloud_regular_storage_class = cfg.get('s3 cloud', "storage_class")
+    except (configparser.NoSectionError, configparser.NoOptionError):
+        config.cloud_regular_storage_class = None
+
 
 def get_client(client_config=None):
     if client_config == None:
@@ -229,6 +375,7 @@ def get_client(client_config=None):
                         aws_secret_access_key=config.main_secret_key,
                         endpoint_url=config.default_endpoint,
                         use_ssl=config.default_is_secure,
+                        verify=config.default_ssl_verify,
                         config=client_config)
     return client
 
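With verify= now threaded through every client factory, a run against an endpoint that presents a self-signed certificate needs no per-call overrides; the equivalent direct boto3 call would be (endpoint and credentials are placeholders):

    import boto3
    import urllib3

    urllib3.disable_warnings()      # quiet InsecureRequestWarning when not verifying
    client = boto3.client('s3',
                          aws_access_key_id='ACCESSKEY',
                          aws_secret_access_key='SECRETKEY',
                          endpoint_url='https://rgw.example.test:443',
                          use_ssl=True,
                          verify=False)   # accept a self-signed certificate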
@@ -238,9 +385,56 @@ def get_v2_client():
                         aws_secret_access_key=config.main_secret_key,
                         endpoint_url=config.default_endpoint,
                         use_ssl=config.default_is_secure,
+                        verify=config.default_ssl_verify,
                         config=Config(signature_version='s3'))
     return client
 
+def get_sts_client(client_config=None):
+    if client_config == None:
+        client_config = Config(signature_version='s3v4')
+
+    client = boto3.client(service_name='sts',
+                        aws_access_key_id=config.alt_access_key,
+                        aws_secret_access_key=config.alt_secret_key,
+                        endpoint_url=config.default_endpoint,
+                        region_name='',
+                        use_ssl=config.default_is_secure,
+                        verify=config.default_ssl_verify,
+                        config=client_config)
+    return client
+
+def get_iam_client(client_config=None):
+    cfg = configparser.RawConfigParser()
+    try:
+        path = os.environ['S3TEST_CONF']
+    except KeyError:
+        raise RuntimeError(
+            'To run tests, point environment '
+            + 'variable S3TEST_CONF to a config file.',
+        )
+    cfg.read(path)
+    if not cfg.has_section("iam"):
+        raise RuntimeError('Your config file is missing the "iam" section!')
+
+    config.iam_access_key = cfg.get('iam',"access_key")
+    config.iam_secret_key = cfg.get('iam',"secret_key")
+    config.iam_display_name = cfg.get('iam',"display_name")
+    config.iam_user_id = cfg.get('iam',"user_id")
+    config.iam_email = cfg.get('iam',"email")
+
+    if client_config == None:
+        client_config = Config(signature_version='s3v4')
+
+    client = boto3.client(service_name='iam',
+                        aws_access_key_id=config.iam_access_key,
+                        aws_secret_access_key=config.iam_secret_key,
+                        endpoint_url=config.default_endpoint,
+                        region_name='',
+                        use_ssl=config.default_is_secure,
+                        verify=config.default_ssl_verify,
+                        config=client_config)
+    return client
+
 def get_alt_client(client_config=None):
     if client_config == None:
         client_config = Config(signature_version='s3v4')
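
get_sts_client() above points boto3's STS service at the same gateway endpoint as the S3 clients, which is what the new test_sts.py suite builds on. A hypothetical call (the role ARN is a placeholder):

    sts = get_sts_client()
    resp = sts.assume_role(RoleArn='arn:aws:iam:::role/example-role',
                           RoleSessionName='s3tests-session')
    creds = resp['Credentials']   # temporary keys for a scoped S3 client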
@@ -250,6 +444,19 @@ def get_alt_client(client_config=None):
                         aws_secret_access_key=config.alt_secret_key,
                         endpoint_url=config.default_endpoint,
                         use_ssl=config.default_is_secure,
+                        verify=config.default_ssl_verify,
+                        config=client_config)
+    return client
+
+def get_cloud_client(client_config=None):
+    if client_config == None:
+        client_config = Config(signature_version='s3v4')
+
+    client = boto3.client(service_name='s3',
+                        aws_access_key_id=config.cloud_access_key,
+                        aws_secret_access_key=config.cloud_secret_key,
+                        endpoint_url=config.cloud_endpoint,
+                        use_ssl=config.cloud_is_secure,
                         config=client_config)
     return client
 
@@ -262,6 +469,7 @@ def get_tenant_client(client_config=None):
                         aws_secret_access_key=config.tenant_secret_key,
                         endpoint_url=config.default_endpoint,
                         use_ssl=config.default_is_secure,
+                        verify=config.default_ssl_verify,
                         config=client_config)
     return client
 
@@ -272,6 +480,7 @@ def get_tenant_iam_client():
                         aws_access_key_id=config.tenant_access_key,
                         aws_secret_access_key=config.tenant_secret_key,
                         endpoint_url=config.default_endpoint,
+                        verify=config.default_ssl_verify,
                         use_ssl=config.default_is_secure)
     return client
 
@@ -281,6 +490,7 @@ def get_unauthenticated_client():
                         aws_secret_access_key='',
                         endpoint_url=config.default_endpoint,
                         use_ssl=config.default_is_secure,
+                        verify=config.default_ssl_verify,
                         config=Config(signature_version=UNSIGNED))
     return client
 
@@ -290,9 +500,23 @@ def get_bad_auth_client(aws_access_key_id='badauth'):
                         aws_secret_access_key='roflmao',
                         endpoint_url=config.default_endpoint,
                         use_ssl=config.default_is_secure,
+                        verify=config.default_ssl_verify,
                         config=Config(signature_version='s3v4'))
     return client
 
+def get_svc_client(client_config=None, svc='s3'):
+    if client_config == None:
+        client_config = Config(signature_version='s3v4')
+
+    client = boto3.client(service_name=svc,
+                        aws_access_key_id=config.main_access_key,
+                        aws_secret_access_key=config.main_secret_key,
+                        endpoint_url=config.default_endpoint,
+                        use_ssl=config.default_is_secure,
+                        verify=config.default_ssl_verify,
+                        config=client_config)
+    return client
+
 bucket_counter = itertools.count(1)
 
 def get_new_bucket_name():
@@ -320,7 +544,8 @@ def get_new_bucket_resource(name=None):
                         aws_access_key_id=config.main_access_key,
                         aws_secret_access_key=config.main_secret_key,
                         endpoint_url=config.default_endpoint,
-                        use_ssl=config.default_is_secure)
+                        use_ssl=config.default_is_secure,
+                        verify=config.default_ssl_verify)
     if name is None:
         name = get_new_bucket_name()
     bucket = s3.Bucket(name)
@@ -342,6 +567,21 @@ def get_new_bucket(client=None, name=None):
     client.create_bucket(Bucket=name)
     return name
 
+def get_parameter_name():
+    parameter_name=""
+    rand = ''.join(
+        random.choice(string.ascii_lowercase + string.digits)
+        for c in range(255)
+        )
+    while rand:
+        parameter_name = '{random}'.format(random=rand)
+        if len(parameter_name) <= 10:
+            return parameter_name
+        rand = rand[:-1]
+    return parameter_name
+
+def get_sts_user_id():
+    return config.alt_user_id
+
 def get_config_is_secure():
     return config.default_is_secure
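
get_parameter_name() trims a 255-character random string one character at a time until it is at most 10 long, so in practice it always returns a 10-character lowercase alphanumeric name. A one-step equivalent (hypothetical helper, not part of the diff):

    import random
    import string

    def short_parameter_name(length=10):
        # same result as get_parameter_name's trim loop, produced directly
        return ''.join(random.choice(string.ascii_lowercase + string.digits)
                       for _ in range(length))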
@@ -355,6 +595,9 @@ def get_config_port():
 def get_config_endpoint():
     return config.default_endpoint
 
+def get_config_ssl_verify():
+    return config.default_ssl_verify
+
 def get_main_aws_access_key():
     return config.main_access_key
 
@@ -408,3 +651,48 @@ def get_tenant_user_id():
 
 def get_tenant_email():
     return config.tenant_email
+
+def get_thumbprint():
+    return config.webidentity_thumbprint
+
+def get_aud():
+    return config.webidentity_aud
+
+def get_sub():
+    return config.webidentity_sub
+
+def get_azp():
+    return config.webidentity_azp
+
+def get_token():
+    return config.webidentity_token
+
+def get_realm_name():
+    return config.webidentity_realm
+
+def get_iam_access_key():
+    return config.iam_access_key
+
+def get_iam_secret_key():
+    return config.iam_secret_key
+
+def get_user_token():
+    return config.webidentity_user_token
+
+def get_cloud_storage_class():
+    return config.cloud_storage_class
+
+def get_cloud_retain_head_object():
+    return config.cloud_retain_head_object
+
+def get_cloud_regular_storage_class():
+    return config.cloud_regular_storage_class
+
+def get_cloud_target_path():
+    return config.cloud_target_path
+
+def get_cloud_target_storage_class():
+    return config.cloud_target_storage_class
+
+def get_lc_debug_interval():
+    return config.lc_debug_interval

@@ -520,6 +520,7 @@ def test_bucket_create_bad_authorization_none():
 @attr(method='put')
 @attr(operation='create w/invalid MD5')
 @attr(assertion='fails 400')
+@attr('fails_on_dbstore')
 def test_object_create_bad_md5_invalid_garbage_aws2():
     v2_client = get_v2_client()
     headers = {'Content-MD5': 'AWS HAHAHA'}
@@ -580,6 +581,7 @@ def test_object_create_bad_authorization_invalid_aws2():
 @attr(method='put')
 @attr(operation='create w/empty user agent')
 @attr(assertion='succeeds')
+@attr('fails_on_dbstore')
 def test_object_create_bad_ua_empty_aws2():
     v2_client = get_v2_client()
     headers = {'User-Agent': ''}
@@ -591,6 +593,7 @@ def test_object_create_bad_ua_empty_aws2():
 @attr(method='put')
 @attr(operation='create w/no user agent')
 @attr(assertion='succeeds')
+@attr('fails_on_dbstore')
 def test_object_create_bad_ua_none_aws2():
     v2_client = get_v2_client()
     remove = 'User-Agent'
@@ -602,6 +605,7 @@ def test_object_create_bad_ua_none_aws2():
 @attr(method='put')
 @attr(operation='create w/invalid date')
 @attr(assertion='fails 403')
+@attr('fails_on_dbstore')
 def test_object_create_bad_date_invalid_aws2():
     v2_client = get_v2_client()
     headers = {'x-amz-date': 'Bad Date'}
@@ -615,6 +619,7 @@ def test_object_create_bad_date_invalid_aws2():
 @attr(method='put')
 @attr(operation='create w/empty date')
 @attr(assertion='fails 403')
+@attr('fails_on_dbstore')
 def test_object_create_bad_date_empty_aws2():
     v2_client = get_v2_client()
     headers = {'x-amz-date': ''}
@@ -643,6 +648,7 @@ def test_object_create_bad_date_none_aws2():
 @attr(method='put')
 @attr(operation='create w/date in past')
 @attr(assertion='fails 403')
+@attr('fails_on_dbstore')
 def test_object_create_bad_date_before_today_aws2():
     v2_client = get_v2_client()
     headers = {'x-amz-date': 'Tue, 07 Jul 2010 21:53:04 GMT'}
@@ -656,6 +662,7 @@ def test_object_create_bad_date_before_today_aws2():
 @attr(method='put')
 @attr(operation='create w/date before epoch')
 @attr(assertion='fails 403')
+@attr('fails_on_dbstore')
 def test_object_create_bad_date_before_epoch_aws2():
     v2_client = get_v2_client()
     headers = {'x-amz-date': 'Tue, 07 Jul 1950 21:53:04 GMT'}
@@ -669,6 +676,7 @@ def test_object_create_bad_date_before_epoch_aws2():
 @attr(method='put')
 @attr(operation='create w/date after 9999')
 @attr(assertion='fails 403')
+@attr('fails_on_dbstore')
 def test_object_create_bad_date_after_end_aws2():
     v2_client = get_v2_client()
     headers = {'x-amz-date': 'Tue, 07 Jul 9999 21:53:04 GMT'}

File diff suppressed because it is too large

s3tests_boto3/functional/test_s3select.py  (new file, 1276 lines; diff suppressed because it is too large)
s3tests_boto3/functional/test_sts.py  (new file, 2209 lines; diff suppressed because it is too large)