forked from TrueCloudLab/s3-tests
Compare commits: ceph-master...master (341 commits)
24 changed files with 10081 additions and 5582 deletions
.gitignore (vendored): 1 addition

@@ -10,5 +10,6 @@
 
 /*.egg-info
 /virtualenv
+/venv
 
 config.yaml
README.rst: 67 lines changed

@@ -6,14 +6,10 @@ This is a set of unofficial Amazon AWS S3 compatibility
 tests, that can be useful to people implementing software
 that exposes an S3-like API. The tests use the Boto2 and Boto3 libraries.
 
-The tests use the Nose test framework. To get started, ensure you have
-the ``virtualenv`` software installed; e.g. on Debian/Ubuntu::
+The tests use the Tox tool. To get started, ensure you have the ``tox``
+software installed; e.g. on Debian/Ubuntu::
 
-    sudo apt-get install python-virtualenv
+    sudo apt-get install tox
 
-and then run::
-
-    ./bootstrap
-
 You will need to create a configuration file with the location of the
 service and two different credentials. A sample configuration file named
@@ -22,29 +18,25 @@ used to run the s3 tests on a Ceph cluster started with vstart.
 
 Once you have that file copied and edited, you can run the tests with::
 
-    S3TEST_CONF=your.conf ./virtualenv/bin/nosetests
+    S3TEST_CONF=your.conf tox
 
 You can specify which directory of tests to run::
 
-    S3TEST_CONF=your.conf ./virtualenv/bin/nosetests s3tests.functional
+    S3TEST_CONF=your.conf tox -- s3tests_boto3/functional
 
 You can specify which file of tests to run::
 
-    S3TEST_CONF=your.conf ./virtualenv/bin/nosetests s3tests.functional.test_s3
+    S3TEST_CONF=your.conf tox s3tests_boto3/functional/test_s3.py
 
 You can specify which test to run::
 
-    S3TEST_CONF=your.conf ./virtualenv/bin/nosetests s3tests.functional.test_s3:test_bucket_list_empty
+    S3TEST_CONF=your.conf tox s3tests_boto3/functional/test_s3.py::test_bucket_list_empty
 
-To gather a list of tests being run, use the flags::
-
-    -v --collect-only
-
 Some tests have attributes set based on their current reliability and
 things like AWS not enforcing their spec stricly. You can filter tests
 based on their attributes::
 
-    S3TEST_CONF=aws.conf ./virtualenv/bin/nosetests -a '!fails_on_aws'
+    S3TEST_CONF=aws.conf tox -- -m 'not fails_on_aws'
 
 Most of the tests have both Boto3 and Boto2 versions. Tests written in
 Boto2 are in the ``s3tests`` directory. Tests written in Boto3 are
@@ -52,7 +44,7 @@ located in the ``s3test_boto3`` directory.
 
 You can run only the boto3 tests with::
 
-    S3TEST_CONF=your.conf ./virtualenv/bin/nosetests -v -s -A 'not fails_on_rgw' s3tests_boto3.functional
+    S3TEST_CONF=your.conf tox -- s3tests_boto3/functional
 
 ========================
 STS compatibility tests
@@ -60,14 +52,51 @@ You can run only the boto3 tests with::
 
 This section contains some basic tests for the AssumeRole, GetSessionToken and AssumeRoleWithWebIdentity API's. The test file is located under ``s3tests_boto3/functional``.
 
+To run the STS tests, the vstart cluster should be started with the following parameter (in addition to any parameters already used with it)::
+
+    vstart.sh -o rgw_sts_key=abcdefghijklmnop -o rgw_s3_auth_use_sts=true
+
+Note that the ``rgw_sts_key`` can be set to anything that is 128 bits in length.
+After the cluster is up the following command should be executed::
+
+    radosgw-admin caps add --tenant=testx --uid="9876543210abcdef0123456789abcdef0123456789abcdef0123456789abcdef" --caps="roles=*"
+
 You can run only the sts tests (all the three API's) with::
 
-    S3TEST_CONF=your.conf ./virtualenv/bin/nosetests s3tests_boto3.functional.test_sts
+    S3TEST_CONF=your.conf tox s3tests_boto3/functional/test_sts.py
 
 You can filter tests based on the attributes. There is a attribute named ``test_of_sts`` to run AssumeRole and GetSessionToken tests and ``webidentity_test`` to run the AssumeRoleWithWebIdentity tests. If you want to execute only ``test_of_sts`` tests you can apply that filter as below::
 
-    S3TEST_CONF=your.conf ./virtualenv/bin/nosetests -v -s -A 'test_of_sts' s3tests_boto3.functional.test_sts
+    S3TEST_CONF=your.conf tox -- -m test_of_sts s3tests_boto3/functional/test_sts.py
 
 For running ``webidentity_test`` you'll need have Keycloak running.
 
 In order to run any STS test you'll need to add "iam" section to the config file. For further reference on how your config file should look check ``s3tests.conf.SAMPLE``.
+
+========================
+IAM policy tests
+========================
+
+This is a set of IAM policy tests.
+This section covers tests for user policies such as Put, Get, List, Delete, user policies with s3 actions, conflicting user policies etc
+These tests uses Boto3 libraries. Tests are written in the ``s3test_boto3`` directory.
+
+These iam policy tests uses two users with profile name "iam" and "s3 alt" as mentioned in s3tests.conf.SAMPLE.
+If Ceph cluster is started with vstart, then above two users will get created as part of vstart with same access key, secrete key etc as mentioned in s3tests.conf.SAMPLE.
+Out of those two users, "iam" user is with capabilities --caps=user-policy=* and "s3 alt" user is without capabilities.
+Adding above capabilities to "iam" user is also taken care by vstart (If Ceph cluster is started with vstart).
+
+To run these tests, create configuration file with section "iam" and "s3 alt" refer s3tests.conf.SAMPLE.
+Once you have that configuration file copied and edited, you can run all the tests with::
+
+    S3TEST_CONF=your.conf tox s3tests_boto3/functional/test_iam.py
+
+You can also specify specific test to run::
+
+    S3TEST_CONF=your.conf tox s3tests_boto3/functional/test_iam.py::test_put_user_policy
+
+Some tests have attributes set such as "fails_on_rgw".
+You can filter tests based on their attributes::
+
+    S3TEST_CONF=your.conf tox -- s3tests_boto3/functional/test_iam.py -m 'not fails_on_rgw'
+
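The tox configuration itself is not part of this capture. Under the workflow the README now describes, a minimal tox.ini would look roughly like the sketch below; treat it as an assumption, not the repository's actual file. Note the ``--`` in several commands above: tox forwards everything after it to pytest as ``{posargs}``.

::

    # hypothetical tox.ini sketch; the real file is not shown in this diff
    [tox]
    envlist = py
    skipsdist = True

    [testenv]
    deps = -rrequirements.txt
    passenv = S3TEST_CONF
    commands = pytest {posargs}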
bootstrap: file deleted (76 lines)

@@ -1,76 +0,0 @@
-#!/bin/bash
-set -e
-
-virtualenv="virtualenv"
-declare -a packages
-source /etc/os-release
-
-case "$ID" in
-    debian|ubuntu|devuan)
-        packages=(debianutils python3-pip python3-virtualenv python3-dev libevent-dev libffi-dev libxml2-dev libxslt-dev zlib1g-dev)
-        for package in ${packages[@]}; do
-            if [ "$(dpkg --status -- $package 2>/dev/null|sed -n 's/^Status: //p')" != "install ok installed" ]; then
-                # add a space after old values
-                missing="${missing:+$missing }$package"
-            fi
-        done
-
-        if [ -n "$missing" ]; then
-            echo "$0: missing required DEB packages. Installing via sudo." 1>&2
-            sudo apt-get -y install $missing
-        fi
-        ;;
-    centos|fedora|rhel|ol|virtuozzo)
-
-        packages=(which python3-virtualenv python36-devel libevent-devel libffi-devel libxml2-devel libxslt-devel zlib-devel)
-        for package in ${packages[@]}; do
-            # When the package is python36-devel we change it to python3-devel on Fedora
-            if [[ ${package} == "python36-devel" && -f /etc/fedora-release ]]; then
-                package=python36
-            fi
-            if [ "$(rpm -qa $package 2>/dev/null)" == "" ]; then
-                missing="${missing:+$missing }$package"
-            fi
-        done
-
-        if [ -n "$missing" ]; then
-            echo "$0: Missing required RPM packages: ${missing}." 1>&2
-            sudo yum -y install $missing
-        fi
-        ;;
-    opensuse*|suse|sles)
-
-        packages=(which python3-virtualenv python3-devel libev-devel libffi-devel libxml2-devel libxslt-devel zlib-devel)
-        for package in ${packages[@]}; do
-            if [ "$(rpm -qa $package 2>/dev/null)" == "" ]; then
-                missing="${missing:+$missing }$package"
-            fi
-            if [ -n "$missing" ]; then
-                echo "$0: Missing required RPM packages: ${missing}." 1>&2
-                sudo zypper --non-interactive install --no-recommends $missing
-            fi
-        done
-
-        ;;
-    *)
-        echo "Bootstrap script does not support this distro yet, consider adding the packages"
-        exit 1
-esac
-
-
-# s3-tests only works on python 3.6 not newer versions of python3
-${virtualenv} --python=$(which python3.6) virtualenv
-
-# avoid pip bugs
-./virtualenv/bin/pip3 install --upgrade pip
-
-# latest setuptools supporting python 2.7
-./virtualenv/bin/pip install setuptools==44.1.0
-
-./virtualenv/bin/pip3 install -r requirements.txt
-
-# forbid setuptools from using the network because it'll try to use
-# easy_install, and we really wanted pip; next line will fail if pip
-# requirements.txt does not match setup.py requirements -- sucky but
-# good enough for now
-./virtualenv/bin/python3 setup.py develop
pytest.ini: new file (51 lines)

@@ -0,0 +1,51 @@
+[pytest]
+markers =
+    abac_test
+    appendobject
+    auth_aws2
+    auth_aws4
+    auth_common
+    bucket_policy
+    bucket_encryption
+    checksum
+    cloud_transition
+    encryption
+    fails_on_aws
+    fails_on_dbstore
+    fails_on_dho
+    fails_on_mod_proxy_fcgi
+    fails_on_rgw
+    fails_on_s3
+    fails_with_subdomain
+    group
+    group_policy
+    iam_account
+    iam_cross_account
+    iam_role
+    iam_tenant
+    iam_user
+    lifecycle
+    lifecycle_expiration
+    lifecycle_transition
+    list_objects_v2
+    object_lock
+    role_policy
+    session_policy
+    s3select
+    s3website
+    s3website_routing_rules
+    s3website_redirect_location
+    sns
+    sse_s3
+    storage_class
+    tagging
+    test_of_sts
+    token_claims_trust_policy_test
+    token_principal_tag_role_policy_test
+    token_request_tag_trust_policy_test
+    token_resource_tags_test
+    token_role_tags_test
+    token_tag_keys_test
+    user_policy
+    versioning
+    webidentity_test
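Registering every marker here keeps ``pytest -m`` filtering working without ``PytestUnknownMarkWarning`` noise (and without errors under ``--strict-markers``). A test opts in with a decorator and is then selectable from the command line; a minimal sketch (the test name is hypothetical)::

    import pytest

    @pytest.mark.fails_on_aws
    def test_example_behavior():
        assert True

    # select or exclude by marker:
    #   pytest -m fails_on_aws
    #   pytest -m 'not fails_on_aws'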
requirements.txt (file name inferred from the contents; the header was not captured)

@@ -1,12 +1,15 @@
 PyYAML
-nose >=1.0.0
 boto >=2.6.0
 boto3 >=1.0.0
+# botocore-1.28 broke v2 signatures, see https://tracker.ceph.com/issues/58059
+botocore <1.28.0
 munch >=2.0.0
 # 0.14 switches to libev, that means bootstrap needs to change too
 gevent >=1.0
 isodate >=0.4.4
 requests >=2.23.0
-pytz >=2011k
+pytz
 httplib2
 lxml
+pytest
+tox
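The ``botocore <1.28.0`` line is an ordinary PEP 440 version specifier, so installing from this file pins the resolver below the release that broke v2 signatures. A quick way to confirm the pin took effect::

    pip install -r requirements.txt
    python -c 'import botocore; print(botocore.__version__)'  # expect < 1.28.0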
s3tests.conf.SAMPLE (file name inferred from the contents; the header was not captured)

@@ -19,6 +19,14 @@ ssl_verify = False
 ## the prefix to 30 characters long, and avoid collisions
 bucket prefix = yournamehere-{random}-
 
+# all the iam account resources (users, roles, etc) created
+# will start with this name prefix
+iam name prefix = s3-tests-
+
+# all the iam account resources (users, roles, etc) created
+# will start with this path prefix
+iam path prefix = /s3-tests/
+
 [s3 main]
 # main display_name set in vstart.sh
 display_name = M. Tester
@@ -41,6 +49,12 @@ secret_key = h7GhxuBLTrlhVUyxSPUKUV8r/2EI4ngqJxD7iBdBYLhwluN30JaT3Q==
 ## replace with key id obtained when secret is created, or delete if KMS not tested
 #kms_keyid = 01234567-89ab-cdef-0123-456789abcdef
 
+## Storage classes
+#storage_classes = "LUKEWARM, FROZEN"
+
+## Lifecycle debug interval (default: 10)
+#lc_debug_interval = 20
+
 [s3 alt]
 # alt display_name set in vstart.sh
 display_name = john.doe
@@ -56,6 +70,37 @@ access_key = NOPQRSTUVWXYZABCDEFG
 # alt AWS secret key set in vstart.sh
 secret_key = nopqrstuvwxyzabcdefghijklmnabcdefghijklm
 
+#[s3 cloud]
+## to run the testcases with "cloud_transition" attribute.
+## Note: the waiting time may have to tweaked depending on
+## the I/O latency to the cloud endpoint.
+
+## host set for cloud endpoint
+# host = localhost
+
+## port set for cloud endpoint
+# port = 8001
+
+## say "False" to disable TLS
+# is_secure = False
+
+## cloud endpoint credentials
+# access_key = 0555b35654ad1656d804
+# secret_key = h7GhxuBLTrlhVUyxSPUKUV8r/2EI4ngqJxD7iBdBYLhwluN30JaT3Q==
+
+## storage class configured as cloud tier on local rgw server
+# cloud_storage_class = CLOUDTIER
+
+## Below are optional -
+
+## Above configured cloud storage class config options
+# retain_head_object = false
+# target_storage_class = Target_SC
+# target_path = cloud-bucket
+
+## another regular storage class to test multiple transition rules,
+# storage_class = S1
+
 [s3 tenant]
 # tenant display_name set in vstart.sh
 display_name = testx$tenanteduser
@@ -72,6 +117,9 @@ secret_key = opqrstuvwxyzabcdefghijklmnopqrstuvwxyzab
 # tenant email set in vstart.sh
 email = tenanteduser@example.com
 
+# tenant name
+tenant = testx
+
 #following section needs to be added for all sts-tests
 [iam]
 #used for iam operations in sts-tests
@@ -90,6 +138,20 @@ secret_key = abcdefghijklmnopqrstuvwxyzabcdefghijklmn
 #display_name from vstart.sh
 display_name = youruseridhere
 
+# iam account root user for iam_account tests
+[iam root]
+access_key = AAAAAAAAAAAAAAAAAAaa
+secret_key = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+user_id = RGW11111111111111111
+email = account1@ceph.com
+
+# iam account root user in a different account than [iam root]
+[iam alt root]
+access_key = BBBBBBBBBBBBBBBBBBbb
+secret_key = bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
+user_id = RGW22222222222222222
+email = account2@ceph.com
+
 #following section needs to be added when you want to run Assume Role With Webidentity test
 [webidentity]
 #used for assume role with web identity test in sts-tests
@@ -98,6 +160,12 @@ token=<access_token>
 
 aud=<obtained after introspecting token>
 
+sub=<obtained after introspecting token>
+
+azp=<obtained after introspecting token>
+
+user_token=<access token for a user, with attribute Department=[Engineering, Marketing>]
+
 thumbprint=<obtained from x509 certificate>
 
 KC_REALM=<name of the realm>
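The sample file is plain INI, so the new sections are readable with the standard library. A hedged sketch of pulling the ``[iam root]`` credentials added above (the helper name is hypothetical, not the harness's actual loader)::

    import configparser

    def load_iam_root(path):
        # read the account-root credentials used by the iam_account tests
        cfg = configparser.ConfigParser()
        cfg.read(path)
        section = cfg['iam root']
        return section['access_key'], section['secret_key']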
(file header lost in the capture; the hunks match s3tests/functional/__init__.py)

@@ -7,6 +7,7 @@ import itertools
 import os
 import random
 import string
+import pytest
 from http.client import HTTPConnection, HTTPSConnection
 from urllib.parse import urlparse
 
@@ -370,6 +371,15 @@ def teardown():
     # remove our buckets here also, to avoid littering
     nuke_prefixed_buckets(prefix=prefix)
 
+@pytest.fixture(scope="package")
+def configfile():
+    setup()
+    yield config
+
+@pytest.fixture(autouse=True)
+def setup_teardown(configfile):
+    yield
+    teardown()
 
 bucket_counter = itertools.count(1)
 
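These two fixtures are what lets the old module-level setup/teardown survive under pytest: ``configfile`` runs ``setup()`` once per test package thanks to ``scope="package"``, and the ``autouse=True`` fixture chains onto it so every test gets a teardown without requesting anything explicitly. The same pattern in isolation (names other than the fixture decorators are hypothetical)::

    import pytest

    @pytest.fixture(scope="package")
    def shared_resource():
        resource = object()      # stands in for one-time, expensive setup
        yield resource           # created once, shared across the package

    @pytest.fixture(autouse=True)
    def per_test_cleanup(shared_resource):
        yield                    # the test body runs here
        # code after the yield runs as teardown after every test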
(the diff of one file was suppressed by the page because it is too large)
(file header lost in the capture; the following hunks match the Boto2 functional tests, s3tests/functional/test_s3.py)

@@ -7,7 +7,7 @@ import datetime
 import time
 import email.utils
 import isodate
-import nose
+import pytest
 import operator
 import socket
 import ssl
@@ -27,16 +27,14 @@ import re
 from collections import defaultdict
 from urllib.parse import urlparse
 
-from nose.tools import eq_ as eq
-from nose.plugins.attrib import attr
-from nose.plugins.skip import SkipTest
-
 from . import utils
 from .utils import assert_raises
 
 from .policy import Policy, Statement, make_json_policy
 
 from . import (
+    configfile,
+    setup_teardown,
     nuke_prefixed_buckets,
     get_new_bucket,
     get_new_bucket_name,
@@ -53,9 +51,9 @@ from . import (
 
 def check_access_denied(fn, *args, **kwargs):
     e = assert_raises(boto.exception.S3ResponseError, fn, *args, **kwargs)
-    eq(e.status, 403)
-    eq(e.reason, 'Forbidden')
-    eq(e.error_code, 'AccessDenied')
+    assert e.status == 403
+    assert e.reason == 'Forbidden'
+    assert e.error_code == 'AccessDenied'
 
 def check_bad_bucket_name(name):
     """
@@ -63,9 +61,9 @@ def check_bad_bucket_name(name):
     that the request fails because of an invalid bucket name.
     """
     e = assert_raises(boto.exception.S3ResponseError, get_new_bucket, targets.main.default, name)
-    eq(e.status, 400)
-    eq(e.reason.lower(), 'bad request') # some proxies vary the case
-    eq(e.error_code, 'InvalidBucketName')
+    assert e.status == 400
+    assert e.reason.lower() == 'bad request' # some proxies vary the case
+    assert e.error_code == 'InvalidBucketName'
 
 def _create_keys(bucket=None, keys=[]):
     """
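The mechanical core of this port is swapping nose's ``eq_`` helper for bare ``assert`` statements; pytest's assertion rewriting then reports both operands on failure, so no helper is needed. A standalone sketch (values hypothetical)::

    def check_status(res_status):
        # nose style (removed):  eq(res_status, 403)
        # pytest style: a plain assert; on failure pytest prints the
        # actual value of res_status alongside the expected 403
        assert res_status == 403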
@@ -94,20 +92,16 @@ def _get_alt_connection():
 
 
 # Breaks DNS with SubdomainCallingFormat
-@attr('fails_with_subdomain')
-@attr(resource='bucket')
-@attr(method='put')
-@attr(operation='create w/! in name')
-@attr(assertion='fails with subdomain')
+@pytest.mark.fails_with_subdomain
 def test_bucket_create_naming_bad_punctuation():
     # characters other than [a-zA-Z0-9._-]
     check_bad_bucket_name('alpha!soup')
 
 def check_versioning(bucket, status):
     try:
-        eq(bucket.get_versioning_status()['Versioning'], status)
+        assert bucket.get_versioning_status()['Versioning'] == status
     except KeyError:
-        eq(status, None)
+        assert status == None
 
 # amazon is eventual consistent, retry a bit if failed
 def check_configure_versioning_retry(bucket, status, expected_string):
@@ -126,13 +120,10 @@ def check_configure_versioning_retry(bucket, status, expected_string):
 
         time.sleep(1)
 
-    eq(expected_string, read_status)
+    assert expected_string == read_status
 
-@attr(resource='object')
-@attr(method='create')
-@attr(operation='create versioned object, read not exist null version')
-@attr(assertion='read null version behaves correctly')
-@attr('versioning')
+@pytest.mark.versioning
+@pytest.mark.fails_on_dbstore
 def test_versioning_obj_read_not_exist_null():
     bucket = get_new_bucket()
     check_versioning(bucket, None)
@@ -146,15 +137,12 @@ def test_versioning_obj_read_not_exist_null():
     key.set_contents_from_string(content)
 
     key = bucket.get_key(objname, version_id='null')
-    eq(key, None)
+    assert key == None
 
-@attr(resource='object')
-@attr(method='put')
-@attr(operation='append object')
-@attr(assertion='success')
-@attr('fails_on_aws')
-@attr('fails_with_subdomain')
-@attr('appendobject')
+@pytest.mark.fails_on_aws
+@pytest.mark.fails_with_subdomain
+@pytest.mark.appendobject
+@pytest.mark.fails_on_dbstore
 def test_append_object():
     bucket = get_new_bucket()
     key = bucket.new_key('foo')
@@ -166,19 +154,16 @@ def test_append_object():
     res = _make_raw_request(host=s3.main.host, port=s3.main.port, method='PUT', path=path1, body='abc', secure=s3.main.is_secure)
     path2 = path + '&append&position=3'
     res = _make_raw_request(host=s3.main.host, port=s3.main.port, method='PUT', path=path2, body='abc', secure=s3.main.is_secure)
-    eq(res.status, 200)
-    eq(res.reason, 'OK')
+    assert res.status == 200
+    assert res.reason == 'OK'
 
     key = bucket.get_key('foo')
-    eq(key.size, 6)
+    assert key.size == 6
 
-@attr(resource='object')
-@attr(method='put')
-@attr(operation='append to normal object')
-@attr(assertion='fails 409')
-@attr('fails_on_aws')
-@attr('fails_with_subdomain')
-@attr('appendobject')
+@pytest.mark.fails_on_aws
+@pytest.mark.fails_with_subdomain
+@pytest.mark.appendobject
+@pytest.mark.fails_on_dbstore
 def test_append_normal_object():
     bucket = get_new_bucket()
     key = bucket.new_key('foo')
@@ -189,16 +174,13 @@ def test_append_normal_object():
     path = o.path + '?' + o.query
     path = path + '&append&position=3'
     res = _make_raw_request(host=s3.main.host, port=s3.main.port, method='PUT', path=path, body='abc', secure=s3.main.is_secure)
-    eq(res.status, 409)
+    assert res.status == 409
 
 
-@attr(resource='object')
-@attr(method='put')
-@attr(operation='append position not right')
-@attr(assertion='fails 409')
-@attr('fails_on_aws')
-@attr('fails_with_subdomain')
-@attr('appendobject')
+@pytest.mark.fails_on_aws
+@pytest.mark.fails_with_subdomain
+@pytest.mark.appendobject
+@pytest.mark.fails_on_dbstore
 def test_append_object_position_wrong():
     bucket = get_new_bucket()
     key = bucket.new_key('foo')
@@ -210,17 +192,13 @@ def test_append_object_position_wrong():
     res = _make_raw_request(host=s3.main.host, port=s3.main.port, method='PUT', path=path1, body='abc', secure=s3.main.is_secure)
     path2 = path + '&append&position=9'
     res = _make_raw_request(host=s3.main.host, port=s3.main.port, method='PUT', path=path2, body='abc', secure=s3.main.is_secure)
-    eq(res.status, 409)
-    eq(int(res.getheader('x-rgw-next-append-position')), 3)
+    assert res.status == 409
+    assert int(res.getheader('x-rgw-next-append-position')) == 3
 
 
 # TODO rgw log_bucket.set_as_logging_target() gives 403 Forbidden
 # http://tracker.newdream.net/issues/984
-@attr(resource='bucket.log')
-@attr(method='put')
-@attr(operation='set/enable/disable logging target')
-@attr(assertion='operations succeed')
-@attr('fails_on_rgw')
+@pytest.mark.fails_on_rgw
 def test_logging_toggle():
     bucket = get_new_bucket()
     log_bucket = get_new_bucket(targets.main.default, bucket.name + '-log')
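The ``@attr`` stacks collapse into ``@pytest.mark`` decorators: the descriptive ``resource``/``method``/``operation``/``assertion`` metadata has no pytest equivalent here and is simply dropped, while the string attributes become the registered markers from pytest.ini. A before/after sketch (the test body is hypothetical)::

    import pytest

    # before: @attr('fails_with_subdomain') plus four descriptive @attr(...) lines
    # after: one registered marker, selectable with  pytest -m fails_with_subdomain
    @pytest.mark.fails_with_subdomain
    def test_subdomain_sensitive_case():
        pass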
@@ -236,242 +214,6 @@ def list_bucket_storage_class(bucket):
 
     return result
 
-# The test harness for lifecycle is configured to treat days as 10 second intervals.
-@attr(resource='bucket')
-@attr(method='put')
-@attr(operation='test lifecycle expiration')
-@attr('lifecycle')
-@attr('lifecycle_transition')
-@attr('fails_on_aws')
-def test_lifecycle_transition():
-    sc = configured_storage_classes()
-    if len(sc) < 3:
-        raise SkipTest
-
-    bucket = set_lifecycle(rules=[{'id': 'rule1', 'transition': lc_transition(days=1, storage_class=sc[1]), 'prefix': 'expire1/', 'status': 'Enabled'},
-                                  {'id':'rule2', 'transition': lc_transition(days=4, storage_class=sc[2]), 'prefix': 'expire3/', 'status': 'Enabled'}])
-    _create_keys(bucket=bucket, keys=['expire1/foo', 'expire1/bar', 'keep2/foo',
-                                      'keep2/bar', 'expire3/foo', 'expire3/bar'])
-    # Get list of all keys
-    init_keys = bucket.get_all_keys()
-    eq(len(init_keys), 6)
-
-    # Wait for first expiration (plus fudge to handle the timer window)
-    time.sleep(25)
-    expire1_keys = list_bucket_storage_class(bucket)
-    eq(len(expire1_keys['STANDARD']), 4)
-    eq(len(expire1_keys[sc[1]]), 2)
-    eq(len(expire1_keys[sc[2]]), 0)
-
-    # Wait for next expiration cycle
-    time.sleep(10)
-    keep2_keys = list_bucket_storage_class(bucket)
-    eq(len(keep2_keys['STANDARD']), 4)
-    eq(len(keep2_keys[sc[1]]), 2)
-    eq(len(keep2_keys[sc[2]]), 0)
-
-    # Wait for final expiration cycle
-    time.sleep(20)
-    expire3_keys = list_bucket_storage_class(bucket)
-    eq(len(expire3_keys['STANDARD']), 2)
-    eq(len(expire3_keys[sc[1]]), 2)
-    eq(len(expire3_keys[sc[2]]), 2)
-
-# The test harness for lifecycle is configured to treat days as 10 second intervals.
-@attr(resource='bucket')
-@attr(method='put')
-@attr(operation='test lifecycle expiration')
-@attr('lifecycle')
-@attr('lifecycle_transition')
-@attr('fails_on_aws')
-def test_lifecycle_transition_single_rule_multi_trans():
-    sc = configured_storage_classes()
-    if len(sc) < 3:
-        raise SkipTest
-
-    bucket = set_lifecycle(rules=[
-        {'id': 'rule1',
-         'transition': lc_transitions([
-             lc_transition(days=1, storage_class=sc[1]),
-             lc_transition(days=4, storage_class=sc[2])]),
-         'prefix': 'expire1/',
-         'status': 'Enabled'}])
-
-    _create_keys(bucket=bucket, keys=['expire1/foo', 'expire1/bar', 'keep2/foo',
-                                      'keep2/bar', 'expire3/foo', 'expire3/bar'])
-    # Get list of all keys
-    init_keys = bucket.get_all_keys()
-    eq(len(init_keys), 6)
-
-    # Wait for first expiration (plus fudge to handle the timer window)
-    time.sleep(25)
-    expire1_keys = list_bucket_storage_class(bucket)
-    eq(len(expire1_keys['STANDARD']), 4)
-    eq(len(expire1_keys[sc[1]]), 2)
-    eq(len(expire1_keys[sc[2]]), 0)
-
-    # Wait for next expiration cycle
-    time.sleep(10)
-    keep2_keys = list_bucket_storage_class(bucket)
-    eq(len(keep2_keys['STANDARD']), 4)
-    eq(len(keep2_keys[sc[1]]), 2)
-    eq(len(keep2_keys[sc[2]]), 0)
-
-    # Wait for final expiration cycle
-    time.sleep(20)
-    expire3_keys = list_bucket_storage_class(bucket)
-    eq(len(expire3_keys['STANDARD']), 4)
-    eq(len(expire3_keys[sc[1]]), 0)
-    eq(len(expire3_keys[sc[2]]), 2)
-
-def generate_lifecycle_body(rules):
-    body = '<?xml version="1.0" encoding="UTF-8"?><LifecycleConfiguration>'
-    for rule in rules:
-        body += '<Rule><ID>%s</ID><Status>%s</Status>' % (rule['ID'], rule['Status'])
-        if 'Prefix' in list(rule.keys()):
-            body += '<Prefix>%s</Prefix>' % rule['Prefix']
-        if 'Filter' in list(rule.keys()):
-            prefix_str= '' # AWS supports empty filters
-            if 'Prefix' in list(rule['Filter'].keys()):
-                prefix_str = '<Prefix>%s</Prefix>' % rule['Filter']['Prefix']
-            body += '<Filter>%s</Filter>' % prefix_str
-
-        if 'Expiration' in list(rule.keys()):
-            if 'ExpiredObjectDeleteMarker' in list(rule['Expiration'].keys()):
-                body += '<Expiration><ExpiredObjectDeleteMarker>%s</ExpiredObjectDeleteMarker></Expiration>' \
-                        % rule['Expiration']['ExpiredObjectDeleteMarker']
-            elif 'Date' in list(rule['Expiration'].keys()):
-                body += '<Expiration><Date>%s</Date></Expiration>' % rule['Expiration']['Date']
-            else:
-                body += '<Expiration><Days>%d</Days></Expiration>' % rule['Expiration']['Days']
-        if 'NoncurrentVersionExpiration' in list(rule.keys()):
-            body += '<NoncurrentVersionExpiration><NoncurrentDays>%d</NoncurrentDays></NoncurrentVersionExpiration>' % \
-                    rule['NoncurrentVersionExpiration']['NoncurrentDays']
-        if 'NoncurrentVersionTransition' in list(rule.keys()):
-            for t in rule['NoncurrentVersionTransition']:
-                body += '<NoncurrentVersionTransition>'
-                body += '<NoncurrentDays>%d</NoncurrentDays>' % \
-                    t['NoncurrentDays']
-                body += '<StorageClass>%s</StorageClass>' % \
-                    t['StorageClass']
-                body += '</NoncurrentVersionTransition>'
-        if 'AbortIncompleteMultipartUpload' in list(rule.keys()):
-            body += '<AbortIncompleteMultipartUpload><DaysAfterInitiation>%d</DaysAfterInitiation>' \
-                    '</AbortIncompleteMultipartUpload>' % rule['AbortIncompleteMultipartUpload']['DaysAfterInitiation']
-        body += '</Rule>'
-    body += '</LifecycleConfiguration>'
-    return body
-
-
-@attr(resource='bucket')
-@attr(method='put')
-@attr(operation='set lifecycle config with noncurrent version expiration')
-@attr('lifecycle')
-@attr('lifecycle_transition')
-def test_lifecycle_set_noncurrent_transition():
-    sc = configured_storage_classes()
-    if len(sc) < 3:
-        raise SkipTest
-
-    bucket = get_new_bucket()
-    rules = [
-        {
-            'ID': 'rule1',
-            'Prefix': 'test1/',
-            'Status': 'Enabled',
-            'NoncurrentVersionTransition': [
-                {
-                    'NoncurrentDays': 2,
-                    'StorageClass': sc[1]
-                },
-                {
-                    'NoncurrentDays': 4,
-                    'StorageClass': sc[2]
-                }
-            ],
-            'NoncurrentVersionExpiration': {
-                'NoncurrentDays': 6
-            }
-        },
-        {'ID': 'rule2', 'Prefix': 'test2/', 'Status': 'Disabled', 'NoncurrentVersionExpiration': {'NoncurrentDays': 3}}
-    ]
-    body = generate_lifecycle_body(rules)
-    fp = StringIO(body)
-    md5 = boto.utils.compute_md5(fp)
-    headers = {'Content-MD5': md5[1], 'Content-Type': 'text/xml'}
-    res = bucket.connection.make_request('PUT', bucket.name, data=fp.getvalue(), query_args='lifecycle',
-                                         headers=headers)
-    eq(res.status, 200)
-    eq(res.reason, 'OK')
-
-
-@attr(resource='bucket')
-@attr(method='put')
-@attr(operation='test lifecycle non-current version expiration')
-@attr('lifecycle')
-@attr('lifecycle_expiration')
-@attr('lifecycle_transition')
-@attr('fails_on_aws')
-def test_lifecycle_noncur_transition():
-    sc = configured_storage_classes()
-    if len(sc) < 3:
-        raise SkipTest
-
-    bucket = get_new_bucket()
-    check_configure_versioning_retry(bucket, True, "Enabled")
-
-    rules = [
-        {
-            'ID': 'rule1',
-            'Prefix': 'test1/',
-            'Status': 'Enabled',
-            'NoncurrentVersionTransition': [
-                {
-                    'NoncurrentDays': 1,
-                    'StorageClass': sc[1]
-                },
-                {
-                    'NoncurrentDays': 3,
-                    'StorageClass': sc[2]
-                }
-            ],
-            'NoncurrentVersionExpiration': {
-                'NoncurrentDays': 5
-            }
-        }
-    ]
-    body = generate_lifecycle_body(rules)
-    fp = StringIO(body)
-    md5 = boto.utils.compute_md5(fp)
-    headers = {'Content-MD5': md5[1], 'Content-Type': 'text/xml'}
-    bucket.connection.make_request('PUT', bucket.name, data=fp.getvalue(), query_args='lifecycle',
-                                   headers=headers)
-
-    create_multiple_versions(bucket, "test1/a", 3)
-    create_multiple_versions(bucket, "test1/b", 3)
-    init_keys = bucket.get_all_versions()
-    eq(len(init_keys), 6)
-
-    time.sleep(25)
-    expire1_keys = list_bucket_storage_class(bucket)
-    eq(len(expire1_keys['STANDARD']), 2)
-    eq(len(expire1_keys[sc[1]]), 4)
-    eq(len(expire1_keys[sc[2]]), 0)
-
-    time.sleep(20)
-    expire1_keys = list_bucket_storage_class(bucket)
-    eq(len(expire1_keys['STANDARD']), 2)
-    eq(len(expire1_keys[sc[1]]), 0)
-    eq(len(expire1_keys[sc[2]]), 4)
-
-    time.sleep(20)
-    expire_keys = bucket.get_all_versions()
-    expire1_keys = list_bucket_storage_class(bucket)
-    eq(len(expire1_keys['STANDARD']), 2)
-    eq(len(expire1_keys[sc[1]]), 0)
-    eq(len(expire1_keys[sc[2]]), 0)
-
 def transfer_part(bucket, mp_id, mp_keyname, i, part, headers=None):
     """Transfer a part of a multipart upload. Designed to be run in parallel.
     """
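For reference, the deleted ``generate_lifecycle_body`` helper serialized the rule dicts above into a single-line XML document. Hand-tracing it on the ``rule2`` dict from ``test_lifecycle_set_noncurrent_transition`` yields (reconstructed sample output, not taken from a live run)::

    <?xml version="1.0" encoding="UTF-8"?><LifecycleConfiguration><Rule><ID>rule2</ID><Status>Disabled</Status><Prefix>test2/</Prefix><NoncurrentVersionExpiration><NoncurrentDays>3</NoncurrentDays></NoncurrentVersionExpiration></Rule></LifecycleConfiguration>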
@@ -543,13 +285,13 @@ def gen_rand_string(size, chars=string.ascii_uppercase + string.digits):
 
 def verify_object(bucket, k, data=None, storage_class=None):
     if storage_class:
-        eq(k.storage_class, storage_class)
+        assert k.storage_class == storage_class
 
     if data:
         read_data = k.get_contents_as_string()
 
-        equal = data == read_data # avoid spamming log if data not equal
-        eq(equal, True)
+        equal = data == read_data.decode() # avoid spamming log if data not equal
+        assert equal == True
 
 def copy_object_storage_class(src_bucket, src_key, dest_bucket, dest_key, storage_class):
     query_args=None
@@ -565,7 +307,7 @@ def copy_object_storage_class(src_bucket, src_key, dest_bucket, dest_key, storage_class):
 
     res = dest_bucket.connection.make_request('PUT', dest_bucket.name, dest_key.name,
                                               query_args=query_args, headers=headers)
-    eq(res.status, 200)
+    assert res.status == 200
 
 def _populate_multipart_key(bucket, kname, size, storage_class=None):
     (upload, data) = _multipart_upload(bucket, kname, size, storage_class=storage_class)
@@ -620,6 +362,9 @@ def configured_storage_classes():
         if item != 'STANDARD':
             sc.append(item)
 
+    sc = [i for i in sc if i]
+    print("storage classes configured: " + str(sc))
+
     return sc
 
 def lc_transition(days=None, date=None, storage_class=None):
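The added ``.decode()`` in ``verify_object`` fixes a Python 3 type mismatch: boto's ``get_contents_as_string()`` returns ``bytes``, and ``bytes`` never compare equal to ``str``, so the old comparison could only fail. A standalone illustration::

    data = 'foo'            # what the test wrote
    read_data = b'foo'      # what boto hands back under Python 3
    assert (data == read_data) is False   # str vs bytes: never equal
    assert data == read_data.decode()     # equal once decoded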
@@ -633,15 +378,13 @@ def lc_transitions(transitions=None):
     return result
 
 
-@attr(resource='object')
-@attr(method='put')
-@attr(operation='test create object with storage class')
-@attr('storage_class')
-@attr('fails_on_aws')
+@pytest.mark.storage_class
+@pytest.mark.fails_on_aws
+@pytest.mark.fails_on_dbstore
 def test_object_storage_class():
     sc = configured_storage_classes()
     if len(sc) < 2:
-        raise SkipTest
+        pytest.skip('requires multiple storage classes')
 
     bucket = get_new_bucket()
 
@@ -651,15 +394,13 @@ def test_object_storage_class():
 
     verify_object(bucket, k, data, storage_class)
 
-@attr(resource='object')
-@attr(method='put')
-@attr(operation='test create multipart object with storage class')
-@attr('storage_class')
-@attr('fails_on_aws')
+@pytest.mark.storage_class
+@pytest.mark.fails_on_aws
+@pytest.mark.fails_on_dbstore
 def test_object_storage_class_multipart():
     sc = configured_storage_classes()
     if len(sc) < 2:
-        raise SkipTest
+        pytest.skip('requires multiple storage classes')
 
     bucket = get_new_bucket()
     size = 11 * 1024 * 1024
@@ -669,13 +410,13 @@ def test_object_storage_class_multipart():
     (upload, data) = _multipart_upload(bucket, key, size, storage_class=storage_class)
     upload.complete_upload()
     key2 = bucket.get_key(key)
-    eq(key2.size, size)
-    eq(key2.storage_class, storage_class)
+    assert key2.size == size
+    assert key2.storage_class == storage_class
 
 def _do_test_object_modify_storage_class(obj_write_func, size):
     sc = configured_storage_classes()
     if len(sc) < 2:
-        raise SkipTest
+        pytest.skip('requires multiple storage classes')
 
     bucket = get_new_bucket()
 
@@ -692,27 +433,23 @@ def _do_test_object_modify_storage_class(obj_write_func, size):
         copy_object_storage_class(bucket, k, bucket, k, new_storage_class)
         verify_object(bucket, k, data, storage_class)
 
-@attr(resource='object')
-@attr(method='put')
-@attr(operation='test changing objects storage class')
-@attr('storage_class')
-@attr('fails_on_aws')
+@pytest.mark.storage_class
+@pytest.mark.fails_on_aws
+@pytest.mark.fails_on_dbstore
 def test_object_modify_storage_class():
     _do_test_object_modify_storage_class(_populate_key, size=9*1024*1024)
 
 
-@attr(resource='object')
-@attr(method='put')
-@attr(operation='test changing objects storage class')
-@attr('storage_class')
-@attr('fails_on_aws')
+@pytest.mark.storage_class
+@pytest.mark.fails_on_aws
+@pytest.mark.fails_on_dbstore
 def test_object_modify_storage_class_multipart():
     _do_test_object_modify_storage_class(_populate_multipart_key, size=11*1024*1024)
 
 def _do_test_object_storage_class_copy(obj_write_func, size):
     sc = configured_storage_classes()
     if len(sc) < 2:
-        raise SkipTest
+        pytest.skip('requires multiple storage classes')
 
     src_bucket = get_new_bucket()
     dest_bucket = get_new_bucket()
@@ -730,19 +467,15 @@ def _do_test_object_storage_class_copy(obj_write_func, size):
     copy_object_storage_class(src_bucket, src_key, dest_bucket, dest_key, new_storage_class)
     verify_object(dest_bucket, dest_key, data, new_storage_class)
 
-@attr(resource='object')
-@attr(method='copy')
-@attr(operation='test copy object to object with different storage class')
-@attr('storage_class')
-@attr('fails_on_aws')
+@pytest.mark.storage_class
+@pytest.mark.fails_on_aws
+@pytest.mark.fails_on_dbstore
 def test_object_storage_class_copy():
     _do_test_object_storage_class_copy(_populate_key, size=9*1024*1024)
 
-@attr(resource='object')
-@attr(method='copy')
-@attr(operation='test changing objects storage class')
-@attr('storage_class')
-@attr('fails_on_aws')
+@pytest.mark.storage_class
+@pytest.mark.fails_on_aws
+@pytest.mark.fails_on_dbstore
 def test_object_storage_class_copy_multipart():
     _do_test_object_storage_class_copy(_populate_multipart_key, size=9*1024*1024)
 
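``raise SkipTest`` becomes ``pytest.skip(reason)``, which skips the test and records a human-readable reason in the report. A sketch of the pattern (test name and data hypothetical)::

    import pytest

    def test_needs_two_storage_classes():
        sc = ['STANDARD']          # pretend only one class is configured
        if len(sc) < 2:
            pytest.skip('requires multiple storage classes')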
@ -799,7 +532,7 @@ class FakeFileVerifier(object):
|
||||||
if self.char == None:
|
if self.char == None:
|
||||||
self.char = data[0]
|
self.char = data[0]
|
||||||
self.size += size
|
self.size += size
|
||||||
eq(data.decode(), self.char*size)
|
assert data.decode() == self.char*size
|
||||||
|
|
||||||
def _verify_atomic_key_data(key, size=-1, char=None):
|
def _verify_atomic_key_data(key, size=-1, char=None):
|
||||||
"""
|
"""
|
||||||
|
@ -808,7 +541,7 @@ def _verify_atomic_key_data(key, size=-1, char=None):
|
||||||
fp_verify = FakeFileVerifier(char)
|
fp_verify = FakeFileVerifier(char)
|
||||||
key.get_contents_to_file(fp_verify)
|
key.get_contents_to_file(fp_verify)
|
||||||
if size >= 0:
|
if size >= 0:
|
||||||
eq(fp_verify.size, size)
|
assert fp_verify.size == size
|
||||||
|
|
||||||
def _test_atomic_dual_conditional_write(file_size):
|
def _test_atomic_dual_conditional_write(file_size):
|
||||||
"""
|
"""
|
||||||
@@ -837,26 +570,20 @@ def _test_atomic_dual_conditional_write(file_size):
     # key.set_contents_from_file(fp_c, headers={'If-Match': etag_fp_a})
     e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_file, fp_c,
                       headers={'If-Match': etag_fp_a})
-    eq(e.status, 412)
-    eq(e.reason, 'Precondition Failed')
-    eq(e.error_code, 'PreconditionFailed')
+    assert e.status == 412
+    assert e.reason == 'Precondition Failed'
+    assert e.error_code == 'PreconditionFailed'

     # verify the file
     _verify_atomic_key_data(key, file_size, 'B')

-@attr(resource='object')
-@attr(method='put')
-@attr(operation='write one or the other')
-@attr(assertion='1MB successful')
-@attr('fails_on_aws')
+@pytest.mark.fails_on_aws
+@pytest.mark.fails_on_dbstore
 def test_atomic_dual_conditional_write_1mb():
     _test_atomic_dual_conditional_write(1024*1024)

-@attr(resource='object')
-@attr(method='put')
-@attr(operation='write file in deleted bucket')
-@attr(assertion='fail 404')
-@attr('fails_on_aws')
+@pytest.mark.fails_on_aws
+@pytest.mark.fails_on_dbstore
 def test_atomic_write_bucket_gone():
     bucket = get_new_bucket()
@@ -868,9 +595,9 @@ def test_atomic_write_bucket_gone():
     key = bucket.new_key('foo')
     fp_a = FakeWriteFile(1024*1024, 'A', remove_bucket)
     e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_file, fp_a)
-    eq(e.status, 404)
-    eq(e.reason, 'Not Found')
-    eq(e.error_code, 'NoSuchBucket')
+    assert e.status == 404
+    assert e.reason == 'Not Found'
+    assert e.error_code == 'NoSuchBucket'

 def _multipart_upload_enc(bucket, s3_key_name, size, part_size=5*1024*1024,
                           do_list=None, init_headers=None, part_headers=None,
@@ -896,11 +623,8 @@ def _multipart_upload_enc(bucket, s3_key_name, size, part_size=5*1024*1024,



-@attr(resource='object')
-@attr(method='put')
-@attr(operation='multipart upload with bad key for uploading chunks')
-@attr(assertion='successful')
-@attr('encryption')
+@pytest.mark.encryption
+@pytest.mark.fails_on_dbstore
 def test_encryption_sse_c_multipart_invalid_chunks_1():
     bucket = get_new_bucket()
     key = "multipart_enc"
@@ -921,13 +645,10 @@ def test_encryption_sse_c_multipart_invalid_chunks_1():
                       _multipart_upload_enc, bucket, key, objlen,
                       init_headers=init_headers, part_headers=part_headers,
                       metadata={'foo': 'bar'})
-    eq(e.status, 400)
+    assert e.status == 400

-@attr(resource='object')
-@attr(method='put')
-@attr(operation='multipart upload with bad md5 for chunks')
-@attr(assertion='successful')
-@attr('encryption')
+@pytest.mark.encryption
+@pytest.mark.fails_on_dbstore
 def test_encryption_sse_c_multipart_invalid_chunks_2():
     bucket = get_new_bucket()
     key = "multipart_enc"
@@ -948,14 +669,11 @@ def test_encryption_sse_c_multipart_invalid_chunks_2():
                       _multipart_upload_enc, bucket, key, objlen,
                       init_headers=init_headers, part_headers=part_headers,
                       metadata={'foo': 'bar'})
-    eq(e.status, 400)
+    assert e.status == 400

-@attr(resource='bucket')
-@attr(method='get')
-@attr(operation='Test Bucket Policy for a user belonging to a different tenant')
-@attr(assertion='succeeds')
-@attr('fails_with_subdomain')
-@attr('bucket-policy')
+@pytest.mark.fails_with_subdomain
+@pytest.mark.bucket_policy
+@pytest.mark.fails_on_dbstore
 def test_bucket_policy_different_tenant():
     bucket = get_new_bucket()
     key = bucket.new_key('asdf')
@@ -990,10 +708,8 @@ def test_bucket_policy_different_tenant():
     b = new_conn.get_bucket(bucket_name)
     b.get_all_keys()

-@attr(resource='bucket')
-@attr(method='put')
-@attr(operation='Test put condition operator end with ifExists')
-@attr('bucket-policy')
+@pytest.mark.bucket_policy
+@pytest.mark.fails_on_dbstore
 def test_bucket_policy_set_condition_operator_end_with_IfExists():
     bucket = _create_keys(keys=['foo'])
     policy = '''{
@@ -1012,73 +728,25 @@ def test_bucket_policy_set_condition_operator_end_with_IfExists():
         }
       ]
     }''' % bucket.name
-    eq(bucket.set_policy(policy), True)
+    assert bucket.set_policy(policy) == True
     res = _make_request('GET', bucket.name, bucket.get_key("foo"),
                         request_headers={'referer': 'http://www.example.com/'})
-    eq(res.status, 200)
+    assert res.status == 200
    res = _make_request('GET', bucket.name, bucket.get_key("foo"),
                        request_headers={'referer': 'http://www.example.com/index.html'})
-    eq(res.status, 200)
+    assert res.status == 200
     res = _make_request('GET', bucket.name, bucket.get_key("foo"))
-    eq(res.status, 200)
+    assert res.status == 200
     res = _make_request('GET', bucket.name, bucket.get_key("foo"),
                         request_headers={'referer': 'http://example.com'})
-    eq(res.status, 403)
+    assert res.status == 403

 def _make_arn_resource(path="*"):
     return "arn:aws:s3:::{}".format(path)

-@attr(resource='object')
-@attr(method='put')
-@attr(operation='Deny put obj requests without encryption')
-@attr(assertion='success')
-@attr('encryption')
-@attr('bucket-policy')
-def test_bucket_policy_put_obj_enc():
-
-    bucket = get_new_bucket()
-
-    deny_incorrect_algo = {
-        "StringNotEquals": {
-            "s3:x-amz-server-side-encryption": "AES256"
-        }
-    }
-
-    deny_unencrypted_obj = {
-        "Null" : {
-            "s3:x-amz-server-side-encryption": "true"
-        }
-    }
-
-    p = Policy()
-    resource = _make_arn_resource("{}/{}".format(bucket.name, "*"))
-
-    s1 = Statement("s3:PutObject", resource, effect="Deny", condition=deny_incorrect_algo)
-    s2 = Statement("s3:PutObject", resource, effect="Deny", condition=deny_unencrypted_obj)
-    policy_document = p.add_statement(s1).add_statement(s2).to_json()
-
-    bucket.set_policy(policy_document)
-
-    key1_str ='testobj'
-    key1 = bucket.new_key(key1_str)
-    check_access_denied(key1.set_contents_from_string, key1_str)
-
-    sse_client_headers = {
-        'x-amz-server-side-encryption' : 'AES256',
-        'x-amz-server-side-encryption-customer-algorithm': 'AES256',
-        'x-amz-server-side-encryption-customer-key': 'pO3upElrwuEXSoFwCfnZPdSsmt/xWeFa0N9KgDijwVs=',
-        'x-amz-server-side-encryption-customer-key-md5': 'DWygnHRtgiJ77HCm+1rvHw=='
-    }
-
-    key1.set_contents_from_string(key1_str, headers=sse_client_headers)
-
-@attr(resource='object')
-@attr(method='put')
-@attr(operation='put obj with RequestObjectTag')
-@attr(assertion='success')
-@attr('tagging')
-@attr('bucket-policy')
+@pytest.mark.tagging
+@pytest.mark.bucket_policy
+@pytest.mark.fails_on_dbstore
 def test_bucket_policy_put_obj_request_obj_tag():

     bucket = get_new_bucket()
@@ -1,7 +1,6 @@

 import sys
-import collections
-import nose
+from collections.abc import Container
+import pytest
 import string
 import random
 from pprint import pprint
@@ -11,14 +10,11 @@ import socket

 from urllib.parse import urlparse

-from nose.tools import eq_ as eq, ok_ as ok
-from nose.plugins.attrib import attr
-from nose.tools import timed
-from nose.plugins.skip import SkipTest

 from .. import common

 from . import (
+    configfile,
+    setup_teardown,
     get_new_bucket,
     get_new_bucket_name,
     s3,
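The import hunk above also swaps `collections.Container` for `collections.abc.Container`; the alias in the `collections` namespace was deprecated since Python 3.3 and removed in 3.10. A quick demonstration of the ABC:

    from collections.abc import Container

    # Container only promises __contains__; note that str is itself a
    # Container, which is why the helpers below special-case strings
    assert isinstance({200, 301}, Container)
    assert isinstance('OK', Container)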
@@ -43,37 +39,26 @@ ERRORDOC_TEMPLATE = '<html><h1>ErrorDoc</h1><body>{random}</body></html>'

 CAN_WEBSITE = None

+@pytest.fixture(autouse=True, scope="module")
 def check_can_test_website():
-    global CAN_WEBSITE
-    # This is a bit expensive, so we cache this
-    if CAN_WEBSITE is None:
-        bucket = get_new_bucket()
-        try:
-            wsconf = bucket.get_website_configuration()
-            CAN_WEBSITE = True
-        except boto.exception.S3ResponseError as e:
-            if e.status == 404 and e.reason == 'Not Found' and e.error_code in ['NoSuchWebsiteConfiguration', 'NoSuchKey']:
-                CAN_WEBSITE = True
-            elif e.status == 405 and e.reason == 'Method Not Allowed' and e.error_code == 'MethodNotAllowed':
-                # rgw_enable_static_website is false
-                CAN_WEBSITE = False
-            elif e.status == 403 and e.reason == 'SignatureDoesNotMatch' and e.error_code == 'Forbidden':
-                # This is older versions that do not support the website code
-                CAN_WEBSITE = False
-            elif e.status == 501 and e.error_code == 'NotImplemented':
-                CAN_WEBSITE = False
-            else:
-                raise RuntimeError("Unknown response in checking if WebsiteConf is supported", e)
-        finally:
-            bucket.delete()
-
-    if CAN_WEBSITE is True:
+    bucket = get_new_bucket()
+    try:
+        wsconf = bucket.get_website_configuration()
         return True
-    elif CAN_WEBSITE is False:
-        raise SkipTest
-    else:
-        raise RuntimeError("Unknown cached response in checking if WebsiteConf is supported")
+    except boto.exception.S3ResponseError as e:
+        if e.status == 404 and e.reason == 'Not Found' and e.error_code in ['NoSuchWebsiteConfiguration', 'NoSuchKey']:
+            return True
+        elif e.status == 405 and e.reason == 'Method Not Allowed' and e.error_code == 'MethodNotAllowed':
+            pytest.skip('rgw_enable_static_website is false')
+        elif e.status == 403 and e.reason == 'SignatureDoesNotMatch' and e.error_code == 'Forbidden':
+            # This is older versions that do not support the website code
+            pytest.skip('static website is not implemented')
+        elif e.status == 501 and e.error_code == 'NotImplemented':
+            pytest.skip('static website is not implemented')
+        else:
+            raise RuntimeError("Unknown response in checking if WebsiteConf is supported", e)
+    finally:
+        bucket.delete()
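The rewrite above replaces the cached CAN_WEBSITE global plus per-test `@nose.with_setup` hooks with a single module-scoped autouse fixture: pytest evaluates it once per module and applies the skip to every test in the file. A sketch of the pattern with a stubbed capability probe (the real code issues a `get_website_configuration()` request instead):

    import pytest

    def probe_backend():
        # hypothetical capability check standing in for the real request
        return False

    @pytest.fixture(autouse=True, scope='module')
    def require_static_website_support():
        # runs once per module; pytest.skip() here skips all of its tests
        if not probe_backend():
            pytest.skip('backend does not support static websites')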
 def make_website_config(xml_fragment):
     """
@@ -169,20 +154,20 @@ def _test_website_prep(bucket, xml_template, hardcoded_fields = {}, expect_fail=
     # Cleanup for our validation
     common.assert_xml_equal(config_xmlcmp, config_xmlnew)
     #print("config_xmlcmp\n", config_xmlcmp)
-    #eq (config_xmlnew, config_xmlcmp)
+    #assert config_xmlnew == config_xmlcmp
     f['WebsiteConfiguration'] = config_xmlcmp
     return f

 def __website_expected_reponse_status(res, status, reason):
-    if not isinstance(status, collections.Container):
+    if not isinstance(status, Container):
         status = set([status])
-    if not isinstance(reason, collections.Container):
+    if not isinstance(reason, Container):
         reason = set([reason])

     if status is not IGNORE_FIELD:
-        ok(res.status in status, 'HTTP code was %s should be %s' % (res.status, status))
+        assert res.status in status, 'HTTP code was %s should be %s' % (res.status, status)
     if reason is not IGNORE_FIELD:
-        ok(res.reason in reason, 'HTTP reason was was %s should be %s' % (res.reason, reason))
+        assert res.reason in reason, 'HTTP reason was was %s should be %s' % (res.reason, reason)

 def _website_expected_default_html(**kwargs):
     fields = []
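The `isinstance(..., Container)` checks let callers pass either a single status or a set of acceptable ones; with `ok()` gone, the failure text moves into the assert's message argument. A minimal demo of the normalization (names invented):

    from collections.abc import Container

    def normalize(value):
        # wrap a scalar (e.g. 301) so membership tests treat single
        # values and sets like {301, 302} the same way
        if not isinstance(value, Container):
            value = set([value])
        return value

    status = 302
    assert status in normalize({301, 302}), 'HTTP code was %s' % status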
@@ -194,7 +179,7 @@ def _website_expected_default_html(**kwargs):
         v = kwargs[k]
         if isinstance(v, str):
             v = [v]
-        elif not isinstance(v, collections.Container):
+        elif not isinstance(v, Container):
             v = [v]
         for v2 in v:
             s = '<li>%s: %s</li>' % (k,v2)
@@ -212,22 +197,22 @@ def _website_expected_error_response(res, bucket_name, status, reason, code, con
     errorcode = res.getheader('x-amz-error-code', None)
     if errorcode is not None:
         if code is not IGNORE_FIELD:
-            eq(errorcode, code)
+            assert errorcode == code

-    if not isinstance(content, collections.Container):
+    if not isinstance(content, Container):
         content = set([content])
     for f in content:
         if f is not IGNORE_FIELD and f is not None:
             f = bytes(f, 'utf-8')
-            ok(f in body, 'HTML should contain "%s"' % (f, ))
+            assert f in body, 'HTML should contain "%s"' % (f, )

 def _website_expected_redirect_response(res, status, reason, new_url):
     body = res.read()
     print(body)
     __website_expected_reponse_status(res, status, reason)
     loc = res.getheader('Location', None)
-    eq(loc, new_url, 'Location header should be set "%s" != "%s"' % (loc,new_url,))
-    ok(len(body) == 0, 'Body of a redirect should be empty')
+    assert loc == new_url, 'Location header should be set "%s" != "%s"' % (loc,new_url,)
+    assert len(body) == 0, 'Body of a redirect should be empty'

 def _website_request(bucket_name, path, connect_hostname=None, method='GET', timeout=None):
     url = get_website_url(proto='http', bucket=bucket_name, path=path)
@@ -246,26 +231,16 @@ def _website_request(bucket_name, path, connect_hostname=None, method='GET', timeout=None):
     return res

 # ---------- Non-existant buckets via the website endpoint
-@attr(resource='bucket')
-@attr(method='get')
-@attr(operation='list')
-@attr(assertion='non-existant bucket via website endpoint should give NoSuchBucket, exposing security risk')
-@attr('s3website')
-@attr('fails_on_rgw')
-@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
+@pytest.mark.s3website
+@pytest.mark.fails_on_rgw
 def test_website_nonexistant_bucket_s3():
     bucket_name = get_new_bucket_name()
     res = _website_request(bucket_name, '')
     _website_expected_error_response(res, bucket_name, 404, 'Not Found', 'NoSuchBucket', content=_website_expected_default_html(Code='NoSuchBucket'))
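Marks such as fails_on_rgw and fails_on_s3 exist so one suite can target different backends; under pytest they are filtered with `-m` expressions rather than nose's `-a` attribute selection. A sketch (deselection expression only, paths omitted):

    import pytest

    # equivalent in spirit to: nosetests -a '!fails_on_rgw'
    pytest.main(['-m', 'not fails_on_rgw and not fails_on_aws'])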
-@attr(resource='bucket')
-@attr(method='get')
-@attr(operation='list')
-#@attr(assertion='non-existant bucket via website endpoint should give Forbidden, keeping bucket identity secure')
-@attr(assertion='non-existant bucket via website endpoint should give NoSuchBucket')
-@attr('s3website')
-@attr('fails_on_s3')
-@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
+@pytest.mark.s3website
+@pytest.mark.fails_on_s3
+@pytest.mark.fails_on_dbstore
 def test_website_nonexistant_bucket_rgw():
     bucket_name = get_new_bucket_name()
     res = _website_request(bucket_name, '')
@@ -273,13 +248,9 @@ def test_website_nonexistant_bucket_rgw():
     _website_expected_error_response(res, bucket_name, 404, 'Not Found', 'NoSuchBucket', content=_website_expected_default_html(Code='NoSuchBucket'))
 #------------- IndexDocument only, successes
-@attr(resource='bucket')
-@attr(method='get')
-@attr(operation='list')
-@attr(assertion='non-empty public buckets via s3website return page for /, where page is public')
-@attr('s3website')
-@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
-@timed(10)
+@pytest.mark.s3website
+@pytest.mark.fails_on_dbstore
+@pytest.mark.timeout(10)
 def test_website_public_bucket_list_public_index():
     bucket = get_new_bucket()
     f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc'])
@@ -296,17 +267,13 @@ def test_website_public_bucket_list_public_index():
     body = res.read()
     print(body)
     indexstring = bytes(indexstring, 'utf-8')
-    eq(body, indexstring) # default content should match index.html set content
+    assert body == indexstring # default content should match index.html set content
     __website_expected_reponse_status(res, 200, 'OK')
     indexhtml.delete()
     bucket.delete()
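nose's `@timed(10)` let the test run to completion and failed it afterwards if it took too long; `@pytest.mark.timeout(10)` relies on the pytest-timeout plugin, which interrupts the test at the deadline instead. Sketch:

    import time
    import pytest

    @pytest.mark.timeout(10)  # requires the pytest-timeout plugin
    def test_finishes_quickly():
        time.sleep(0.1)  # comfortably inside the 10s budget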
-@attr(resource='bucket')
-@attr(method='get')
-@attr(operation='list')
-@attr(assertion='non-empty private buckets via s3website return page for /, where page is private')
-@attr('s3website')
-@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
+@pytest.mark.s3website
+@pytest.mark.fails_on_dbstore
 def test_website_private_bucket_list_public_index():
     bucket = get_new_bucket()
     f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc'])
@@ -325,18 +292,14 @@ def test_website_private_bucket_list_public_index():
     body = res.read()
     print(body)
     indexstring = bytes(indexstring, 'utf-8')
-    eq(body, indexstring, 'default content should match index.html set content')
+    assert body == indexstring, 'default content should match index.html set content'
     indexhtml.delete()
     bucket.delete()
 # ---------- IndexDocument only, failures
-@attr(resource='bucket')
-@attr(method='get')
-@attr(operation='list')
-@attr(assertion='empty private buckets via s3website return a 403 for /')
-@attr('s3website')
-@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
+@pytest.mark.s3website
+@pytest.mark.fails_on_dbstore
 def test_website_private_bucket_list_empty():
     bucket = get_new_bucket()
     f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc'])
@@ -347,12 +310,8 @@ def test_website_private_bucket_list_empty():
     _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied', content=_website_expected_default_html(Code='AccessDenied'))
     bucket.delete()

-@attr(resource='bucket')
-@attr(method='get')
-@attr(operation='list')
-@attr(assertion='empty public buckets via s3website return a 404 for /')
-@attr('s3website')
-@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
+@pytest.mark.s3website
+@pytest.mark.fails_on_dbstore
 def test_website_public_bucket_list_empty():
     bucket = get_new_bucket()
     f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc'])
@@ -362,12 +321,8 @@ def test_website_public_bucket_list_empty():
     _website_expected_error_response(res, bucket.name, 404, 'Not Found', 'NoSuchKey', content=_website_expected_default_html(Code='NoSuchKey'))
     bucket.delete()

-@attr(resource='bucket')
-@attr(method='get')
-@attr(operation='list')
-@attr(assertion='non-empty public buckets via s3website return page for /, where page is private')
-@attr('s3website')
-@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
+@pytest.mark.s3website
+@pytest.mark.fails_on_dbstore
 def test_website_public_bucket_list_private_index():
     bucket = get_new_bucket()
     f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc'])
@@ -387,12 +342,8 @@ def test_website_public_bucket_list_private_index():
     indexhtml.delete()
     bucket.delete()

-@attr(resource='bucket')
-@attr(method='get')
-@attr(operation='list')
-@attr(assertion='non-empty private buckets via s3website return page for /, where page is private')
-@attr('s3website')
-@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
+@pytest.mark.s3website
+@pytest.mark.fails_on_dbstore
 def test_website_private_bucket_list_private_index():
     bucket = get_new_bucket()
     f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc'])
@@ -413,12 +364,8 @@ def test_website_private_bucket_list_private_index():
     bucket.delete()
# ---------- IndexDocument & ErrorDocument, failures due to errordoc assigned but missing
|
# ---------- IndexDocument & ErrorDocument, failures due to errordoc assigned but missing
|
||||||
@attr(resource='bucket')
|
@pytest.mark.s3website
|
||||||
@attr(method='get')
|
@pytest.mark.fails_on_dbstore
|
||||||
@attr(operation='list')
|
|
||||||
@attr(assertion='empty private buckets via s3website return a 403 for /, missing errordoc')
|
|
||||||
@attr('s3website')
|
|
||||||
@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
|
|
||||||
def test_website_private_bucket_list_empty_missingerrordoc():
|
def test_website_private_bucket_list_empty_missingerrordoc():
|
||||||
bucket = get_new_bucket()
|
bucket = get_new_bucket()
|
||||||
f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc'])
|
f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc'])
|
||||||
|
@ -429,12 +376,8 @@ def test_website_private_bucket_list_empty_missingerrordoc():
|
||||||
|
|
||||||
bucket.delete()
|
bucket.delete()
|
||||||
|
|
||||||
@attr(resource='bucket')
|
@pytest.mark.s3website
|
||||||
@attr(method='get')
|
@pytest.mark.fails_on_dbstore
|
||||||
@attr(operation='list')
|
|
||||||
@attr(assertion='empty public buckets via s3website return a 404 for /, missing errordoc')
|
|
||||||
@attr('s3website')
|
|
||||||
@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
|
|
||||||
def test_website_public_bucket_list_empty_missingerrordoc():
|
def test_website_public_bucket_list_empty_missingerrordoc():
|
||||||
bucket = get_new_bucket()
|
bucket = get_new_bucket()
|
||||||
f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc'])
|
f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc'])
|
||||||
|
@ -444,12 +387,8 @@ def test_website_public_bucket_list_empty_missingerrordoc():
|
||||||
_website_expected_error_response(res, bucket.name, 404, 'Not Found', 'NoSuchKey')
|
_website_expected_error_response(res, bucket.name, 404, 'Not Found', 'NoSuchKey')
|
||||||
bucket.delete()
|
bucket.delete()
|
||||||
|
|
||||||
@attr(resource='bucket')
|
@pytest.mark.s3website
|
||||||
@attr(method='get')
|
@pytest.mark.fails_on_dbstore
|
||||||
@attr(operation='list')
|
|
||||||
@attr(assertion='non-empty public buckets via s3website return page for /, where page is private, missing errordoc')
|
|
||||||
@attr('s3website')
|
|
||||||
@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
|
|
||||||
def test_website_public_bucket_list_private_index_missingerrordoc():
|
def test_website_public_bucket_list_private_index_missingerrordoc():
|
||||||
bucket = get_new_bucket()
|
bucket = get_new_bucket()
|
||||||
f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc'])
|
f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc'])
|
||||||
|
@ -468,12 +407,8 @@ def test_website_public_bucket_list_private_index_missingerrordoc():
|
||||||
indexhtml.delete()
|
indexhtml.delete()
|
||||||
bucket.delete()
|
bucket.delete()
|
||||||
|
|
||||||
@attr(resource='bucket')
|
@pytest.mark.s3website
|
||||||
@attr(method='get')
|
@pytest.mark.fails_on_dbstore
|
||||||
@attr(operation='list')
|
|
||||||
@attr(assertion='non-empty private buckets via s3website return page for /, where page is private, missing errordoc')
|
|
||||||
@attr('s3website')
|
|
||||||
@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
|
|
||||||
def test_website_private_bucket_list_private_index_missingerrordoc():
|
def test_website_private_bucket_list_private_index_missingerrordoc():
|
||||||
bucket = get_new_bucket()
|
bucket = get_new_bucket()
|
||||||
f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc'])
|
f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc'])
|
||||||
|
@ -493,12 +428,8 @@ def test_website_private_bucket_list_private_index_missingerrordoc():
|
||||||
bucket.delete()
|
bucket.delete()
|
||||||
|
|
||||||
# ---------- IndexDocument & ErrorDocument, failures due to errordoc assigned but not accessible
|
# ---------- IndexDocument & ErrorDocument, failures due to errordoc assigned but not accessible
|
||||||
@attr(resource='bucket')
|
@pytest.mark.s3website
|
||||||
@attr(method='get')
|
@pytest.mark.fails_on_dbstore
|
||||||
@attr(operation='list')
|
|
||||||
@attr(assertion='empty private buckets via s3website return a 403 for /, blocked errordoc')
|
|
||||||
@attr('s3website')
|
|
||||||
@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
|
|
||||||
def test_website_private_bucket_list_empty_blockederrordoc():
|
def test_website_private_bucket_list_empty_blockederrordoc():
|
||||||
bucket = get_new_bucket()
|
bucket = get_new_bucket()
|
||||||
f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc'])
|
f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc'])
|
||||||
|
@ -516,17 +447,13 @@ def test_website_private_bucket_list_empty_blockederrordoc():
|
||||||
print(body)
|
print(body)
|
||||||
_website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied', content=_website_expected_default_html(Code='AccessDenied'), body=body)
|
_website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied', content=_website_expected_default_html(Code='AccessDenied'), body=body)
|
||||||
errorstring = bytes(errorstring, 'utf-8')
|
errorstring = bytes(errorstring, 'utf-8')
|
||||||
ok(errorstring not in body, 'error content should NOT match error.html set content')
|
assert errorstring not in body, 'error content should NOT match error.html set content'
|
||||||
|
|
||||||
errorhtml.delete()
|
errorhtml.delete()
|
||||||
bucket.delete()
|
bucket.delete()
|
||||||
|
|
||||||
@attr(resource='bucket')
|
@pytest.mark.s3website
|
||||||
@attr(method='get')
|
@pytest.mark.fails_on_dbstore
|
||||||
@attr(operation='list')
|
|
||||||
@attr(assertion='check if there is an invalid payload after serving error doc')
|
|
||||||
@attr('s3website')
|
|
||||||
@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
|
|
||||||
def test_website_public_bucket_list_pubilc_errordoc():
|
def test_website_public_bucket_list_pubilc_errordoc():
|
||||||
bucket = get_new_bucket()
|
bucket = get_new_bucket()
|
||||||
f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc'])
|
f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc'])
|
||||||
|
@ -567,17 +494,13 @@ def test_website_public_bucket_list_pubilc_errordoc():
|
||||||
except socket.timeout:
|
except socket.timeout:
|
||||||
print('no invalid payload')
|
print('no invalid payload')
|
||||||
|
|
||||||
ok(resp_len == 0, 'invalid payload')
|
assert resp_len == 0, 'invalid payload'
|
||||||
|
|
||||||
errorhtml.delete()
|
errorhtml.delete()
|
||||||
bucket.delete()
|
bucket.delete()
|
||||||
|
|
||||||
@attr(resource='bucket')
|
@pytest.mark.s3website
|
||||||
@attr(method='get')
|
@pytest.mark.fails_on_dbstore
|
||||||
@attr(operation='list')
|
|
||||||
@attr(assertion='empty public buckets via s3website return a 404 for /, blocked errordoc')
|
|
||||||
@attr('s3website')
|
|
||||||
@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
|
|
||||||
def test_website_public_bucket_list_empty_blockederrordoc():
|
def test_website_public_bucket_list_empty_blockederrordoc():
|
||||||
bucket = get_new_bucket()
|
bucket = get_new_bucket()
|
||||||
f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc'])
|
f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc'])
|
||||||
|
@ -594,17 +517,13 @@ def test_website_public_bucket_list_empty_blockederrordoc():
|
||||||
print(body)
|
print(body)
|
||||||
_website_expected_error_response(res, bucket.name, 404, 'Not Found', 'NoSuchKey', content=_website_expected_default_html(Code='NoSuchKey'), body=body)
|
_website_expected_error_response(res, bucket.name, 404, 'Not Found', 'NoSuchKey', content=_website_expected_default_html(Code='NoSuchKey'), body=body)
|
||||||
errorstring = bytes(errorstring, 'utf-8')
|
errorstring = bytes(errorstring, 'utf-8')
|
||||||
ok(errorstring not in body, 'error content should match error.html set content')
|
assert errorstring not in body, 'error content should match error.html set content'
|
||||||
|
|
||||||
errorhtml.delete()
|
errorhtml.delete()
|
||||||
bucket.delete()
|
bucket.delete()
|
||||||
|
|
||||||
@attr(resource='bucket')
|
@pytest.mark.s3website
|
||||||
@attr(method='get')
|
@pytest.mark.fails_on_dbstore
|
||||||
@attr(operation='list')
|
|
||||||
@attr(assertion='non-empty public buckets via s3website return page for /, where page is private, blocked errordoc')
|
|
||||||
@attr('s3website')
|
|
||||||
@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
|
|
||||||
def test_website_public_bucket_list_private_index_blockederrordoc():
|
def test_website_public_bucket_list_private_index_blockederrordoc():
|
||||||
bucket = get_new_bucket()
|
bucket = get_new_bucket()
|
||||||
f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc'])
|
f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc'])
|
||||||
|
@ -626,18 +545,14 @@ def test_website_public_bucket_list_private_index_blockederrordoc():
|
||||||
print(body)
|
print(body)
|
||||||
_website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied', content=_website_expected_default_html(Code='AccessDenied'), body=body)
|
_website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied', content=_website_expected_default_html(Code='AccessDenied'), body=body)
|
||||||
errorstring = bytes(errorstring, 'utf-8')
|
errorstring = bytes(errorstring, 'utf-8')
|
||||||
ok(errorstring not in body, 'error content should match error.html set content')
|
assert errorstring not in body, 'error content should match error.html set content'
|
||||||
|
|
||||||
indexhtml.delete()
|
indexhtml.delete()
|
||||||
errorhtml.delete()
|
errorhtml.delete()
|
||||||
bucket.delete()
|
bucket.delete()
|
||||||
|
|
||||||
-@attr(resource='bucket')
-@attr(method='get')
-@attr(operation='list')
-@attr(assertion='non-empty private buckets via s3website return page for /, where page is private, blocked errordoc')
-@attr('s3website')
-@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
+@pytest.mark.s3website
+@pytest.mark.fails_on_dbstore
 def test_website_private_bucket_list_private_index_blockederrordoc():
     bucket = get_new_bucket()
     f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc'])
@@ -659,19 +574,15 @@ def test_website_private_bucket_list_private_index_blockederrordoc():
     print(body)
     _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied', content=_website_expected_default_html(Code='AccessDenied'), body=body)
     errorstring = bytes(errorstring, 'utf-8')
-    ok(errorstring not in body, 'error content should match error.html set content')
+    assert errorstring not in body, 'error content should match error.html set content'

     indexhtml.delete()
     errorhtml.delete()
     bucket.delete()

 # ---------- IndexDocument & ErrorDocument, failures with errordoc available
-@attr(resource='bucket')
-@attr(method='get')
-@attr(operation='list')
-@attr(assertion='empty private buckets via s3website return a 403 for /, good errordoc')
-@attr('s3website')
-@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
+@pytest.mark.s3website
+@pytest.mark.fails_on_dbstore
 def test_website_private_bucket_list_empty_gooderrordoc():
     bucket = get_new_bucket()
     f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc'])
@@ -689,12 +600,8 @@ def test_website_private_bucket_list_empty_gooderrordoc():
     errorhtml.delete()
     bucket.delete()

-@attr(resource='bucket')
-@attr(method='get')
-@attr(operation='list')
-@attr(assertion='empty public buckets via s3website return a 404 for /, good errordoc')
-@attr('s3website')
-@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
+@pytest.mark.s3website
+@pytest.mark.fails_on_dbstore
 def test_website_public_bucket_list_empty_gooderrordoc():
     bucket = get_new_bucket()
     f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc'])
@@ -713,12 +620,8 @@ def test_website_public_bucket_list_empty_gooderrordoc():
     errorhtml.delete()
     bucket.delete()

-@attr(resource='bucket')
-@attr(method='get')
-@attr(operation='list')
-@attr(assertion='non-empty public buckets via s3website return page for /, where page is private')
-@attr('s3website')
-@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
+@pytest.mark.s3website
+@pytest.mark.fails_on_dbstore
 def test_website_public_bucket_list_private_index_gooderrordoc():
     bucket = get_new_bucket()
     f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc'])
@@ -742,12 +645,8 @@ def test_website_public_bucket_list_private_index_gooderrordoc():
     errorhtml.delete()
     bucket.delete()

-@attr(resource='bucket')
-@attr(method='get')
-@attr(operation='list')
-@attr(assertion='non-empty private buckets via s3website return page for /, where page is private')
-@attr('s3website')
-@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
+@pytest.mark.s3website
+@pytest.mark.fails_on_dbstore
 def test_website_private_bucket_list_private_index_gooderrordoc():
     bucket = get_new_bucket()
     f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc'])
@@ -772,12 +671,8 @@ def test_website_private_bucket_list_private_index_gooderrordoc():
     bucket.delete()
 # ------ RedirectAll tests
-@attr(resource='bucket')
-@attr(method='get')
-@attr(operation='list')
-@attr(assertion='RedirectAllRequestsTo without protocol should TODO')
-@attr('s3website')
-@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
+@pytest.mark.s3website
+@pytest.mark.fails_on_dbstore
 def test_website_bucket_private_redirectall_base():
     bucket = get_new_bucket()
     f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['RedirectAll'])
@@ -789,12 +684,8 @@ def test_website_bucket_private_redirectall_base():

     bucket.delete()

-@attr(resource='bucket')
-@attr(method='get')
-@attr(operation='list')
-@attr(assertion='RedirectAllRequestsTo without protocol should TODO')
-@attr('s3website')
-@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
+@pytest.mark.s3website
+@pytest.mark.fails_on_dbstore
 def test_website_bucket_private_redirectall_path():
     bucket = get_new_bucket()
     f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['RedirectAll'])
@@ -808,12 +699,8 @@ def test_website_bucket_private_redirectall_path():

     bucket.delete()

-@attr(resource='bucket')
-@attr(method='get')
-@attr(operation='list')
-@attr(assertion='RedirectAllRequestsTo without protocol should TODO')
-@attr('s3website')
-@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
+@pytest.mark.s3website
+@pytest.mark.fails_on_dbstore
 def test_website_bucket_private_redirectall_path_upgrade():
     bucket = get_new_bucket()
     x = string.Template(WEBSITE_CONFIGS_XMLFRAG['RedirectAll+Protocol']).safe_substitute(RedirectAllRequestsTo_Protocol='https')
@@ -829,13 +716,9 @@ def test_website_bucket_private_redirectall_path_upgrade():
     bucket.delete()
 # ------ x-amz redirect tests
-@attr(resource='bucket')
-@attr(method='get')
-@attr(operation='list')
-@attr(assertion='x-amz-website-redirect-location should not fire without websiteconf')
-@attr('s3website')
-@attr('x-amz-website-redirect-location')
-@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
+@pytest.mark.s3website
+@pytest.mark.s3website_redirect_location
+@pytest.mark.fails_on_dbstore
 def test_website_xredirect_nonwebsite():
     bucket = get_new_bucket()
     #f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['RedirectAll'])
@@ -847,7 +730,7 @@ def test_website_xredirect_nonwebsite():
     headers = {'x-amz-website-redirect-location': redirect_dest}
     k.set_contents_from_string(content, headers=headers, policy='public-read')
     redirect = k.get_redirect()
-    eq(k.get_redirect(), redirect_dest)
+    assert k.get_redirect() == redirect_dest

     res = _website_request(bucket.name, '/page')
     body = res.read()
@@ -861,13 +744,9 @@ def test_website_xredirect_nonwebsite():
     k.delete()
     bucket.delete()

-@attr(resource='bucket')
-@attr(method='get')
-@attr(operation='list')
-@attr(assertion='x-amz-website-redirect-location should fire websiteconf, relative path, public key')
-@attr('s3website')
-@attr('x-amz-website-redirect-location')
-@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
+@pytest.mark.s3website
+@pytest.mark.s3website_redirect_location
+@pytest.mark.fails_on_dbstore
 def test_website_xredirect_public_relative():
     bucket = get_new_bucket()
     f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc'])
@@ -879,7 +758,7 @@ def test_website_xredirect_public_relative():
     headers = {'x-amz-website-redirect-location': redirect_dest}
     k.set_contents_from_string(content, headers=headers, policy='public-read')
     redirect = k.get_redirect()
-    eq(k.get_redirect(), redirect_dest)
+    assert k.get_redirect() == redirect_dest

     res = _website_request(bucket.name, '/page')
     #new_url = get_website_url(bucket_name=bucket.name, path=redirect_dest)
@@ -888,13 +767,9 @@ def test_website_xredirect_public_relative():
     k.delete()
     bucket.delete()
-@attr(resource='bucket')
-@attr(method='get')
-@attr(operation='list')
-@attr(assertion='x-amz-website-redirect-location should fire websiteconf, absolute, public key')
-@attr('s3website')
-@attr('x-amz-website-redirect-location')
-@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
+@pytest.mark.s3website
+@pytest.mark.s3website_redirect_location
+@pytest.mark.fails_on_dbstore
 def test_website_xredirect_public_abs():
     bucket = get_new_bucket()
     f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc'])
@@ -906,7 +781,7 @@ def test_website_xredirect_public_abs():
     headers = {'x-amz-website-redirect-location': redirect_dest}
     k.set_contents_from_string(content, headers=headers, policy='public-read')
     redirect = k.get_redirect()
-    eq(k.get_redirect(), redirect_dest)
+    assert k.get_redirect() == redirect_dest

     res = _website_request(bucket.name, '/page')
     new_url = get_website_url(proto='http', hostname='example.com', path='/foo')
@@ -915,13 +790,9 @@ def test_website_xredirect_public_abs():
     k.delete()
     bucket.delete()

-@attr(resource='bucket')
-@attr(method='get')
-@attr(operation='list')
-@attr(assertion='x-amz-website-redirect-location should fire websiteconf, relative path, private key')
-@attr('s3website')
-@attr('x-amz-website-redirect-location')
-@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
+@pytest.mark.s3website
+@pytest.mark.s3website_redirect_location
+@pytest.mark.fails_on_dbstore
 def test_website_xredirect_private_relative():
     bucket = get_new_bucket()
     f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc'])
@@ -933,7 +804,7 @@ def test_website_xredirect_private_relative():
     headers = {'x-amz-website-redirect-location': redirect_dest}
     k.set_contents_from_string(content, headers=headers, policy='private')
     redirect = k.get_redirect()
-    eq(k.get_redirect(), redirect_dest)
+    assert k.get_redirect() == redirect_dest

     res = _website_request(bucket.name, '/page')
     # We get a 403 because the page is private
@@ -942,13 +813,9 @@ def test_website_xredirect_private_relative():
     k.delete()
     bucket.delete()
-@attr(resource='bucket')
-@attr(method='get')
-@attr(operation='list')
-@attr(assertion='x-amz-website-redirect-location should fire websiteconf, absolute, private key')
-@attr('s3website')
-@attr('x-amz-website-redirect-location')
-@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
+@pytest.mark.s3website
+@pytest.mark.s3website_redirect_location
+@pytest.mark.fails_on_dbstore
 def test_website_xredirect_private_abs():
     bucket = get_new_bucket()
     f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc'])
@@ -960,7 +827,7 @@ def test_website_xredirect_private_abs():
     headers = {'x-amz-website-redirect-location': redirect_dest}
     k.set_contents_from_string(content, headers=headers, policy='private')
     redirect = k.get_redirect()
-    eq(k.get_redirect(), redirect_dest)
+    assert k.get_redirect() == redirect_dest

     res = _website_request(bucket.name, '/page')
     new_url = get_website_url(proto='http', hostname='example.com', path='/foo')
@@ -1173,8 +1040,6 @@ def routing_teardown(**kwargs):
         print('Deleting', str(o))
         o.delete()

-@common.with_setup_kwargs(setup=routing_setup, teardown=routing_teardown)
-#@timed(10)
 def routing_check(*args, **kwargs):
     bucket = kwargs['bucket']
     args=args[0]
@@ -1200,8 +1065,8 @@ def routing_check(*args, **kwargs):
     if args['code'] >= 200 and args['code'] < 300:
         #body = res.read()
         #print(body)
-        #eq(body, args['content'], 'default content should match index.html set content')
-        ok(int(res.getheader('Content-Length', -1)) > 0)
+        #assert body == args['content'], 'default content should match index.html set content'
+        assert int(res.getheader('Content-Length', -1)) > 0
     elif args['code'] >= 300 and args['code'] < 400:
         _website_expected_redirect_response(res, args['code'], IGNORE_FIELD, new_url)
     elif args['code'] >= 400:
@@ -1209,9 +1074,9 @@ def routing_check(*args, **kwargs):
     else:
         assert(False)

-@attr('s3website_RoutingRules')
-@attr('s3website')
-@nose.with_setup(setup=check_can_test_website, teardown=common.teardown)
+@pytest.mark.s3website_routing_rules
+@pytest.mark.s3website
+@pytest.mark.fails_on_dbstore
 def test_routing_generator():
     for t in ROUTING_RULES_TESTS:
         if 'xml' in t and 'RoutingRules' in t['xml'] and len(t['xml']['RoutingRules']) > 0:
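test_routing_generator keeps the old nose-generator loop as one big test. A more idiomatic pytest shape would be parametrization, which reports each routing case separately; a hypothetical reshaping (case data invented):

    import pytest

    CASES = [('redirect-host', 301), ('index-hit', 200)]

    @pytest.mark.parametrize('name,code', CASES)
    def test_routing_case(name, code):
        assert code in (200, 301)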
@@ -1,11 +1,9 @@
-from nose.tools import eq_ as eq
-
 from . import utils

 def test_generate():
     FIVE_MB = 5 * 1024 * 1024
-    eq(len(''.join(utils.generate_random(0))), 0)
-    eq(len(''.join(utils.generate_random(1))), 1)
-    eq(len(''.join(utils.generate_random(FIVE_MB - 1))), FIVE_MB - 1)
-    eq(len(''.join(utils.generate_random(FIVE_MB))), FIVE_MB)
-    eq(len(''.join(utils.generate_random(FIVE_MB + 1))), FIVE_MB + 1)
+    assert len(''.join(utils.generate_random(0))) == 0
+    assert len(''.join(utils.generate_random(1))) == 1
+    assert len(''.join(utils.generate_random(FIVE_MB - 1))) == FIVE_MB - 1
+    assert len(''.join(utils.generate_random(FIVE_MB))) == FIVE_MB
+    assert len(''.join(utils.generate_random(FIVE_MB + 1))) == FIVE_MB + 1
@@ -3,8 +3,6 @@ import requests
 import string
 import time

-from nose.tools import eq_ as eq
-
 def assert_raises(excClass, callableObj, *args, **kwargs):
     """
     Like unittest.TestCase.assertRaises, but returns the exception.
@@ -48,7 +46,7 @@ def region_sync_meta(targets, region):
     conf = r.conf
     if conf.sync_agent_addr:
         ret = requests.post('http://{addr}:{port}/metadata/incremental'.format(addr = conf.sync_agent_addr, port = conf.sync_agent_port))
-        eq(ret.status_code, 200)
+        assert ret.status_code == 200
     if conf.sync_meta_wait:
         time.sleep(conf.sync_meta_wait)
|
|
|
@@ -1,3 +1,4 @@
+import pytest
 import boto3
 from botocore import UNSIGNED
 from botocore.client import Config
@@ -12,6 +13,7 @@ import random
 import string
 import itertools
 import urllib3
+import re

 config = munch.Munch

@@ -80,18 +82,13 @@ def get_objects_list(bucket, client=None, prefix=None):
 # generator function that returns object listings in batches, where each
 # batch is a list of dicts compatible with delete_objects()
 def list_versions(client, bucket, batch_size):
-    key_marker = ''
-    version_marker = ''
+    kwargs = {'Bucket': bucket, 'MaxKeys': batch_size}
     truncated = True
     while truncated:
-        listing = client.list_object_versions(
-                Bucket=bucket,
-                KeyMarker=key_marker,
-                VersionIdMarker=version_marker,
-                MaxKeys=batch_size)
+        listing = client.list_object_versions(**kwargs)

-        key_marker = listing.get('NextKeyMarker')
-        version_marker = listing.get('NextVersionIdMarker')
+        kwargs['KeyMarker'] = listing.get('NextKeyMarker')
+        kwargs['VersionIdMarker'] = listing.get('NextVersionIdMarker')
         truncated = listing['IsTruncated']

         objs = listing.get('Versions', []) + listing.get('DeleteMarkers', [])
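The rewrite above folds the two pagination markers into a single request dict that is updated in place. As a standalone sketch of the same pattern, paired with the delete_objects() consumer the comment mentions; the bucket name is made up and the endpoint/credentials are assumed to be configured elsewhere:

import boto3

client = boto3.client('s3')

def delete_all_versions(bucket, batch_size=100):
    kwargs = {'Bucket': bucket, 'MaxKeys': batch_size}
    truncated = True
    while truncated:
        listing = client.list_object_versions(**kwargs)
        # No markers on the first call; later calls resume where the last
        # truncated listing stopped. When IsTruncated is False the markers
        # are never read again, so a missing NextKeyMarker is harmless.
        kwargs['KeyMarker'] = listing.get('NextKeyMarker')
        kwargs['VersionIdMarker'] = listing.get('NextVersionIdMarker')
        truncated = listing['IsTruncated']

        objs = listing.get('Versions', []) + listing.get('DeleteMarkers', [])
        if objs:
            client.delete_objects(Bucket=bucket, Delete={'Objects': [
                {'Key': o['Key'], 'VersionId': o['VersionId']} for o in objs]})

delete_all_versions('example-bucket')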
@@ -163,7 +160,21 @@ def nuke_prefixed_buckets(prefix, client=None):

     print('Done with cleanup of buckets in tests.')

-def setup():
+def configured_storage_classes():
+    sc = ['STANDARD']
+
+    extra_sc = re.split(r"[\b\W\b]+", config.storage_classes)
+
+    for item in extra_sc:
+        if item != 'STANDARD':
+            sc.append(item)
+
+    sc = [i for i in sc if i]
+    print("storage classes configured: " + str(sc))
+
+    return sc
+
+def configure():
     cfg = configparser.RawConfigParser()
     try:
         path = os.environ['S3TEST_CONF']
@@ -226,6 +237,17 @@ def setup():
         config.main_api_name = ""
         pass

+    try:
+        config.storage_classes = cfg.get('s3 main',"storage_classes")
+    except (configparser.NoSectionError, configparser.NoOptionError):
+        config.storage_classes = ""
+        pass
+
+    try:
+        config.lc_debug_interval = int(cfg.get('s3 main',"lc_debug_interval"))
+    except (configparser.NoSectionError, configparser.NoOptionError):
+        config.lc_debug_interval = 10
+
     config.alt_access_key = cfg.get('s3 alt',"access_key")
     config.alt_secret_key = cfg.get('s3 alt',"secret_key")
     config.alt_display_name = cfg.get('s3 alt',"display_name")
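For reference, here is what the new configured_storage_classes() parsing does with a typical comma-separated setting. The character class contains \W, so the split happens on any run of non-word characters (commas, spaces, semicolons); the final filter drops the empty strings an empty or ragged setting would produce. The sample value is made up:

import re

storage_classes = "LUKEWARM, COLD"   # illustrative config value

sc = ['STANDARD']
for item in re.split(r"[\b\W\b]+", storage_classes):
    if item != 'STANDARD':
        sc.append(item)
sc = [i for i in sc if i]   # drops '' entries from empty/edge splits
print(sc)                   # ['STANDARD', 'LUKEWARM', 'COLD']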
@@ -237,21 +259,44 @@ def setup():
     config.tenant_display_name = cfg.get('s3 tenant',"display_name")
     config.tenant_user_id = cfg.get('s3 tenant',"user_id")
     config.tenant_email = cfg.get('s3 tenant',"email")
+    config.tenant_name = cfg.get('s3 tenant',"tenant")
+
+    config.iam_access_key = cfg.get('iam',"access_key")
+    config.iam_secret_key = cfg.get('iam',"secret_key")
+    config.iam_display_name = cfg.get('iam',"display_name")
+    config.iam_user_id = cfg.get('iam',"user_id")
+    config.iam_email = cfg.get('iam',"email")
+
+    config.iam_root_access_key = cfg.get('iam root',"access_key")
+    config.iam_root_secret_key = cfg.get('iam root',"secret_key")
+    config.iam_root_user_id = cfg.get('iam root',"user_id")
+    config.iam_root_email = cfg.get('iam root',"email")
+
+    config.iam_alt_root_access_key = cfg.get('iam alt root',"access_key")
+    config.iam_alt_root_secret_key = cfg.get('iam alt root',"secret_key")
+    config.iam_alt_root_user_id = cfg.get('iam alt root',"user_id")
+    config.iam_alt_root_email = cfg.get('iam alt root',"email")

     # vars from the fixtures section
-    try:
-        template = cfg.get('fixtures', "bucket prefix")
-    except (configparser.NoOptionError):
-        template = 'test-{random}-'
+    template = cfg.get('fixtures', "bucket prefix", fallback='test-{random}-')
     prefix = choose_bucket_prefix(template=template)
+    template = cfg.get('fixtures', "iam name prefix", fallback="s3-tests-")
+    config.iam_name_prefix = choose_bucket_prefix(template=template)
+    template = cfg.get('fixtures', "iam path prefix", fallback="/s3-tests/")
+    config.iam_path_prefix = choose_bucket_prefix(template=template)
+
+    if cfg.has_section("s3 cloud"):
+        get_cloud_config(cfg)
+    else:
+        config.cloud_storage_class = None
+
+def setup():
     alt_client = get_alt_client()
     tenant_client = get_tenant_client()
     nuke_prefixed_buckets(prefix=prefix)
     nuke_prefixed_buckets(prefix=prefix, client=alt_client)
     nuke_prefixed_buckets(prefix=prefix, client=tenant_client)


 def teardown():
     alt_client = get_alt_client()
     tenant_client = get_tenant_client()
@@ -277,6 +322,17 @@ def teardown():
     except:
         pass

+@pytest.fixture(scope="package")
+def configfile():
+    configure()
+    return config
+
+@pytest.fixture(autouse=True)
+def setup_teardown(configfile):
+    setup()
+    yield
+    teardown()
+
 def check_webidentity():
     cfg = configparser.RawConfigParser()
     try:
@@ -294,6 +350,46 @@ def check_webidentity():
     config.webidentity_aud = cfg.get('webidentity', "aud")
     config.webidentity_token = cfg.get('webidentity', "token")
     config.webidentity_realm = cfg.get('webidentity', "KC_REALM")
+    config.webidentity_sub = cfg.get('webidentity', "sub")
+    config.webidentity_azp = cfg.get('webidentity', "azp")
+    config.webidentity_user_token = cfg.get('webidentity', "user_token")
+
+def get_cloud_config(cfg):
+    config.cloud_host = cfg.get('s3 cloud',"host")
+    config.cloud_port = int(cfg.get('s3 cloud',"port"))
+    config.cloud_is_secure = cfg.getboolean('s3 cloud', "is_secure")
+
+    proto = 'https' if config.cloud_is_secure else 'http'
+    config.cloud_endpoint = "%s://%s:%d" % (proto, config.cloud_host, config.cloud_port)
+
+    config.cloud_access_key = cfg.get('s3 cloud',"access_key")
+    config.cloud_secret_key = cfg.get('s3 cloud',"secret_key")
+
+    try:
+        config.cloud_storage_class = cfg.get('s3 cloud', "cloud_storage_class")
+    except (configparser.NoSectionError, configparser.NoOptionError):
+        config.cloud_storage_class = None
+
+    try:
+        config.cloud_retain_head_object = cfg.get('s3 cloud',"retain_head_object")
+    except (configparser.NoSectionError, configparser.NoOptionError):
+        config.cloud_retain_head_object = None
+
+    try:
+        config.cloud_target_path = cfg.get('s3 cloud',"target_path")
+    except (configparser.NoSectionError, configparser.NoOptionError):
+        config.cloud_target_path = None
+
+    try:
+        config.cloud_target_storage_class = cfg.get('s3 cloud',"target_storage_class")
+    except (configparser.NoSectionError, configparser.NoOptionError):
+        config.cloud_target_storage_class = 'STANDARD'
+
+    try:
+        config.cloud_regular_storage_class = cfg.get('s3 cloud', "storage_class")
+    except (configparser.NoSectionError, configparser.NoOptionError):
+        config.cloud_regular_storage_class = None
+

 def get_client(client_config=None):
     if client_config == None:
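How a test module picks the new fixtures up: importing them by name is what brings them into pytest's fixture namespace, which is exactly what the test_headers.py hunk further down does. A sketch; the test body is illustrative only:

import pytest
from . import configfile, setup_teardown, get_client, get_new_bucket

@pytest.mark.auth_common
def test_roundtrip_example():
    # configure() has run once for the package; the autouse setup_teardown
    # fixture has already nuked stale prefixed buckets for this test
    client = get_client()
    bucket = get_new_bucket()
    client.put_object(Bucket=bucket, Key='k', Body='v')
    assert client.get_object(Bucket=bucket, Key='k')['Body'].read() == b'v'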
@@ -318,52 +414,65 @@ def get_v2_client():
                         config=Config(signature_version='s3'))
     return client

-def get_sts_client(client_config=None):
-    if client_config == None:
-        client_config = Config(signature_version='s3v4')
+def get_sts_client(**kwargs):
+    kwargs.setdefault('aws_access_key_id', config.alt_access_key)
+    kwargs.setdefault('aws_secret_access_key', config.alt_secret_key)
+    kwargs.setdefault('config', Config(signature_version='s3v4'))

     client = boto3.client(service_name='sts',
-                        aws_access_key_id=config.alt_access_key,
-                        aws_secret_access_key=config.alt_secret_key,
-                        endpoint_url=config.default_endpoint,
-                        region_name='',
-                        use_ssl=config.default_is_secure,
-                        verify=config.default_ssl_verify,
-                        config=client_config)
+                        endpoint_url=config.default_endpoint,
+                        region_name='',
+                        use_ssl=config.default_is_secure,
+                        verify=config.default_ssl_verify,
+                        **kwargs)
     return client

-def get_iam_client(client_config=None):
-    cfg = configparser.RawConfigParser()
-    try:
-        path = os.environ['S3TEST_CONF']
-    except KeyError:
-        raise RuntimeError(
-            'To run tests, point environment '
-            + 'variable S3TEST_CONF to a config file.',
-            )
-    cfg.read(path)
-    if not cfg.has_section("iam"):
-        raise RuntimeError('Your config file is missing the "iam" section!')
-
-    config.iam_access_key = cfg.get('iam',"access_key")
-    config.iam_secret_key = cfg.get('iam',"secret_key")
-    config.iam_display_name = cfg.get('iam',"display_name")
-    config.iam_user_id = cfg.get('iam',"user_id")
-    config.iam_email = cfg.get('iam',"email")
-
-    if client_config == None:
-        client_config = Config(signature_version='s3v4')
+def get_iam_client(**kwargs):
+    kwargs.setdefault('aws_access_key_id', config.iam_access_key)
+    kwargs.setdefault('aws_secret_access_key', config.iam_secret_key)

     client = boto3.client(service_name='iam',
-                        aws_access_key_id=config.iam_access_key,
-                        aws_secret_access_key=config.iam_secret_key,
                         endpoint_url=config.default_endpoint,
                         region_name='',
                         use_ssl=config.default_is_secure,
                         verify=config.default_ssl_verify,
-                        config=client_config)
+                        **kwargs)
     return client

+def get_iam_s3client(**kwargs):
+    kwargs.setdefault('aws_access_key_id', config.iam_access_key)
+    kwargs.setdefault('aws_secret_access_key', config.iam_secret_key)
+    kwargs.setdefault('config', Config(signature_version='s3v4'))
+
+    client = boto3.client(service_name='s3',
+                        endpoint_url=config.default_endpoint,
+                        use_ssl=config.default_is_secure,
+                        verify=config.default_ssl_verify,
+                        **kwargs)
+    return client
+
+def get_iam_root_client(**kwargs):
+    kwargs.setdefault('service_name', 'iam')
+    kwargs.setdefault('aws_access_key_id', config.iam_root_access_key)
+    kwargs.setdefault('aws_secret_access_key', config.iam_root_secret_key)
+
+    return boto3.client(endpoint_url=config.default_endpoint,
+                        region_name='',
+                        use_ssl=config.default_is_secure,
+                        verify=config.default_ssl_verify,
+                        **kwargs)
+
+def get_iam_alt_root_client(**kwargs):
+    kwargs.setdefault('service_name', 'iam')
+    kwargs.setdefault('aws_access_key_id', config.iam_alt_root_access_key)
+    kwargs.setdefault('aws_secret_access_key', config.iam_alt_root_secret_key)
+
+    return boto3.client(endpoint_url=config.default_endpoint,
+                        region_name='',
+                        use_ssl=config.default_is_secure,
+                        verify=config.default_ssl_verify,
+                        **kwargs)
+
 def get_alt_client(client_config=None):
     if client_config == None:
         client_config = Config(signature_version='s3v4')
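The point of the **kwargs/setdefault rewrite above is that every default stays overridable per call without the helpers having to enumerate boto3's parameters. Illustrative calls, assuming the helpers above are in scope:

from botocore.client import Config

iam = get_iam_client()                                              # all defaults
iam_sigv2 = get_iam_client(config=Config(signature_version='s3'))  # override one
# even service_name is only a setdefault in the root helpers, so the
# same root credentials can back a different service client:
sts_root = get_iam_root_client(service_name='sts')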
@@ -377,6 +486,18 @@ def get_alt_client(client_config=None):
                         config=client_config)
     return client

+def get_cloud_client(client_config=None):
+    if client_config == None:
+        client_config = Config(signature_version='s3v4')
+
+    client = boto3.client(service_name='s3',
+                        aws_access_key_id=config.cloud_access_key,
+                        aws_secret_access_key=config.cloud_secret_key,
+                        endpoint_url=config.cloud_endpoint,
+                        use_ssl=config.cloud_is_secure,
+                        config=client_config)
+    return client
+
 def get_tenant_client(client_config=None):
     if client_config == None:
         client_config = Config(signature_version='s3v4')
@@ -390,6 +511,17 @@ def get_tenant_client(client_config=None):
                         config=client_config)
     return client

+def get_v2_tenant_client():
+    client_config = Config(signature_version='s3')
+    client = boto3.client(service_name='s3',
+                        aws_access_key_id=config.tenant_access_key,
+                        aws_secret_access_key=config.tenant_secret_key,
+                        endpoint_url=config.default_endpoint,
+                        use_ssl=config.default_is_secure,
+                        verify=config.default_ssl_verify,
+                        config=client_config)
+    return client
+
 def get_tenant_iam_client():

     client = boto3.client(service_name='iam',
@@ -401,6 +533,17 @@ def get_tenant_iam_client():
                         use_ssl=config.default_is_secure)
     return client

+def get_alt_iam_client():
+
+    client = boto3.client(service_name='iam',
+                        region_name='',
+                        aws_access_key_id=config.alt_access_key,
+                        aws_secret_access_key=config.alt_secret_key,
+                        endpoint_url=config.default_endpoint,
+                        verify=config.default_ssl_verify,
+                        use_ssl=config.default_is_secure)
+    return client
+
 def get_unauthenticated_client():
     client = boto3.client(service_name='s3',
                         aws_access_key_id='',
@@ -563,6 +706,9 @@ def get_tenant_aws_secret_key():
 def get_tenant_display_name():
     return config.tenant_display_name

+def get_tenant_name():
+    return config.tenant_name
+
 def get_tenant_user_id():
     return config.tenant_user_id
@@ -575,14 +721,62 @@ def get_thumbprint():
 def get_aud():
     return config.webidentity_aud

+def get_sub():
+    return config.webidentity_sub
+
+def get_azp():
+    return config.webidentity_azp
+
 def get_token():
     return config.webidentity_token

 def get_realm_name():
     return config.webidentity_realm

+def get_iam_name_prefix():
+    return config.iam_name_prefix
+
+def make_iam_name(name):
+    return config.iam_name_prefix + name
+
+def get_iam_path_prefix():
+    return config.iam_path_prefix
+
 def get_iam_access_key():
     return config.iam_access_key

 def get_iam_secret_key():
     return config.iam_secret_key

+def get_iam_root_user_id():
+    return config.iam_root_user_id
+
+def get_iam_root_email():
+    return config.iam_root_email
+
+def get_iam_alt_root_user_id():
+    return config.iam_alt_root_user_id
+
+def get_iam_alt_root_email():
+    return config.iam_alt_root_email
+
+def get_user_token():
+    return config.webidentity_user_token
+
+def get_cloud_storage_class():
+    return config.cloud_storage_class
+
+def get_cloud_retain_head_object():
+    return config.cloud_retain_head_object
+
+def get_cloud_regular_storage_class():
+    return config.cloud_regular_storage_class
+
+def get_cloud_target_path():
+    return config.cloud_target_path
+
+def get_cloud_target_storage_class():
+    return config.cloud_target_storage_class
+
+def get_lc_debug_interval():
+    return config.lc_debug_interval
s3tests_boto3/functional/iam.py (new file, +199 lines)
@@ -0,0 +1,199 @@
+from botocore.exceptions import ClientError
+import pytest
+
+from . import (
+    configfile,
+    get_iam_root_client,
+    get_iam_root_user_id,
+    get_iam_root_email,
+    get_iam_alt_root_client,
+    get_iam_alt_root_user_id,
+    get_iam_alt_root_email,
+    get_iam_path_prefix,
+)
+
+def nuke_user_keys(client, name):
+    p = client.get_paginator('list_access_keys')
+    for response in p.paginate(UserName=name):
+        for key in response['AccessKeyMetadata']:
+            try:
+                client.delete_access_key(UserName=name, AccessKeyId=key['AccessKeyId'])
+            except:
+                pass
+
+def nuke_user_policies(client, name):
+    p = client.get_paginator('list_user_policies')
+    for response in p.paginate(UserName=name):
+        for policy in response['PolicyNames']:
+            try:
+                client.delete_user_policy(UserName=name, PolicyName=policy)
+            except:
+                pass
+
+def nuke_attached_user_policies(client, name):
+    p = client.get_paginator('list_attached_user_policies')
+    for response in p.paginate(UserName=name):
+        for policy in response['AttachedPolicies']:
+            try:
+                client.detach_user_policy(UserName=name, PolicyArn=policy['PolicyArn'])
+            except:
+                pass
+
+def nuke_user(client, name):
+    # delete access keys, user policies, etc
+    try:
+        nuke_user_keys(client, name)
+    except:
+        pass
+    try:
+        nuke_user_policies(client, name)
+    except:
+        pass
+    try:
+        nuke_attached_user_policies(client, name)
+    except:
+        pass
+    client.delete_user(UserName=name)
+
+def nuke_users(client, **kwargs):
+    p = client.get_paginator('list_users')
+    for response in p.paginate(**kwargs):
+        for user in response['Users']:
+            try:
+                nuke_user(client, user['UserName'])
+            except:
+                pass
+
+def nuke_group_policies(client, name):
+    p = client.get_paginator('list_group_policies')
+    for response in p.paginate(GroupName=name):
+        for policy in response['PolicyNames']:
+            try:
+                client.delete_group_policy(GroupName=name, PolicyName=policy)
+            except:
+                pass
+
+def nuke_attached_group_policies(client, name):
+    p = client.get_paginator('list_attached_group_policies')
+    for response in p.paginate(GroupName=name):
+        for policy in response['AttachedPolicies']:
+            try:
+                client.detach_group_policy(GroupName=name, PolicyArn=policy['PolicyArn'])
+            except:
+                pass
+
+def nuke_group_users(client, name):
+    p = client.get_paginator('get_group')
+    for response in p.paginate(GroupName=name):
+        for user in response['Users']:
+            try:
+                client.remove_user_from_group(GroupName=name, UserName=user['UserName'])
+            except:
+                pass
+
+def nuke_group(client, name):
+    # delete group policies and remove all users
+    try:
+        nuke_group_policies(client, name)
+    except:
+        pass
+    try:
+        nuke_attached_group_policies(client, name)
+    except:
+        pass
+    try:
+        nuke_group_users(client, name)
+    except:
+        pass
+    client.delete_group(GroupName=name)
+
+def nuke_groups(client, **kwargs):
+    p = client.get_paginator('list_groups')
+    for response in p.paginate(**kwargs):
+        for user in response['Groups']:
+            try:
+                nuke_group(client, user['GroupName'])
+            except:
+                pass
+
+def nuke_role_policies(client, name):
+    p = client.get_paginator('list_role_policies')
+    for response in p.paginate(RoleName=name):
+        for policy in response['PolicyNames']:
+            try:
+                client.delete_role_policy(RoleName=name, PolicyName=policy)
+            except:
+                pass
+
+def nuke_attached_role_policies(client, name):
+    p = client.get_paginator('list_attached_role_policies')
+    for response in p.paginate(RoleName=name):
+        for policy in response['AttachedPolicies']:
+            try:
+                client.detach_role_policy(RoleName=name, PolicyArn=policy['PolicyArn'])
+            except:
+                pass
+
+def nuke_role(client, name):
+    # delete role policies, etc
+    try:
+        nuke_role_policies(client, name)
+    except:
+        pass
+    try:
+        nuke_attached_role_policies(client, name)
+    except:
+        pass
+    client.delete_role(RoleName=name)
+
+def nuke_roles(client, **kwargs):
+    p = client.get_paginator('list_roles')
+    for response in p.paginate(**kwargs):
+        for role in response['Roles']:
+            try:
+                nuke_role(client, role['RoleName'])
+            except:
+                pass
+
+def nuke_oidc_providers(client, prefix):
+    result = client.list_open_id_connect_providers()
+    for provider in result['OpenIDConnectProviderList']:
+        arn = provider['Arn']
+        if f':oidc-provider{prefix}' in arn:
+            try:
+                client.delete_open_id_connect_provider(OpenIDConnectProviderArn=arn)
+            except:
+                pass
+
+
+# fixture for iam account root user
+@pytest.fixture
+def iam_root(configfile):
+    client = get_iam_root_client()
+    try:
+        arn = client.get_user()['User']['Arn']
+        if not arn.endswith(':root'):
+            pytest.skip('[iam root] user does not have :root arn')
+    except ClientError as e:
+        pytest.skip('[iam root] user does not belong to an account')
+
+    yield client
+    nuke_users(client, PathPrefix=get_iam_path_prefix())
+    nuke_groups(client, PathPrefix=get_iam_path_prefix())
+    nuke_roles(client, PathPrefix=get_iam_path_prefix())
+    nuke_oidc_providers(client, get_iam_path_prefix())
+
+# fixture for iam alt account root user
+@pytest.fixture
+def iam_alt_root(configfile):
+    client = get_iam_alt_root_client()
+    try:
+        arn = client.get_user()['User']['Arn']
+        if not arn.endswith(':root'):
+            pytest.skip('[iam alt root] user does not have :root arn')
+    except ClientError as e:
+        pytest.skip('[iam alt root] user does not belong to an account')
+
+    yield client
+    nuke_users(client, PathPrefix=get_iam_path_prefix())
+    nuke_roles(client, PathPrefix=get_iam_path_prefix())
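A sketch of how a test consumes the iam_root fixture above: the fixture yields a ready client (or skips when the '[iam root]' credentials are not an account root user), and its teardown sweeps every user, group, role, and OIDC provider left under the shared path prefix. The user name here is illustrative:

from . import make_iam_name, get_iam_path_prefix

def test_account_create_user(iam_root):
    name = make_iam_name('ExampleUser')
    iam_root.create_user(UserName=name, Path=get_iam_path_prefix())
    assert iam_root.get_user(UserName=name)['User']['UserName'] == name
    # no explicit cleanup: the fixture's nuke_users() handles it afterwards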
@@ -37,10 +37,10 @@ class Policy(object):

         return json.dumps(policy_dict)

-def make_json_policy(action, resource, principal={"AWS": "*"}, conditions=None):
+def make_json_policy(action, resource, principal={"AWS": "*"}, effect="Allow", conditions=None):
     """
     Helper function to make single statement policies
     """
-    s = Statement(action, resource, principal, condition=conditions)
+    s = Statement(action, resource, principal, effect=effect, condition=conditions)
     p = Policy()
     return p.add_statement(s).to_json()
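Since effect defaults to "Allow", existing callers are unchanged; the new parameter exists so tests can build Deny statements with the same helper. An illustrative call, with a made-up bucket ARN:

deny_policy = make_json_policy("s3:GetObject",
                               "arn:aws:s3:::example-bucket/*",
                               effect="Deny")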
@@ -1,7 +1,5 @@
 import boto3
-from nose.tools import eq_ as eq
-from nose.plugins.attrib import attr
-import nose
+import pytest
 from botocore.exceptions import ClientError
 from email.utils import formatdate
@@ -10,6 +8,8 @@ from .utils import _get_status_and_error_code
 from .utils import _get_status

 from . import (
+    configfile,
+    setup_teardown,
     get_client,
     get_v2_client,
     get_new_bucket,
@@ -149,178 +149,97 @@ def _remove_header_create_bad_bucket(remove, client=None):

     return e

-def tag(*tags):
-    def wrap(func):
-        for tag in tags:
-            setattr(func, tag, True)
-        return func
-    return wrap
-
 #
 # common tests
 #

-@tag('auth_common')
-@attr(resource='object')
-@attr(method='put')
-@attr(operation='create w/invalid MD5')
-@attr(assertion='fails 400')
+@pytest.mark.auth_common
 def test_object_create_bad_md5_invalid_short():
     e = _add_header_create_bad_object({'Content-MD5':'YWJyYWNhZGFicmE='})
     status, error_code = _get_status_and_error_code(e.response)
-    eq(status, 400)
-    eq(error_code, 'InvalidDigest')
+    assert status == 400
+    assert error_code == 'InvalidDigest'

-@tag('auth_common')
-@attr(resource='object')
-@attr(method='put')
-@attr(operation='create w/mismatched MD5')
-@attr(assertion='fails 400')
+@pytest.mark.auth_common
 def test_object_create_bad_md5_bad():
     e = _add_header_create_bad_object({'Content-MD5':'rL0Y20xC+Fzt72VPzMSk2A=='})
     status, error_code = _get_status_and_error_code(e.response)
-    eq(status, 400)
-    eq(error_code, 'BadDigest')
+    assert status == 400
+    assert error_code == 'BadDigest'

-@tag('auth_common')
-@attr(resource='object')
-@attr(method='put')
-@attr(operation='create w/empty MD5')
-@attr(assertion='fails 400')
+@pytest.mark.auth_common
 def test_object_create_bad_md5_empty():
     e = _add_header_create_bad_object({'Content-MD5':''})
     status, error_code = _get_status_and_error_code(e.response)
-    eq(status, 400)
-    eq(error_code, 'InvalidDigest')
+    assert status == 400
+    assert error_code == 'InvalidDigest'

-@tag('auth_common')
-@attr(resource='object')
-@attr(method='put')
-@attr(operation='create w/no MD5 header')
-@attr(assertion='succeeds')
+@pytest.mark.auth_common
 def test_object_create_bad_md5_none():
     bucket_name, key_name = _remove_header_create_object('Content-MD5')
     client = get_client()
     client.put_object(Bucket=bucket_name, Key=key_name, Body='bar')

-@tag('auth_common')
-@attr(resource='object')
-@attr(method='put')
-@attr(operation='create w/Expect 200')
-@attr(assertion='garbage, but S3 succeeds!')
+@pytest.mark.auth_common
 def test_object_create_bad_expect_mismatch():
     bucket_name, key_name = _add_header_create_object({'Expect': 200})
     client = get_client()
     client.put_object(Bucket=bucket_name, Key=key_name, Body='bar')

-@tag('auth_common')
-@attr(resource='object')
-@attr(method='put')
-@attr(operation='create w/empty expect')
-@attr(assertion='succeeds ... should it?')
+@pytest.mark.auth_common
 def test_object_create_bad_expect_empty():
     bucket_name, key_name = _add_header_create_object({'Expect': ''})
     client = get_client()
     client.put_object(Bucket=bucket_name, Key=key_name, Body='bar')

-@tag('auth_common')
-@attr(resource='object')
-@attr(method='put')
-@attr(operation='create w/no expect')
-@attr(assertion='succeeds')
+@pytest.mark.auth_common
 def test_object_create_bad_expect_none():
     bucket_name, key_name = _remove_header_create_object('Expect')
     client = get_client()
     client.put_object(Bucket=bucket_name, Key=key_name, Body='bar')

-@tag('auth_common')
-@attr(resource='object')
-@attr(method='put')
-@attr(operation='create w/empty content length')
-@attr(assertion='fails 400')
+@pytest.mark.auth_common
 # TODO: remove 'fails_on_rgw' and once we have learned how to remove the content-length header
-@attr('fails_on_rgw')
+@pytest.mark.fails_on_rgw
 def test_object_create_bad_contentlength_empty():
     e = _add_header_create_bad_object({'Content-Length':''})
     status, error_code = _get_status_and_error_code(e.response)
-    eq(status, 400)
+    assert status == 400

-@tag('auth_common')
-@attr(resource='object')
-@attr(method='put')
-@attr(operation='create w/negative content length')
-@attr(assertion='fails 400')
-@attr('fails_on_mod_proxy_fcgi')
+@pytest.mark.auth_common
+@pytest.mark.fails_on_mod_proxy_fcgi
 def test_object_create_bad_contentlength_negative():
     client = get_client()
     bucket_name = get_new_bucket()
     key_name = 'foo'
     e = assert_raises(ClientError, client.put_object, Bucket=bucket_name, Key=key_name, ContentLength=-1)
     status = _get_status(e.response)
-    eq(status, 400)
+    assert status == 400

-@tag('auth_common')
-@attr(resource='object')
-@attr(method='put')
-@attr(operation='create w/no content length')
-@attr(assertion='fails 411')
+@pytest.mark.auth_common
 # TODO: remove 'fails_on_rgw' and once we have learned how to remove the content-length header
-@attr('fails_on_rgw')
+@pytest.mark.fails_on_rgw
 def test_object_create_bad_contentlength_none():
     remove = 'Content-Length'
     e = _remove_header_create_bad_object('Content-Length')
     status, error_code = _get_status_and_error_code(e.response)
-    eq(status, 411)
-    eq(error_code, 'MissingContentLength')
+    assert status == 411
+    assert error_code == 'MissingContentLength'

-@tag('auth_common')
-@attr(resource='object')
-@attr(method='put')
-@attr(operation='create w/content length too long')
-@attr(assertion='fails 400')
-# TODO: remove 'fails_on_rgw' and once we have learned how to remove the content-length header
-@attr('fails_on_rgw')
-def test_object_create_bad_contentlength_mismatch_above():
-    content = 'bar'
-    length = len(content) + 1
-
-    client = get_client()
-    bucket_name = get_new_bucket()
-    key_name = 'foo'
-    headers = {'Content-Length': str(length)}
-    add_headers = (lambda **kwargs: kwargs['params']['headers'].update(headers))
-    client.meta.events.register('before-sign.s3.PutObject', add_headers)
-
-    e = assert_raises(ClientError, client.put_object, Bucket=bucket_name, Key=key_name, Body=content)
-    status, error_code = _get_status_and_error_code(e.response)
-    eq(status, 400)
-
-@tag('auth_common')
-@attr(resource='object')
-@attr(method='put')
-@attr(operation='create w/content type text/plain')
-@attr(assertion='succeeds')
 def test_object_create_bad_contenttype_invalid():
     bucket_name, key_name = _add_header_create_object({'Content-Type': 'text/plain'})
     client = get_client()
     client.put_object(Bucket=bucket_name, Key=key_name, Body='bar')

-@tag('auth_common')
-@attr(resource='object')
-@attr(method='put')
-@attr(operation='create w/empty content type')
-@attr(assertion='succeeds')
+@pytest.mark.auth_common
 def test_object_create_bad_contenttype_empty():
     client = get_client()
     key_name = 'foo'
     bucket_name = get_new_bucket()
     client.put_object(Bucket=bucket_name, Key=key_name, Body='bar', ContentType='')

-@tag('auth_common')
-@attr(resource='object')
-@attr(method='put')
-@attr(operation='create w/no content type')
-@attr(assertion='succeeds')
+@pytest.mark.auth_common
 def test_object_create_bad_contenttype_none():
     bucket_name = get_new_bucket()
     key_name = 'foo'
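With the homegrown tag() decorator gone, test selection moves to pytest's marker machinery (`pytest -m auth_aws2` replaces nose's attribute filter). Markers also have to be declared somewhere to avoid unknown-marker warnings; a conftest.py sketch, assuming the repo does not already declare them in pytest.ini:

# conftest.py (sketch; marker list abridged)
def pytest_configure(config):
    for name in ('auth_common', 'auth_aws2', 'fails_on_rgw',
                 'fails_on_mod_proxy_fcgi', 'fails_on_dbstore',
                 's3website', 's3website_routing_rules'):
        config.addinivalue_line('markers', name)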
@@ -329,38 +248,26 @@ def test_object_create_bad_contenttype_none():
     client.put_object(Bucket=bucket_name, Key=key_name, Body='bar')


-@tag('auth_common')
-@attr(resource='object')
-@attr(method='put')
-@attr(operation='create w/empty authorization')
-@attr(assertion='fails 403')
+@pytest.mark.auth_common
 # TODO: remove 'fails_on_rgw' and once we have learned how to remove the authorization header
-@attr('fails_on_rgw')
+@pytest.mark.fails_on_rgw
 def test_object_create_bad_authorization_empty():
     e = _add_header_create_bad_object({'Authorization': ''})
     status, error_code = _get_status_and_error_code(e.response)
-    eq(status, 403)
+    assert status == 403

-@tag('auth_common')
-@attr(resource='object')
-@attr(method='put')
-@attr(operation='create w/date and x-amz-date')
-@attr(assertion='succeeds')
+@pytest.mark.auth_common
 # TODO: remove 'fails_on_rgw' and once we have learned how to pass both the 'Date' and 'X-Amz-Date' header during signing and not 'X-Amz-Date' before
-@attr('fails_on_rgw')
+@pytest.mark.fails_on_rgw
 def test_object_create_date_and_amz_date():
     date = formatdate(usegmt=True)
     bucket_name, key_name = _add_header_create_object({'Date': date, 'X-Amz-Date': date})
     client = get_client()
     client.put_object(Bucket=bucket_name, Key=key_name, Body='bar')

-@tag('auth_common')
-@attr(resource='object')
-@attr(method='put')
-@attr(operation='create w/x-amz-date and no date')
-@attr(assertion='succeeds')
+@pytest.mark.auth_common
 # TODO: remove 'fails_on_rgw' and once we have learned how to pass both the 'Date' and 'X-Amz-Date' header during signing and not 'X-Amz-Date' before
-@attr('fails_on_rgw')
+@pytest.mark.fails_on_rgw
 def test_object_create_amz_date_and_no_date():
     date = formatdate(usegmt=True)
     bucket_name, key_name = _add_header_create_object({'Date': '', 'X-Amz-Date': date})
@@ -368,36 +275,24 @@ def test_object_create_amz_date_and_no_date():
     client.put_object(Bucket=bucket_name, Key=key_name, Body='bar')

 # the teardown is really messed up here. check it out
-@tag('auth_common')
-@attr(resource='object')
-@attr(method='put')
-@attr(operation='create w/no authorization')
-@attr(assertion='fails 403')
+@pytest.mark.auth_common
 # TODO: remove 'fails_on_rgw' and once we have learned how to remove the authorization header
-@attr('fails_on_rgw')
+@pytest.mark.fails_on_rgw
 def test_object_create_bad_authorization_none():
     e = _remove_header_create_bad_object('Authorization')
     status, error_code = _get_status_and_error_code(e.response)
-    eq(status, 403)
+    assert status == 403

-@tag('auth_common')
-@attr(resource='bucket')
-@attr(method='put')
-@attr(operation='create w/no content length')
-@attr(assertion='succeeds')
+@pytest.mark.auth_common
 # TODO: remove 'fails_on_rgw' and once we have learned how to remove the content-length header
-@attr('fails_on_rgw')
+@pytest.mark.fails_on_rgw
 def test_bucket_create_contentlength_none():
     remove = 'Content-Length'
     _remove_header_create_bucket(remove)

-@tag('auth_common')
-@attr(resource='bucket')
-@attr(method='acls')
-@attr(operation='set w/no content length')
-@attr(assertion='succeeds')
+@pytest.mark.auth_common
 # TODO: remove 'fails_on_rgw' and once we have learned how to remove the content-length header
-@attr('fails_on_rgw')
+@pytest.mark.fails_on_rgw
 def test_object_acl_create_contentlength_none():
     bucket_name = get_new_bucket()
     client = get_client()
@@ -411,11 +306,7 @@ def test_object_acl_create_contentlength_none():
     client.meta.events.register('before-call.s3.PutObjectAcl', remove_header)
     client.put_object_acl(Bucket=bucket_name, Key='foo', ACL='public-read')

-@tag('auth_common')
-@attr(resource='bucket')
-@attr(method='acls')
-@attr(operation='set w/invalid permission')
-@attr(assertion='fails 400')
+@pytest.mark.auth_common
 def test_bucket_put_bad_canned_acl():
     bucket_name = get_new_bucket()
     client = get_client()
@@ -426,13 +317,9 @@ def test_bucket_put_bad_canned_acl():

     e = assert_raises(ClientError, client.put_bucket_acl, Bucket=bucket_name, ACL='public-read')
     status = _get_status(e.response)
-    eq(status, 400)
+    assert status == 400

-@tag('auth_common')
-@attr(resource='bucket')
-@attr(method='put')
-@attr(operation='create w/expect 200')
-@attr(assertion='garbage, but S3 succeeds!')
+@pytest.mark.auth_common
 def test_bucket_create_bad_expect_mismatch():
     bucket_name = get_new_bucket_name()
     client = get_client()
@@ -442,99 +329,67 @@ def test_bucket_create_bad_expect_mismatch():
     client.meta.events.register('before-call.s3.CreateBucket', add_headers)
     client.create_bucket(Bucket=bucket_name)

-@tag('auth_common')
-@attr(resource='bucket')
-@attr(method='put')
-@attr(operation='create w/expect empty')
-@attr(assertion='garbage, but S3 succeeds!')
+@pytest.mark.auth_common
 def test_bucket_create_bad_expect_empty():
     headers = {'Expect': ''}
     _add_header_create_bucket(headers)

-@tag('auth_common')
-@attr(resource='bucket')
-@attr(method='put')
-@attr(operation='create w/empty content length')
-@attr(assertion='fails 400')
+@pytest.mark.auth_common
 # TODO: The request isn't even making it to the RGW past the frontend
 # This test had 'fails_on_rgw' before the move to boto3
-@attr('fails_on_rgw')
+@pytest.mark.fails_on_rgw
 def test_bucket_create_bad_contentlength_empty():
     headers = {'Content-Length': ''}
     e = _add_header_create_bad_bucket(headers)
     status, error_code = _get_status_and_error_code(e.response)
-    eq(status, 400)
+    assert status == 400

-@tag('auth_common')
-@attr(resource='bucket')
-@attr(method='put')
-@attr(operation='create w/negative content length')
-@attr(assertion='fails 400')
-@attr('fails_on_mod_proxy_fcgi')
+@pytest.mark.auth_common
+@pytest.mark.fails_on_mod_proxy_fcgi
 def test_bucket_create_bad_contentlength_negative():
     headers = {'Content-Length': '-1'}
     e = _add_header_create_bad_bucket(headers)
     status = _get_status(e.response)
-    eq(status, 400)
+    assert status == 400

-@tag('auth_common')
-@attr(resource='bucket')
-@attr(method='put')
-@attr(operation='create w/no content length')
-@attr(assertion='succeeds')
+@pytest.mark.auth_common
 # TODO: remove 'fails_on_rgw' and once we have learned how to remove the content-length header
-@attr('fails_on_rgw')
+@pytest.mark.fails_on_rgw
 def test_bucket_create_bad_contentlength_none():
     remove = 'Content-Length'
     _remove_header_create_bucket(remove)

-@tag('auth_common')
-@attr(resource='bucket')
-@attr(method='put')
-@attr(operation='create w/empty authorization')
-@attr(assertion='fails 403')
+@pytest.mark.auth_common
 # TODO: remove 'fails_on_rgw' and once we have learned how to manipulate the authorization header
-@attr('fails_on_rgw')
+@pytest.mark.fails_on_rgw
 def test_bucket_create_bad_authorization_empty():
     headers = {'Authorization': ''}
     e = _add_header_create_bad_bucket(headers)
     status, error_code = _get_status_and_error_code(e.response)
-    eq(status, 403)
-    eq(error_code, 'AccessDenied')
+    assert status == 403
+    assert error_code == 'AccessDenied'

-@tag('auth_common')
-@attr(resource='bucket')
-@attr(method='put')
-@attr(operation='create w/no authorization')
-@attr(assertion='fails 403')
+@pytest.mark.auth_common
 # TODO: remove 'fails_on_rgw' and once we have learned how to manipulate the authorization header
-@attr('fails_on_rgw')
+@pytest.mark.fails_on_rgw
 def test_bucket_create_bad_authorization_none():
     e = _remove_header_create_bad_bucket('Authorization')
     status, error_code = _get_status_and_error_code(e.response)
-    eq(status, 403)
-    eq(error_code, 'AccessDenied')
+    assert status == 403
+    assert error_code == 'AccessDenied'

-@tag('auth_aws2')
-@attr(resource='object')
-@attr(method='put')
-@attr(operation='create w/invalid MD5')
-@attr(assertion='fails 400')
+@pytest.mark.auth_aws2
 def test_object_create_bad_md5_invalid_garbage_aws2():
     v2_client = get_v2_client()
     headers = {'Content-MD5': 'AWS HAHAHA'}
     e = _add_header_create_bad_object(headers, v2_client)
     status, error_code = _get_status_and_error_code(e.response)
-    eq(status, 400)
-    eq(error_code, 'InvalidDigest')
+    assert status == 400
+    assert error_code == 'InvalidDigest'

-@tag('auth_aws2')
-@attr(resource='object')
-@attr(method='put')
-@attr(operation='create w/content length too short')
-@attr(assertion='fails 400')
+@pytest.mark.auth_aws2
 # TODO: remove 'fails_on_rgw' and once we have learned how to manipulate the Content-Length header
-@attr('fails_on_rgw')
+@pytest.mark.fails_on_rgw
 def test_object_create_bad_contentlength_mismatch_below_aws2():
     v2_client = get_v2_client()
     content = 'bar'
@ -542,252 +397,176 @@ def test_object_create_bad_contentlength_mismatch_below_aws2():
|
||||||
headers = {'Content-Length': str(length)}
|
headers = {'Content-Length': str(length)}
|
||||||
e = _add_header_create_bad_object(headers, v2_client)
|
e = _add_header_create_bad_object(headers, v2_client)
|
||||||
status, error_code = _get_status_and_error_code(e.response)
|
status, error_code = _get_status_and_error_code(e.response)
|
||||||
eq(status, 400)
|
assert status == 400
|
||||||
eq(error_code, 'BadDigest')
|
assert error_code == 'BadDigest'
|
||||||
|
|
||||||
@tag('auth_aws2')
|
@pytest.mark.auth_aws2
|
||||||
@attr(resource='object')
|
|
||||||
@attr(method='put')
|
|
||||||
@attr(operation='create w/incorrect authorization')
|
|
||||||
@attr(assertion='fails 403')
|
|
||||||
# TODO: remove 'fails_on_rgw' and once we have learned how to manipulate the authorization header
|
# TODO: remove 'fails_on_rgw' and once we have learned how to manipulate the authorization header
|
||||||
@attr('fails_on_rgw')
|
@pytest.mark.fails_on_rgw
|
||||||
def test_object_create_bad_authorization_incorrect_aws2():
|
def test_object_create_bad_authorization_incorrect_aws2():
|
||||||
v2_client = get_v2_client()
|
v2_client = get_v2_client()
|
||||||
headers = {'Authorization': 'AWS AKIAIGR7ZNNBHC5BKSUB:FWeDfwojDSdS2Ztmpfeubhd9isU='}
|
headers = {'Authorization': 'AWS AKIAIGR7ZNNBHC5BKSUB:FWeDfwojDSdS2Ztmpfeubhd9isU='}
|
||||||
e = _add_header_create_bad_object(headers, v2_client)
|
e = _add_header_create_bad_object(headers, v2_client)
|
||||||
status, error_code = _get_status_and_error_code(e.response)
|
status, error_code = _get_status_and_error_code(e.response)
|
||||||
eq(status, 403)
|
assert status == 403
|
||||||
eq(error_code, 'InvalidDigest')
|
assert error_code == 'InvalidDigest'
|
||||||
|
|
||||||
@tag('auth_aws2')
|
@pytest.mark.auth_aws2
|
||||||
@attr(resource='object')
|
|
||||||
@attr(method='put')
|
|
||||||
@attr(operation='create w/invalid authorization')
|
|
||||||
@attr(assertion='fails 400')
|
|
||||||
# TODO: remove 'fails_on_rgw' and once we have learned how to manipulate the authorization header
|
# TODO: remove 'fails_on_rgw' and once we have learned how to manipulate the authorization header
|
||||||
@attr('fails_on_rgw')
|
@pytest.mark.fails_on_rgw
|
||||||
def test_object_create_bad_authorization_invalid_aws2():
|
def test_object_create_bad_authorization_invalid_aws2():
|
||||||
v2_client = get_v2_client()
|
v2_client = get_v2_client()
|
||||||
headers = {'Authorization': 'AWS HAHAHA'}
|
headers = {'Authorization': 'AWS HAHAHA'}
|
||||||
e = _add_header_create_bad_object(headers, v2_client)
|
e = _add_header_create_bad_object(headers, v2_client)
|
||||||
status, error_code = _get_status_and_error_code(e.response)
|
status, error_code = _get_status_and_error_code(e.response)
|
||||||
eq(status, 400)
|
assert status == 400
|
||||||
eq(error_code, 'InvalidArgument')
|
assert error_code == 'InvalidArgument'
|
||||||
|
|
||||||
-@tag('auth_aws2')
+@pytest.mark.auth_aws2
-@attr(resource='object')
-@attr(method='put')
-@attr(operation='create w/empty user agent')
-@attr(assertion='succeeds')
 def test_object_create_bad_ua_empty_aws2():
     v2_client = get_v2_client()
     headers = {'User-Agent': ''}
     bucket_name, key_name = _add_header_create_object(headers, v2_client)
     v2_client.put_object(Bucket=bucket_name, Key=key_name, Body='bar')

-@tag('auth_aws2')
+@pytest.mark.auth_aws2
-@attr(resource='object')
-@attr(method='put')
-@attr(operation='create w/no user agent')
-@attr(assertion='succeeds')
 def test_object_create_bad_ua_none_aws2():
     v2_client = get_v2_client()
     remove = 'User-Agent'
     bucket_name, key_name = _remove_header_create_object(remove, v2_client)
     v2_client.put_object(Bucket=bucket_name, Key=key_name, Body='bar')

-@tag('auth_aws2')
+@pytest.mark.auth_aws2
-@attr(resource='object')
-@attr(method='put')
-@attr(operation='create w/invalid date')
-@attr(assertion='fails 403')
 def test_object_create_bad_date_invalid_aws2():
     v2_client = get_v2_client()
     headers = {'x-amz-date': 'Bad Date'}
     e = _add_header_create_bad_object(headers, v2_client)
     status, error_code = _get_status_and_error_code(e.response)
-    eq(status, 403)
+    assert status == 403
-    eq(error_code, 'AccessDenied')
+    assert error_code == 'AccessDenied'

-@tag('auth_aws2')
+@pytest.mark.auth_aws2
-@attr(resource='object')
-@attr(method='put')
-@attr(operation='create w/empty date')
-@attr(assertion='fails 403')
 def test_object_create_bad_date_empty_aws2():
     v2_client = get_v2_client()
     headers = {'x-amz-date': ''}
     e = _add_header_create_bad_object(headers, v2_client)
     status, error_code = _get_status_and_error_code(e.response)
-    eq(status, 403)
+    assert status == 403
-    eq(error_code, 'AccessDenied')
+    assert error_code == 'AccessDenied'

-@tag('auth_aws2')
+@pytest.mark.auth_aws2
-@attr(resource='object')
-@attr(method='put')
-@attr(operation='create w/no date')
-@attr(assertion='fails 403')
 # TODO: remove 'fails_on_rgw' once we have learned how to remove the date header
-@attr('fails_on_rgw')
+@pytest.mark.fails_on_rgw
 def test_object_create_bad_date_none_aws2():
     v2_client = get_v2_client()
     remove = 'x-amz-date'
     e = _remove_header_create_bad_object(remove, v2_client)
     status, error_code = _get_status_and_error_code(e.response)
-    eq(status, 403)
+    assert status == 403
-    eq(error_code, 'AccessDenied')
+    assert error_code == 'AccessDenied'

-@tag('auth_aws2')
+@pytest.mark.auth_aws2
-@attr(resource='object')
-@attr(method='put')
-@attr(operation='create w/date in past')
-@attr(assertion='fails 403')
 def test_object_create_bad_date_before_today_aws2():
     v2_client = get_v2_client()
     headers = {'x-amz-date': 'Tue, 07 Jul 2010 21:53:04 GMT'}
     e = _add_header_create_bad_object(headers, v2_client)
     status, error_code = _get_status_and_error_code(e.response)
-    eq(status, 403)
+    assert status == 403
-    eq(error_code, 'RequestTimeTooSkewed')
+    assert error_code == 'RequestTimeTooSkewed'

-@tag('auth_aws2')
+@pytest.mark.auth_aws2
-@attr(resource='object')
-@attr(method='put')
-@attr(operation='create w/date before epoch')
-@attr(assertion='fails 403')
 def test_object_create_bad_date_before_epoch_aws2():
     v2_client = get_v2_client()
     headers = {'x-amz-date': 'Tue, 07 Jul 1950 21:53:04 GMT'}
     e = _add_header_create_bad_object(headers, v2_client)
     status, error_code = _get_status_and_error_code(e.response)
-    eq(status, 403)
+    assert status == 403
-    eq(error_code, 'AccessDenied')
+    assert error_code == 'AccessDenied'

-@tag('auth_aws2')
+@pytest.mark.auth_aws2
-@attr(resource='object')
-@attr(method='put')
-@attr(operation='create w/date after 9999')
-@attr(assertion='fails 403')
 def test_object_create_bad_date_after_end_aws2():
     v2_client = get_v2_client()
     headers = {'x-amz-date': 'Tue, 07 Jul 9999 21:53:04 GMT'}
     e = _add_header_create_bad_object(headers, v2_client)
     status, error_code = _get_status_and_error_code(e.response)
-    eq(status, 403)
+    assert status == 403
-    eq(error_code, 'RequestTimeTooSkewed')
+    assert error_code == 'RequestTimeTooSkewed'

-@tag('auth_aws2')
+@pytest.mark.auth_aws2
-@attr(resource='bucket')
-@attr(method='put')
-@attr(operation='create w/invalid authorization')
-@attr(assertion='fails 400')
 # TODO: remove 'fails_on_rgw' once we have learned how to manipulate the authorization header
-@attr('fails_on_rgw')
+@pytest.mark.fails_on_rgw
 def test_bucket_create_bad_authorization_invalid_aws2():
     v2_client = get_v2_client()
     headers = {'Authorization': 'AWS HAHAHA'}
     e = _add_header_create_bad_bucket(headers, v2_client)
     status, error_code = _get_status_and_error_code(e.response)
-    eq(status, 400)
+    assert status == 400
-    eq(error_code, 'InvalidArgument')
+    assert error_code == 'InvalidArgument'

-@tag('auth_aws2')
+@pytest.mark.auth_aws2
-@attr(resource='bucket')
-@attr(method='put')
-@attr(operation='create w/empty user agent')
-@attr(assertion='succeeds')
 def test_bucket_create_bad_ua_empty_aws2():
     v2_client = get_v2_client()
     headers = {'User-Agent': ''}
     _add_header_create_bucket(headers, v2_client)

-@tag('auth_aws2')
+@pytest.mark.auth_aws2
-@attr(resource='bucket')
-@attr(method='put')
-@attr(operation='create w/no user agent')
-@attr(assertion='succeeds')
 def test_bucket_create_bad_ua_none_aws2():
     v2_client = get_v2_client()
     remove = 'User-Agent'
     _remove_header_create_bucket(remove, v2_client)

-@tag('auth_aws2')
+@pytest.mark.auth_aws2
-@attr(resource='bucket')
-@attr(method='put')
-@attr(operation='create w/invalid date')
-@attr(assertion='fails 403')
 def test_bucket_create_bad_date_invalid_aws2():
     v2_client = get_v2_client()
     headers = {'x-amz-date': 'Bad Date'}
     e = _add_header_create_bad_bucket(headers, v2_client)
     status, error_code = _get_status_and_error_code(e.response)
-    eq(status, 403)
+    assert status == 403
-    eq(error_code, 'AccessDenied')
+    assert error_code == 'AccessDenied'

-@tag('auth_aws2')
+@pytest.mark.auth_aws2
-@attr(resource='bucket')
-@attr(method='put')
-@attr(operation='create w/empty date')
-@attr(assertion='fails 403')
 def test_bucket_create_bad_date_empty_aws2():
     v2_client = get_v2_client()
     headers = {'x-amz-date': ''}
     e = _add_header_create_bad_bucket(headers, v2_client)
     status, error_code = _get_status_and_error_code(e.response)
-    eq(status, 403)
+    assert status == 403
-    eq(error_code, 'AccessDenied')
+    assert error_code == 'AccessDenied'

-@tag('auth_aws2')
+@pytest.mark.auth_aws2
-@attr(resource='bucket')
-@attr(method='put')
-@attr(operation='create w/no date')
-@attr(assertion='fails 403')
 # TODO: remove 'fails_on_rgw' once we have learned how to remove the date header
-@attr('fails_on_rgw')
+@pytest.mark.fails_on_rgw
 def test_bucket_create_bad_date_none_aws2():
     v2_client = get_v2_client()
     remove = 'x-amz-date'
     e = _remove_header_create_bad_bucket(remove, v2_client)
     status, error_code = _get_status_and_error_code(e.response)
-    eq(status, 403)
+    assert status == 403
-    eq(error_code, 'AccessDenied')
+    assert error_code == 'AccessDenied'

-@tag('auth_aws2')
+@pytest.mark.auth_aws2
-@attr(resource='bucket')
-@attr(method='put')
-@attr(operation='create w/date in past')
-@attr(assertion='fails 403')
 def test_bucket_create_bad_date_before_today_aws2():
     v2_client = get_v2_client()
     headers = {'x-amz-date': 'Tue, 07 Jul 2010 21:53:04 GMT'}
     e = _add_header_create_bad_bucket(headers, v2_client)
     status, error_code = _get_status_and_error_code(e.response)
-    eq(status, 403)
+    assert status == 403
-    eq(error_code, 'RequestTimeTooSkewed')
+    assert error_code == 'RequestTimeTooSkewed'

-@tag('auth_aws2')
+@pytest.mark.auth_aws2
-@attr(resource='bucket')
-@attr(method='put')
-@attr(operation='create w/date in future')
-@attr(assertion='fails 403')
 def test_bucket_create_bad_date_after_today_aws2():
     v2_client = get_v2_client()
     headers = {'x-amz-date': 'Tue, 07 Jul 2030 21:53:04 GMT'}
     e = _add_header_create_bad_bucket(headers, v2_client)
     status, error_code = _get_status_and_error_code(e.response)
-    eq(status, 403)
+    assert status == 403
-    eq(error_code, 'RequestTimeTooSkewed')
+    assert error_code == 'RequestTimeTooSkewed'

-@tag('auth_aws2')
+@pytest.mark.auth_aws2
-@attr(resource='bucket')
-@attr(method='put')
-@attr(operation='create w/date before epoch')
-@attr(assertion='fails 403')
 def test_bucket_create_bad_date_before_epoch_aws2():
     v2_client = get_v2_client()
     headers = {'x-amz-date': 'Tue, 07 Jul 1950 21:53:04 GMT'}
     e = _add_header_create_bad_bucket(headers, v2_client)
     status, error_code = _get_status_and_error_code(e.response)
-    eq(status, 403)
+    assert status == 403
-    eq(error_code, 'AccessDenied')
+    assert error_code == 'AccessDenied'

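Every hunk above follows the same two-part conversion: nose's eq(a, b) helper becomes a bare assert a == b (pytest's assertion rewriting reports both operands on failure), and the @tag/@attr metadata decorators collapse into @pytest.mark.* markers, which select subsets via pytest -m once the marker names are registered in pytest.ini. A minimal before/after sketch of the pattern, using an illustrative test that is not part of the suite:

import pytest

# nose style (removed):
#     @tag('auth_aws2')
#     @attr(assertion='fails 403')
#     def test_example():
#         eq(status, 403)

# pytest style (added):
@pytest.mark.auth_aws2
def test_example():
    status = 403          # stand-in for a status code returned by the server
    assert status == 403  # bare assert; pytest shows both sides when it fails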
s3tests_boto3/functional/test_iam.py (new file, 2922 lines): diff suppressed because it is too large
(additional file diffs suppressed because they are too large)
s3tests_boto3/functional/test_sns.py (new file, 159 lines):
@@ -0,0 +1,159 @@
+import json
+import pytest
+from botocore.exceptions import ClientError
+from . import (
+    configfile,
+    get_iam_root_client,
+    get_iam_alt_root_client,
+    get_new_bucket_name,
+    get_prefix,
+    nuke_prefixed_buckets,
+)
+from .iam import iam_root, iam_alt_root
+from .utils import assert_raises, _get_status_and_error_code
+
+def get_new_topic_name():
+    return get_new_bucket_name()
+
+def nuke_topics(client, prefix):
+    p = client.get_paginator('list_topics')
+    for response in p.paginate():
+        for topic in response['Topics']:
+            arn = topic['TopicArn']
+            if prefix not in arn:
+                continue  # skip topics that are not ours ('pass' here would fall through and delete them)
+            try:
+                client.delete_topic(TopicArn=arn)
+            except:
+                pass
+
+@pytest.fixture
+def sns(iam_root):
+    client = get_iam_root_client(service_name='sns')
+    yield client
+    nuke_topics(client, get_prefix())
+
+@pytest.fixture
+def sns_alt(iam_alt_root):
+    client = get_iam_alt_root_client(service_name='sns')
+    yield client
+    nuke_topics(client, get_prefix())
+
+@pytest.fixture
+def s3(iam_root):
+    client = get_iam_root_client(service_name='s3')
+    yield client
+    nuke_prefixed_buckets(get_prefix(), client)
+
+@pytest.fixture
+def s3_alt(iam_alt_root):
+    client = get_iam_alt_root_client(service_name='s3')
+    yield client
+    nuke_prefixed_buckets(get_prefix(), client)
+
+
+@pytest.mark.iam_account
+@pytest.mark.sns
+def test_account_topic(sns):
+    name = get_new_topic_name()
+
+    response = sns.create_topic(Name=name)
+    arn = response['TopicArn']
+    assert arn.startswith('arn:aws:sns:')
+    assert arn.endswith(f':{name}')
+
+    response = sns.list_topics()
+    assert arn in [p['TopicArn'] for p in response['Topics']]
+
+    sns.set_topic_attributes(TopicArn=arn, AttributeName='Policy', AttributeValue='')
+
+    response = sns.get_topic_attributes(TopicArn=arn)
+    assert 'Attributes' in response
+
+    sns.delete_topic(TopicArn=arn)
+
+    response = sns.list_topics()
+    assert arn not in [p['TopicArn'] for p in response['Topics']]
+
+    with pytest.raises(sns.exceptions.NotFoundException):
+        sns.get_topic_attributes(TopicArn=arn)
+    # deleting the already-deleted topic still returns success
+    sns.delete_topic(TopicArn=arn)
+
+@pytest.mark.iam_account
+@pytest.mark.sns
+def test_cross_account_topic(sns, sns_alt):
+    name = get_new_topic_name()
+    arn = sns.create_topic(Name=name)['TopicArn']
+
+    # not visible to any alt user apis
+    with pytest.raises(sns.exceptions.NotFoundException):
+        sns_alt.get_topic_attributes(TopicArn=arn)
+    with pytest.raises(sns.exceptions.NotFoundException):
+        sns_alt.set_topic_attributes(TopicArn=arn, AttributeName='Policy', AttributeValue='')
+
+    # delete returns success
+    sns_alt.delete_topic(TopicArn=arn)
+
+    response = sns_alt.list_topics()
+    assert arn not in [p['TopicArn'] for p in response['Topics']]
+
+@pytest.mark.iam_account
+@pytest.mark.sns
+def test_account_topic_publish(sns, s3):
+    name = get_new_topic_name()
+
+    response = sns.create_topic(Name=name)
+    topic_arn = response['TopicArn']
+
+    bucket = get_new_bucket_name()
+    s3.create_bucket(Bucket=bucket)
+
+    config = {'TopicConfigurations': [{
+        'Id': 'id',
+        'TopicArn': topic_arn,
+        'Events': [ 's3:ObjectCreated:*' ],
+    }]}
+    s3.put_bucket_notification_configuration(
+        Bucket=bucket, NotificationConfiguration=config)
+
+@pytest.mark.iam_account
+@pytest.mark.iam_cross_account
+@pytest.mark.sns
+def test_cross_account_topic_publish(sns, s3_alt, iam_alt_root):
+    name = get_new_topic_name()
+
+    response = sns.create_topic(Name=name)
+    topic_arn = response['TopicArn']
+
+    bucket = get_new_bucket_name()
+    s3_alt.create_bucket(Bucket=bucket)
+
+    config = {'TopicConfigurations': [{
+        'Id': 'id',
+        'TopicArn': topic_arn,
+        'Events': [ 's3:ObjectCreated:*' ],
+    }]}
+
+    # expect AccessDenied because no resource policy allows cross-account access
+    e = assert_raises(ClientError, s3_alt.put_bucket_notification_configuration,
+                      Bucket=bucket, NotificationConfiguration=config)
+    status, error_code = _get_status_and_error_code(e.response)
+    assert status == 403
+    assert error_code == 'AccessDenied'
+
+    # add topic policy to allow the alt user
+    alt_principal = iam_alt_root.get_user()['User']['Arn']
+    policy = json.dumps({
+        'Version': '2012-10-17',
+        'Statement': [{
+            'Effect': 'Allow',
+            'Principal': {'AWS': alt_principal},
+            'Action': 'sns:Publish',
+            'Resource': topic_arn
+        }]
+    })
+    sns.set_topic_attributes(TopicArn=topic_arn, AttributeName='Policy',
+                             AttributeValue=policy)
+
+    s3_alt.put_bucket_notification_configuration(
+        Bucket=bucket, NotificationConfiguration=config)
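The pivotal step in test_cross_account_topic_publish is the topic's resource policy: the bucket-notification configuration is authorized as an sns:Publish on the target topic, so the owning account must grant that action to the other account's principal before the alt user's put_bucket_notification_configuration can succeed. A standalone sketch of the same grant in plain boto3; the endpoint, credentials, and account ARN are placeholders, not values from the suite:

import json
import boto3

# assumed RGW endpoint and root-account credentials (placeholders)
sns = boto3.client('sns',
                   endpoint_url='http://localhost:8000',
                   aws_access_key_id='ROOT_ACCESS_KEY',
                   aws_secret_access_key='ROOT_SECRET_KEY',
                   region_name='us-east-1')

topic_arn = sns.create_topic(Name='demo-topic')['TopicArn']

# grant sns:Publish on this topic to the other account's root user (placeholder ARN)
policy = json.dumps({
    'Version': '2012-10-17',
    'Statement': [{
        'Effect': 'Allow',
        'Principal': {'AWS': 'arn:aws:iam::123456789012:root'},
        'Action': 'sns:Publish',
        'Resource': topic_arn,
    }],
})
sns.set_topic_attributes(TopicArn=topic_arn, AttributeName='Policy',
                         AttributeValue=policy)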
(another file diff suppressed because it is too large)
@@ -1,11 +1,9 @@
-from nose.tools import eq_ as eq
-
 from . import utils

 def test_generate():
     FIVE_MB = 5 * 1024 * 1024
-    eq(len(''.join(utils.generate_random(0))), 0)
+    assert len(''.join(utils.generate_random(0))) == 0
-    eq(len(''.join(utils.generate_random(1))), 1)
+    assert len(''.join(utils.generate_random(1))) == 1
-    eq(len(''.join(utils.generate_random(FIVE_MB - 1))), FIVE_MB - 1)
+    assert len(''.join(utils.generate_random(FIVE_MB - 1))) == FIVE_MB - 1
-    eq(len(''.join(utils.generate_random(FIVE_MB))), FIVE_MB)
+    assert len(''.join(utils.generate_random(FIVE_MB))) == FIVE_MB
-    eq(len(''.join(utils.generate_random(FIVE_MB + 1))), FIVE_MB + 1)
+    assert len(''.join(utils.generate_random(FIVE_MB + 1))) == FIVE_MB + 1
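The repeated ''.join(...) in test_generate reflects that utils.generate_random yields string chunks adding up to the requested size, so large request bodies can be streamed instead of held in memory at once. A hedged sketch of a generator with that shape; this is an assumption about the interface, not the suite's actual implementation:

import random
import string

def generate_random_sketch(size, chunk=1024 * 1024):
    # yield chunk-sized pieces of random characters until 'size' characters total
    remaining = size
    while remaining > 0:
        n = min(chunk, remaining)
        yield ''.join(random.choice(string.ascii_letters) for _ in range(n))
        remaining -= n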
@@ -3,8 +3,6 @@ import requests
 import string
 import time

-from nose.tools import eq_ as eq
-
 def assert_raises(excClass, callableObj, *args, **kwargs):
     """
     Like unittest.TestCase.assertRaises, but returns the exception.
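Unlike unittest's assertRaises, this helper hands the caught exception back to the caller, which is what lets the converted tests pull the HTTP status and S3 error code out of e.response. A short usage sketch inside the suite's package; check_denied is a hypothetical helper, not part of the suite:

from botocore.exceptions import ClientError

from .utils import assert_raises, _get_status_and_error_code

def check_denied(client, bucket):
    # expect the call to fail, then inspect the error details
    e = assert_raises(ClientError, client.get_bucket_acl, Bucket=bucket)
    status, error_code = _get_status_and_error_code(e.response)
    assert status == 403
    assert error_code == 'AccessDenied'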
tox.ini (new file, 9 lines):
@@ -0,0 +1,9 @@
+[tox]
+envlist = py
+
+[testenv]
+deps = -rrequirements.txt
+passenv =
+    S3TEST_CONF
+    S3_USE_SIGV4
+commands = pytest {posargs}
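With this tox.ini in place the suite runs through tox, which installs requirements.txt into the test environment and forwards everything after -- to pytest via {posargs}; S3TEST_CONF and S3_USE_SIGV4 are passed through from the caller's environment. For example, S3TEST_CONF=your.conf tox -- -m 'not fails_on_rgw' would skip the tests marked above as known RGW failures (the config filename here is a placeholder).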