From 9319a41b245387e79a0801f97ee5119cc4df9bf3 Mon Sep 17 00:00:00 2001 From: Casey Bodley Date: Fri, 20 Jan 2023 15:24:07 -0500 Subject: [PATCH 01/12] pytest: add tox.ini to run pytest, update README Signed-off-by: Casey Bodley --- README.rst | 36 ++++++++++++++---------------------- requirements.txt | 1 + tox.ini | 7 +++++++ 3 files changed, 22 insertions(+), 22 deletions(-) create mode 100644 tox.ini diff --git a/README.rst b/README.rst index 42ad7d5..f2b9818 100644 --- a/README.rst +++ b/README.rst @@ -6,14 +6,10 @@ This is a set of unofficial Amazon AWS S3 compatibility tests, that can be useful to people implementing software that exposes an S3-like API. The tests use the Boto2 and Boto3 libraries. -The tests use the Nose test framework. To get started, ensure you have -the ``virtualenv`` software installed; e.g. on Debian/Ubuntu:: +The tests use the Tox tool. To get started, ensure you have the ``tox`` +software installed; e.g. on Debian/Ubuntu:: - sudo apt-get install python-virtualenv - -and then run:: - - ./bootstrap + sudo apt-get install tox You will need to create a configuration file with the location of the service and two different credentials. A sample configuration file named @@ -22,29 +18,25 @@ used to run the s3 tests on a Ceph cluster started with vstart. 
Once you have that file copied and edited, you can run the tests with:: - S3TEST_CONF=your.conf ./virtualenv/bin/nosetests + S3TEST_CONF=your.conf tox You can specify which directory of tests to run:: - S3TEST_CONF=your.conf ./virtualenv/bin/nosetests s3tests_boto3.functional + S3TEST_CONF=your.conf tox s3tests_boto3/functional You can specify which file of tests to run:: - S3TEST_CONF=your.conf ./virtualenv/bin/nosetests s3tests_boto3.functional.test_s3 + S3TEST_CONF=your.conf tox s3tests_boto3/functional/test_s3.py You can specify which test to run:: - S3TEST_CONF=your.conf ./virtualenv/bin/nosetests s3tests_boto3.functional.test_s3:test_bucket_list_empty - -To gather a list of tests being run, use the flags:: - - -v --collect-only + S3TEST_CONF=your.conf tox s3tests_boto3/functional/test_s3.py::test_bucket_list_empty Some tests have attributes set based on their current reliability and things like AWS not enforcing their spec stricly. You can filter tests based on their attributes:: - S3TEST_CONF=aws.conf ./virtualenv/bin/nosetests -a '!fails_on_aws' + S3TEST_CONF=aws.conf tox -- -m 'not fails_on_aws' Most of the tests have both Boto3 and Boto2 versions. Tests written in Boto2 are in the ``s3tests`` directory. Tests written in Boto3 are @@ -52,7 +44,7 @@ located in the ``s3test_boto3`` directory. You can run only the boto3 tests with:: - S3TEST_CONF=your.conf ./virtualenv/bin/nosetests -v -s -A 'not fails_on_rgw' s3tests_boto3.functional + S3TEST_CONF=your.conf tox -- -m 'not fails_on_aws' s3tests_boto3/functional ======================== STS compatibility tests @@ -62,11 +54,11 @@ This section contains some basic tests for the AssumeRole, GetSessionToken and A You can run only the sts tests (all the three API's) with:: - S3TEST_CONF=your.conf ./virtualenv/bin/nosetests s3tests_boto3.functional.test_sts + S3TEST_CONF=your.conf tox s3tests_boto3/functional/test_sts.py You can filter tests based on the attributes. 
There is a attribute named ``test_of_sts`` to run AssumeRole and GetSessionToken tests and ``webidentity_test`` to run the AssumeRoleWithWebIdentity tests. If you want to execute only ``test_of_sts`` tests you can apply that filter as below:: - S3TEST_CONF=your.conf ./virtualenv/bin/nosetests -v -s -A 'test_of_sts' s3tests_boto3.functional.test_sts + S3TEST_CONF=your.conf tox -- -m test_of_sts s3tests_boto3/functional/test_sts.py For running ``webidentity_test`` you'll need have Keycloak running. @@ -88,14 +80,14 @@ Adding above capabilities to "iam" user is also taken care by vstart (If Ceph cl To run these tests, create configuration file with section "iam" and "s3 alt" refer s3tests.conf.SAMPLE. Once you have that configuration file copied and edited, you can run all the tests with:: - S3TEST_CONF=your.conf ./virtualenv/bin/nosetests s3tests_boto3.functional.test_iam + S3TEST_CONF=your.conf tox s3tests_boto3/functional/test_iam.py You can also specify specific test to run:: - S3TEST_CONF=your.conf ./virtualenv/bin/nosetests s3tests_boto3.functional.test_iam:test_put_user_policy + S3TEST_CONF=your.conf tox s3tests_boto3/functional/test_iam.py::test_put_user_policy Some tests have attributes set such as "fails_on_rgw". 
You can filter tests based on their attributes:: - S3TEST_CONF=your.conf ./virtualenv/bin/nosetests s3tests_boto3.functional.test_iam -a '!fails_on_rgw' + S3TEST_CONF=your.conf tox -- s3tests_boto3/functional/test_iam.py -m 'not fails_on_rgw' diff --git a/requirements.txt b/requirements.txt index 0e625a1..20ffec8 100644 --- a/requirements.txt +++ b/requirements.txt @@ -12,3 +12,4 @@ requests >=2.23.0 pytz >=2011k httplib2 lxml +pytest diff --git a/tox.ini b/tox.ini new file mode 100644 index 0000000..93fde38 --- /dev/null +++ b/tox.ini @@ -0,0 +1,7 @@ +[tox] +envlist = py + +[testenv] +deps = -rrequirements.txt +passenv = S3TEST_CONF S3_USE_SIGV4 +commands = pytest {posargs} From 672a1233485926b1e9fdcb4170cb7b4e3abcb30b Mon Sep 17 00:00:00 2001 From: Casey Bodley Date: Fri, 20 Jan 2023 14:44:08 -0500 Subject: [PATCH 02/12] pytest: add global configfile and autouse teardown fixtures Signed-off-by: Casey Bodley --- s3tests/functional/__init__.py | 10 ++++++++++ s3tests/functional/test_headers.py | 2 ++ s3tests/functional/test_s3.py | 2 ++ s3tests/functional/test_s3_website.py | 2 ++ s3tests_boto3/functional/__init__.py | 11 +++++++++++ s3tests_boto3/functional/test_headers.py | 2 ++ s3tests_boto3/functional/test_iam.py | 2 ++ s3tests_boto3/functional/test_s3.py | 2 ++ s3tests_boto3/functional/test_s3select.py | 2 ++ s3tests_boto3/functional/test_sts.py | 2 ++ 10 files changed, 37 insertions(+) diff --git a/s3tests/functional/__init__.py b/s3tests/functional/__init__.py index 8911e02..a3890d4 100644 --- a/s3tests/functional/__init__.py +++ b/s3tests/functional/__init__.py @@ -7,6 +7,7 @@ import itertools import os import random import string +import pytest from http.client import HTTPConnection, HTTPSConnection from urllib.parse import urlparse @@ -370,6 +371,15 @@ def teardown(): # remove our buckets here also, to avoid littering nuke_prefixed_buckets(prefix=prefix) +@pytest.fixture(scope="package") +def configfile(): + setup() + yield config + 
+@pytest.fixture(autouse=True) +def setup_teardown(configfile): + yield + teardown() bucket_counter = itertools.count(1) diff --git a/s3tests/functional/test_headers.py b/s3tests/functional/test_headers.py index 63f0c56..f4415b7 100644 --- a/s3tests/functional/test_headers.py +++ b/s3tests/functional/test_headers.py @@ -27,6 +27,8 @@ from .utils import assert_raises from email.header import decode_header from . import ( + configfile, + setup_teardown, _make_raw_request, nuke_prefixed_buckets, get_new_bucket, diff --git a/s3tests/functional/test_s3.py b/s3tests/functional/test_s3.py index 808f9fa..db7dfb3 100644 --- a/s3tests/functional/test_s3.py +++ b/s3tests/functional/test_s3.py @@ -37,6 +37,8 @@ from .utils import assert_raises from .policy import Policy, Statement, make_json_policy from . import ( + configfile, + setup_teardown, nuke_prefixed_buckets, get_new_bucket, get_new_bucket_name, diff --git a/s3tests/functional/test_s3_website.py b/s3tests/functional/test_s3_website.py index 3696004..bd83468 100644 --- a/s3tests/functional/test_s3_website.py +++ b/s3tests/functional/test_s3_website.py @@ -19,6 +19,8 @@ from nose.plugins.skip import SkipTest from .. import common from . 
import ( + configfile, + setup_teardown, get_new_bucket, get_new_bucket_name, s3, diff --git a/s3tests_boto3/functional/__init__.py b/s3tests_boto3/functional/__init__.py index 4d03d65..22b136c 100644 --- a/s3tests_boto3/functional/__init__.py +++ b/s3tests_boto3/functional/__init__.py @@ -1,3 +1,4 @@ +import pytest import boto3 from botocore import UNSIGNED from botocore.client import Config @@ -308,6 +309,16 @@ def teardown(): except: pass +@pytest.fixture(scope="package") +def configfile(): + setup() + return config + +@pytest.fixture(autouse=True) +def setup_teardown(configfile): + yield + teardown() + def check_webidentity(): cfg = configparser.RawConfigParser() try: diff --git a/s3tests_boto3/functional/test_headers.py b/s3tests_boto3/functional/test_headers.py index 04b2757..a983c6e 100644 --- a/s3tests_boto3/functional/test_headers.py +++ b/s3tests_boto3/functional/test_headers.py @@ -10,6 +10,8 @@ from .utils import _get_status_and_error_code from .utils import _get_status from . import ( + configfile, + setup_teardown, get_client, get_v2_client, get_new_bucket, diff --git a/s3tests_boto3/functional/test_iam.py b/s3tests_boto3/functional/test_iam.py index bf7236f..e402356 100644 --- a/s3tests_boto3/functional/test_iam.py +++ b/s3tests_boto3/functional/test_iam.py @@ -7,6 +7,8 @@ from nose.tools import eq_ as eq from s3tests_boto3.functional.utils import assert_raises from s3tests_boto3.functional.test_s3 import _multipart_upload from . import ( + configfile, + setup_teardown, get_alt_client, get_iam_client, get_new_bucket, diff --git a/s3tests_boto3/functional/test_s3.py b/s3tests_boto3/functional/test_s3.py index 20ae4f1..1601809 100644 --- a/s3tests_boto3/functional/test_s3.py +++ b/s3tests_boto3/functional/test_s3.py @@ -41,6 +41,8 @@ from .utils import _get_status from .policy import Policy, Statement, make_json_policy from . 
import ( + configfile, + setup_teardown, get_client, get_prefix, get_unauthenticated_client, diff --git a/s3tests_boto3/functional/test_s3select.py b/s3tests_boto3/functional/test_s3select.py index 85d7c39..0b5ccda 100644 --- a/s3tests_boto3/functional/test_s3select.py +++ b/s3tests_boto3/functional/test_s3select.py @@ -9,6 +9,8 @@ import uuid from nose.tools import eq_ as eq from . import ( + configfile, + setup_teardown, get_client ) diff --git a/s3tests_boto3/functional/test_sts.py b/s3tests_boto3/functional/test_sts.py index dce1712..16b967c 100644 --- a/s3tests_boto3/functional/test_sts.py +++ b/s3tests_boto3/functional/test_sts.py @@ -32,6 +32,8 @@ from collections import namedtuple from email.header import decode_header from . import( + configfile, + setup_teardown, get_iam_client, get_sts_client, get_client, From 3652cfe2ec5500b37adf7830acc1d957f71b5a9e Mon Sep 17 00:00:00 2001 From: Casey Bodley Date: Fri, 20 Jan 2023 15:34:47 -0500 Subject: [PATCH 03/12] remove tests tagged fails_strict_rfc2616 Signed-off-by: Casey Bodley --- s3tests_boto3/functional/test_s3.py | 48 ----------------------------- 1 file changed, 48 deletions(-) diff --git a/s3tests_boto3/functional/test_s3.py b/s3tests_boto3/functional/test_s3.py index 1601809..578255e 100644 --- a/s3tests_boto3/functional/test_s3.py +++ b/s3tests_boto3/functional/test_s3.py @@ -2241,54 +2241,6 @@ def _set_get_metadata_unreadable(metadata, bucket_name=None): e = assert_raises(ClientError, client.put_object, Bucket=bucket_name, Key='bar', Metadata=metadata_dict) return e -@attr(resource='object.metadata') -@attr(method='put') -@attr(operation='metadata write/re-write') -@attr(assertion='non-UTF-8 values detected, but rejected by webserver') -@attr('fails_strict_rfc2616') -@attr(assertion='fails 400') -def test_object_set_get_non_utf8_metadata(): - metadata = '\x04mymeta' - e = _set_get_metadata_unreadable(metadata) - status, error_code = _get_status_and_error_code(e.response) - eq(status, 400 or 403) - 
-@attr(resource='object.metadata') -@attr(method='put') -@attr(operation='metadata write') -@attr(assertion='non-printing prefixes rejected by webserver') -@attr('fails_strict_rfc2616') -@attr(assertion='fails 400') -def test_object_set_get_metadata_empty_to_unreadable_prefix(): - metadata = '\x04w' - e = _set_get_metadata_unreadable(metadata) - status, error_code = _get_status_and_error_code(e.response) - eq(status, 400 or 403) - -@attr(resource='object.metadata') -@attr(method='put') -@attr(operation='metadata write') -@attr(assertion='non-printing suffixes rejected by webserver') -@attr('fails_strict_rfc2616') -@attr(assertion='fails 400') -def test_object_set_get_metadata_empty_to_unreadable_suffix(): - metadata = 'h\x04' - e = _set_get_metadata_unreadable(metadata) - status, error_code = _get_status_and_error_code(e.response) - eq(status, 400 or 403) - -@attr(resource='object.metadata') -@attr(method='put') -@attr(operation='metadata write') -@attr(assertion='non-priting in-fixes rejected by webserver') -@attr('fails_strict_rfc2616') -@attr(assertion='fails 400') -def test_object_set_get_metadata_empty_to_unreadable_infix(): - metadata = 'h\x04w' - e = _set_get_metadata_unreadable(metadata) - status, error_code = _get_status_and_error_code(e.response) - eq(status, 400 or 403) - @attr(resource='object') @attr(method='put') @attr(operation='data re-write') From 4864dbc340dff24370fb01c0af04b44bf00d5e47 Mon Sep 17 00:00:00 2001 From: Casey Bodley Date: Fri, 20 Jan 2023 14:35:45 -0500 Subject: [PATCH 04/12] pytest: add custom marks for each nose @attr and register them in pytest.ini Signed-off-by: Casey Bodley --- pytest.ini | 40 ++ s3tests/functional/test_headers.py | 18 + s3tests/functional/test_s3.py | 51 +++ s3tests/functional/test_s3_website.py | 69 ++- s3tests_boto3/functional/test_headers.py | 21 + s3tests_boto3/functional/test_iam.py | 58 +++ s3tests_boto3/functional/test_s3.py | 495 +++++++++++++++++++++- s3tests_boto3/functional/test_s3select.py | 37 ++ 
s3tests_boto3/functional/test_sts.py | 109 +++++ 9 files changed, 896 insertions(+), 2 deletions(-) create mode 100644 pytest.ini diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 0000000..f4f638e --- /dev/null +++ b/pytest.ini @@ -0,0 +1,40 @@ +[pytest] +markers = + abac_test + appendobject + bucket_policy + bucket_encryption + cloud_transition + encryption + fails_on_aws + fails_on_dbstore + fails_on_dho + fails_on_mod_proxy_fcgi + fails_on_rgw + fails_on_s3 + fails_with_subdomain + lifecycle + lifecycle_expiration + lifecycle_transition + list_objects_v2 + object_lock + session_policy + s3select + s3website + s3website_routing_rules + s3website_redirect_location + 3website + sse_s3 + storage_class + tagging + test_of_iam + test_of_sts + token_claims_trust_policy_test + token_principal_tag_role_policy_test + token_request_tag_trust_policy_test + token_resource_tags_test + token_role_tags_test + token_tag_keys_test + user_policy + versioning + webidentity_test diff --git a/s3tests/functional/test_headers.py b/s3tests/functional/test_headers.py index f4415b7..c9ba342 100644 --- a/s3tests/functional/test_headers.py +++ b/s3tests/functional/test_headers.py @@ -5,6 +5,7 @@ import boto.s3.connection import boto.s3.acl import boto.utils import nose +import pytest import operator import random import string @@ -185,6 +186,7 @@ def tag(*tags): @attr(operation='create w/no content length') @attr(assertion='fails 411') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore @nose.with_setup(teardown=_clear_custom_headers) def test_object_create_bad_contentlength_none(): key = _setup_bad_object(remove=('Content-Length',)) @@ -202,6 +204,7 @@ def test_object_create_bad_contentlength_none(): @attr(assertion='fails 400') @nose.with_setup(teardown=_clear_custom_headers) @attr('fails_on_rgw') +@pytest.mark.fails_on_rgw def test_object_create_bad_contentlength_mismatch_above(): content = 'bar' length = len(content) + 1 @@ -225,6 +228,7 @@ def 
test_object_create_bad_contentlength_mismatch_above(): @attr(operation='create w/empty authorization') @attr(assertion='fails 403') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore @nose.with_setup(teardown=_clear_custom_headers) def test_object_create_bad_authorization_empty(): key = _setup_bad_object({'Authorization': ''}) @@ -240,6 +244,7 @@ def test_object_create_bad_authorization_empty(): @attr(operation='create w/date and x-amz-date') @attr(assertion='succeeds') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore @nose.with_setup(teardown=_clear_custom_headers) def test_object_create_date_and_amz_date(): date = formatdate(usegmt=True) @@ -252,6 +257,7 @@ def test_object_create_date_and_amz_date(): @attr(operation='create w/x-amz-date and no date') @attr(assertion='succeeds') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore @nose.with_setup(teardown=_clear_custom_headers) def test_object_create_amz_date_and_no_date(): date = formatdate(usegmt=True) @@ -266,6 +272,7 @@ def test_object_create_amz_date_and_no_date(): @attr(operation='create w/no authorization') @attr(assertion='fails 403') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore @nose.with_setup(teardown=_clear_custom_headers) def test_object_create_bad_authorization_none(): key = _setup_bad_object(remove=('Authorization',)) @@ -282,6 +289,7 @@ def test_object_create_bad_authorization_none(): @attr(operation='create w/no content length') @attr(assertion='succeeds') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore @nose.with_setup(teardown=_clear_custom_headers) def test_bucket_create_contentlength_none(): _add_custom_headers(remove=('Content-Length',)) @@ -294,6 +302,7 @@ def test_bucket_create_contentlength_none(): @attr(operation='set w/no content length') @attr(assertion='succeeds') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore @nose.with_setup(teardown=_clear_custom_headers) def test_object_acl_create_contentlength_none(): bucket = get_new_bucket() @@ 
-324,6 +333,7 @@ def _create_new_connection(): @attr(assertion='fails 400') @nose.with_setup(teardown=_clear_custom_headers) @attr('fails_on_rgw') +@pytest.mark.fails_on_rgw def test_bucket_create_bad_contentlength_empty(): conn = _create_new_connection() _add_custom_headers({'Content-Length': ''}) @@ -338,6 +348,7 @@ def test_bucket_create_bad_contentlength_empty(): @attr(operation='create w/no content length') @attr(assertion='succeeds') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore @nose.with_setup(teardown=_clear_custom_headers) def test_bucket_create_bad_contentlength_none(): _add_custom_headers(remove=('Content-Length',)) @@ -350,6 +361,7 @@ def test_bucket_create_bad_contentlength_none(): @attr(operation='create w/empty authorization') @attr(assertion='fails 403') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore @nose.with_setup(teardown=_clear_custom_headers) def test_bucket_create_bad_authorization_empty(): _add_custom_headers({'Authorization': ''}) @@ -366,6 +378,7 @@ def test_bucket_create_bad_authorization_empty(): @attr(operation='create w/no authorization') @attr(assertion='fails 403') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore @nose.with_setup(teardown=_clear_custom_headers) def test_bucket_create_bad_authorization_none(): _add_custom_headers(remove=('Authorization',)) @@ -384,6 +397,7 @@ def test_bucket_create_bad_authorization_none(): @attr(operation='create w/content length too short') @attr(assertion='fails 400') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore @nose.with_setup(teardown=_clear_custom_headers) def test_object_create_bad_contentlength_mismatch_below_aws2(): check_aws2_support() @@ -402,6 +416,7 @@ def test_object_create_bad_contentlength_mismatch_below_aws2(): @attr(operation='create w/incorrect authorization') @attr(assertion='fails 403') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore @nose.with_setup(teardown=_clear_custom_headers) def 
test_object_create_bad_authorization_incorrect_aws2(): check_aws2_support() @@ -419,6 +434,7 @@ def test_object_create_bad_authorization_incorrect_aws2(): @attr(operation='create w/invalid authorization') @attr(assertion='fails 400') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_create_bad_authorization_invalid_aws2(): check_aws2_support() key = _setup_bad_object({'Authorization': 'AWS HAHAHA'}) @@ -433,6 +449,7 @@ def test_object_create_bad_authorization_invalid_aws2(): @attr(operation='create w/no date') @attr(assertion='fails 403') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore @nose.with_setup(teardown=_clear_custom_headers) def test_object_create_bad_date_none_aws2(): check_aws2_support() @@ -463,6 +480,7 @@ def test_bucket_create_bad_authorization_invalid_aws2(): @attr(operation='create w/no date') @attr(assertion='fails 403') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore @nose.with_setup(teardown=_clear_custom_headers) def test_bucket_create_bad_date_none_aws2(): check_aws2_support() diff --git a/s3tests/functional/test_s3.py b/s3tests/functional/test_s3.py index db7dfb3..5fca0ca 100644 --- a/s3tests/functional/test_s3.py +++ b/s3tests/functional/test_s3.py @@ -8,6 +8,7 @@ import time import email.utils import isodate import nose +import pytest import operator import socket import ssl @@ -97,6 +98,7 @@ def _get_alt_connection(): # Breaks DNS with SubdomainCallingFormat @attr('fails_with_subdomain') +@pytest.mark.fails_with_subdomain @attr(resource='bucket') @attr(method='put') @attr(operation='create w/! 
in name') @@ -135,7 +137,9 @@ def check_configure_versioning_retry(bucket, status, expected_string): @attr(operation='create versioned object, read not exist null version') @attr(assertion='read null version behaves correctly') @attr('versioning') +@pytest.mark.versioning @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_versioning_obj_read_not_exist_null(): bucket = get_new_bucket() check_versioning(bucket, None) @@ -156,9 +160,13 @@ def test_versioning_obj_read_not_exist_null(): @attr(operation='append object') @attr(assertion='success') @attr('fails_on_aws') +@pytest.mark.fails_on_aws @attr('fails_with_subdomain') +@pytest.mark.fails_with_subdomain @attr('appendobject') +@pytest.mark.appendobject @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_append_object(): bucket = get_new_bucket() key = bucket.new_key('foo') @@ -181,9 +189,13 @@ def test_append_object(): @attr(operation='append to normal object') @attr(assertion='fails 409') @attr('fails_on_aws') +@pytest.mark.fails_on_aws @attr('fails_with_subdomain') +@pytest.mark.fails_with_subdomain @attr('appendobject') +@pytest.mark.appendobject @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_append_normal_object(): bucket = get_new_bucket() key = bucket.new_key('foo') @@ -202,9 +214,13 @@ def test_append_normal_object(): @attr(operation='append position not right') @attr(assertion='fails 409') @attr('fails_on_aws') +@pytest.mark.fails_on_aws @attr('fails_with_subdomain') +@pytest.mark.fails_with_subdomain @attr('appendobject') +@pytest.mark.appendobject @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_append_object_position_wrong(): bucket = get_new_bucket() key = bucket.new_key('foo') @@ -227,6 +243,7 @@ def test_append_object_position_wrong(): @attr(operation='set/enable/disable logging target') @attr(assertion='operations succeed') @attr('fails_on_rgw') +@pytest.mark.fails_on_rgw def test_logging_toggle(): bucket = get_new_bucket() log_bucket = 
get_new_bucket(targets.main.default, bucket.name + '-log') @@ -407,8 +424,11 @@ def lc_transitions(transitions=None): @attr(method='put') @attr(operation='test create object with storage class') @attr('storage_class') +@pytest.mark.storage_class @attr('fails_on_aws') +@pytest.mark.fails_on_aws @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_storage_class(): sc = configured_storage_classes() if len(sc) < 2: @@ -426,8 +446,11 @@ def test_object_storage_class(): @attr(method='put') @attr(operation='test create multipart object with storage class') @attr('storage_class') +@pytest.mark.storage_class @attr('fails_on_aws') +@pytest.mark.fails_on_aws @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_storage_class_multipart(): sc = configured_storage_classes() if len(sc) < 2: @@ -468,8 +491,11 @@ def _do_test_object_modify_storage_class(obj_write_func, size): @attr(method='put') @attr(operation='test changing objects storage class') @attr('storage_class') +@pytest.mark.storage_class @attr('fails_on_aws') +@pytest.mark.fails_on_aws @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_modify_storage_class(): _do_test_object_modify_storage_class(_populate_key, size=9*1024*1024) @@ -478,8 +504,11 @@ def test_object_modify_storage_class(): @attr(method='put') @attr(operation='test changing objects storage class') @attr('storage_class') +@pytest.mark.storage_class @attr('fails_on_aws') +@pytest.mark.fails_on_aws @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_modify_storage_class_multipart(): _do_test_object_modify_storage_class(_populate_multipart_key, size=11*1024*1024) @@ -508,8 +537,11 @@ def _do_test_object_storage_class_copy(obj_write_func, size): @attr(method='copy') @attr(operation='test copy object to object with different storage class') @attr('storage_class') +@pytest.mark.storage_class @attr('fails_on_aws') +@pytest.mark.fails_on_aws @attr('fails_on_dbstore') 
+@pytest.mark.fails_on_dbstore def test_object_storage_class_copy(): _do_test_object_storage_class_copy(_populate_key, size=9*1024*1024) @@ -517,8 +549,11 @@ def test_object_storage_class_copy(): @attr(method='copy') @attr(operation='test changing objects storage class') @attr('storage_class') +@pytest.mark.storage_class @attr('fails_on_aws') +@pytest.mark.fails_on_aws @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_storage_class_copy_multipart(): _do_test_object_storage_class_copy(_populate_multipart_key, size=9*1024*1024) @@ -625,7 +660,9 @@ def _test_atomic_dual_conditional_write(file_size): @attr(operation='write one or the other') @attr(assertion='1MB successful') @attr('fails_on_aws') +@pytest.mark.fails_on_aws @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_atomic_dual_conditional_write_1mb(): _test_atomic_dual_conditional_write(1024*1024) @@ -634,7 +671,9 @@ def test_atomic_dual_conditional_write_1mb(): @attr(operation='write file in deleted bucket') @attr(assertion='fail 404') @attr('fails_on_aws') +@pytest.mark.fails_on_aws @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_atomic_write_bucket_gone(): bucket = get_new_bucket() @@ -679,7 +718,9 @@ def _multipart_upload_enc(bucket, s3_key_name, size, part_size=5*1024*1024, @attr(operation='multipart upload with bad key for uploading chunks') @attr(assertion='successful') @attr('encryption') +@pytest.mark.encryption @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_encryption_sse_c_multipart_invalid_chunks_1(): bucket = get_new_bucket() key = "multipart_enc" @@ -707,7 +748,9 @@ def test_encryption_sse_c_multipart_invalid_chunks_1(): @attr(operation='multipart upload with bad md5 for chunks') @attr(assertion='successful') @attr('encryption') +@pytest.mark.encryption @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_encryption_sse_c_multipart_invalid_chunks_2(): bucket = get_new_bucket() key = "multipart_enc" @@ -735,8 
+778,11 @@ def test_encryption_sse_c_multipart_invalid_chunks_2(): @attr(operation='Test Bucket Policy for a user belonging to a different tenant') @attr(assertion='succeeds') @attr('fails_with_subdomain') +@pytest.mark.fails_with_subdomain @attr('bucket-policy') +@pytest.mark.bucket_policy @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_bucket_policy_different_tenant(): bucket = get_new_bucket() key = bucket.new_key('asdf') @@ -775,7 +821,9 @@ def test_bucket_policy_different_tenant(): @attr(method='put') @attr(operation='Test put condition operator end with ifExists') @attr('bucket-policy') +@pytest.mark.bucket_policy @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_bucket_policy_set_condition_operator_end_with_IfExists(): bucket = _create_keys(keys=['foo']) policy = '''{ @@ -815,8 +863,11 @@ def _make_arn_resource(path="*"): @attr(operation='put obj with RequestObjectTag') @attr(assertion='success') @attr('tagging') +@pytest.mark.tagging @attr('bucket-policy') +@pytest.mark.bucket_policy @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_bucket_policy_put_obj_request_obj_tag(): bucket = get_new_bucket() diff --git a/s3tests/functional/test_s3_website.py b/s3tests/functional/test_s3_website.py index bd83468..79646c0 100644 --- a/s3tests/functional/test_s3_website.py +++ b/s3tests/functional/test_s3_website.py @@ -2,6 +2,7 @@ import sys import collections import nose +import pytest import string import random from pprint import pprint @@ -45,7 +46,6 @@ ERRORDOC_TEMPLATE = '

ErrorDoc

{random}' CAN_WEBSITE = None -@attr('fails_on_dbstore') def check_can_test_website(): global CAN_WEBSITE # This is a bit expensive, so we cache this @@ -254,7 +254,9 @@ def _website_request(bucket_name, path, connect_hostname=None, method='GET', tim @attr(operation='list') @attr(assertion='non-existant bucket via website endpoint should give NoSuchBucket, exposing security risk') @attr('s3website') +@pytest.mark.s3website @attr('fails_on_rgw') +@pytest.mark.fails_on_rgw @nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_nonexistant_bucket_s3(): bucket_name = get_new_bucket_name() @@ -267,8 +269,11 @@ def test_website_nonexistant_bucket_s3(): #@attr(assertion='non-existant bucket via website endpoint should give Forbidden, keeping bucket identity secure') @attr(assertion='non-existant bucket via website endpoint should give NoSuchBucket') @attr('s3website') +@pytest.mark.s3website @attr('fails_on_s3') +@pytest.mark.fails_on_s3 @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore @nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_nonexistant_bucket_rgw(): bucket_name = get_new_bucket_name() @@ -282,7 +287,9 @@ def test_website_nonexistant_bucket_rgw(): @attr(operation='list') @attr(assertion='non-empty public buckets via s3website return page for /, where page is public') @attr('s3website') +@pytest.mark.s3website @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore @nose.with_setup(setup=check_can_test_website, teardown=common.teardown) @timed(10) def test_website_public_bucket_list_public_index(): @@ -311,7 +318,9 @@ def test_website_public_bucket_list_public_index(): @attr(operation='list') @attr(assertion='non-empty private buckets via s3website return page for /, where page is private') @attr('s3website') +@pytest.mark.s3website @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore @nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def 
test_website_private_bucket_list_public_index(): bucket = get_new_bucket() @@ -342,7 +351,9 @@ def test_website_private_bucket_list_public_index(): @attr(operation='list') @attr(assertion='empty private buckets via s3website return a 403 for /') @attr('s3website') +@pytest.mark.s3website @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore @nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_private_bucket_list_empty(): bucket = get_new_bucket() @@ -359,7 +370,9 @@ def test_website_private_bucket_list_empty(): @attr(operation='list') @attr(assertion='empty public buckets via s3website return a 404 for /') @attr('s3website') +@pytest.mark.s3website @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore @nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_public_bucket_list_empty(): bucket = get_new_bucket() @@ -375,7 +388,9 @@ def test_website_public_bucket_list_empty(): @attr(operation='list') @attr(assertion='non-empty public buckets via s3website return page for /, where page is private') @attr('s3website') +@pytest.mark.s3website @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore @nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_public_bucket_list_private_index(): bucket = get_new_bucket() @@ -401,7 +416,9 @@ def test_website_public_bucket_list_private_index(): @attr(operation='list') @attr(assertion='non-empty private buckets via s3website return page for /, where page is private') @attr('s3website') +@pytest.mark.s3website @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore @nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_private_bucket_list_private_index(): bucket = get_new_bucket() @@ -428,7 +445,9 @@ def test_website_private_bucket_list_private_index(): @attr(operation='list') @attr(assertion='empty private buckets via s3website return a 403 for /, missing errordoc') @attr('s3website') 
+@pytest.mark.s3website @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore @nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_private_bucket_list_empty_missingerrordoc(): bucket = get_new_bucket() @@ -445,7 +464,9 @@ def test_website_private_bucket_list_empty_missingerrordoc(): @attr(operation='list') @attr(assertion='empty public buckets via s3website return a 404 for /, missing errordoc') @attr('s3website') +@pytest.mark.s3website @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore @nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_public_bucket_list_empty_missingerrordoc(): bucket = get_new_bucket() @@ -461,7 +482,9 @@ def test_website_public_bucket_list_empty_missingerrordoc(): @attr(operation='list') @attr(assertion='non-empty public buckets via s3website return page for /, where page is private, missing errordoc') @attr('s3website') +@pytest.mark.s3website @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore @nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_public_bucket_list_private_index_missingerrordoc(): bucket = get_new_bucket() @@ -486,7 +509,9 @@ def test_website_public_bucket_list_private_index_missingerrordoc(): @attr(operation='list') @attr(assertion='non-empty private buckets via s3website return page for /, where page is private, missing errordoc') @attr('s3website') +@pytest.mark.s3website @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore @nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_private_bucket_list_private_index_missingerrordoc(): bucket = get_new_bucket() @@ -512,7 +537,9 @@ def test_website_private_bucket_list_private_index_missingerrordoc(): @attr(operation='list') @attr(assertion='empty private buckets via s3website return a 403 for /, blocked errordoc') @attr('s3website') +@pytest.mark.s3website @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore 
@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_private_bucket_list_empty_blockederrordoc(): bucket = get_new_bucket() @@ -541,7 +568,9 @@ def test_website_private_bucket_list_empty_blockederrordoc(): @attr(operation='list') @attr(assertion='check if there is an invalid payload after serving error doc') @attr('s3website') +@pytest.mark.s3website @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore @nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_public_bucket_list_pubilc_errordoc(): bucket = get_new_bucket() @@ -593,7 +622,9 @@ def test_website_public_bucket_list_pubilc_errordoc(): @attr(operation='list') @attr(assertion='empty public buckets via s3website return a 404 for /, blocked errordoc') @attr('s3website') +@pytest.mark.s3website @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore @nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_public_bucket_list_empty_blockederrordoc(): bucket = get_new_bucket() @@ -621,7 +652,9 @@ def test_website_public_bucket_list_empty_blockederrordoc(): @attr(operation='list') @attr(assertion='non-empty public buckets via s3website return page for /, where page is private, blocked errordoc') @attr('s3website') +@pytest.mark.s3website @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore @nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_public_bucket_list_private_index_blockederrordoc(): bucket = get_new_bucket() @@ -655,7 +688,9 @@ def test_website_public_bucket_list_private_index_blockederrordoc(): @attr(operation='list') @attr(assertion='non-empty private buckets via s3website return page for /, where page is private, blocked errordoc') @attr('s3website') +@pytest.mark.s3website @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore @nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def 
test_website_private_bucket_list_private_index_blockederrordoc(): bucket = get_new_bucket() @@ -690,8 +725,10 @@ def test_website_private_bucket_list_private_index_blockederrordoc(): @attr(operation='list') @attr(assertion='empty private buckets via s3website return a 403 for /, good errordoc') @attr('s3website') +@pytest.mark.s3website @nose.with_setup(setup=check_can_test_website, teardown=common.teardown) @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_website_private_bucket_list_empty_gooderrordoc(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) @@ -714,7 +751,9 @@ def test_website_private_bucket_list_empty_gooderrordoc(): @attr(operation='list') @attr(assertion='empty public buckets via s3website return a 404 for /, good errordoc') @attr('s3website') +@pytest.mark.s3website @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore @nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_public_bucket_list_empty_gooderrordoc(): bucket = get_new_bucket() @@ -739,7 +778,9 @@ def test_website_public_bucket_list_empty_gooderrordoc(): @attr(operation='list') @attr(assertion='non-empty public buckets via s3website return page for /, where page is private') @attr('s3website') +@pytest.mark.s3website @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore @nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_public_bucket_list_private_index_gooderrordoc(): bucket = get_new_bucket() @@ -769,7 +810,9 @@ def test_website_public_bucket_list_private_index_gooderrordoc(): @attr(operation='list') @attr(assertion='non-empty private buckets via s3website return page for /, where page is private') @attr('s3website') +@pytest.mark.s3website @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore @nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_private_bucket_list_private_index_gooderrordoc(): 
bucket = get_new_bucket() @@ -800,7 +843,9 @@ def test_website_private_bucket_list_private_index_gooderrordoc(): @attr(operation='list') @attr(assertion='RedirectAllRequestsTo without protocol should TODO') @attr('s3website') +@pytest.mark.s3website @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore @nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_bucket_private_redirectall_base(): bucket = get_new_bucket() @@ -818,7 +863,9 @@ def test_website_bucket_private_redirectall_base(): @attr(operation='list') @attr(assertion='RedirectAllRequestsTo without protocol should TODO') @attr('s3website') +@pytest.mark.s3website @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore @nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_bucket_private_redirectall_path(): bucket = get_new_bucket() @@ -838,7 +885,9 @@ def test_website_bucket_private_redirectall_path(): @attr(operation='list') @attr(assertion='RedirectAllRequestsTo without protocol should TODO') @attr('s3website') +@pytest.mark.s3website @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore @nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_bucket_private_redirectall_path_upgrade(): bucket = get_new_bucket() @@ -860,8 +909,11 @@ def test_website_bucket_private_redirectall_path_upgrade(): @attr(operation='list') @attr(assertion='x-amz-website-redirect-location should not fire without websiteconf') @attr('s3website') +@pytest.mark.s3website @attr('x-amz-website-redirect-location') +@pytest.mark.s3website_redirect_location @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore @nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_xredirect_nonwebsite(): bucket = get_new_bucket() @@ -893,8 +945,11 @@ def test_website_xredirect_nonwebsite(): @attr(operation='list') @attr(assertion='x-amz-website-redirect-location should fire websiteconf, relative path, public 
key') @attr('s3website') +@pytest.mark.s3website @attr('x-amz-website-redirect-location') +@pytest.mark.s3website_redirect_location @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore @nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_xredirect_public_relative(): bucket = get_new_bucket() @@ -921,8 +976,11 @@ def test_website_xredirect_public_relative(): @attr(operation='list') @attr(assertion='x-amz-website-redirect-location should fire websiteconf, absolute, public key') @attr('s3website') +@pytest.mark.s3website @attr('x-amz-website-redirect-location') +@pytest.mark.s3website_redirect_location @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore @nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_xredirect_public_abs(): bucket = get_new_bucket() @@ -949,8 +1007,11 @@ def test_website_xredirect_public_abs(): @attr(operation='list') @attr(assertion='x-amz-website-redirect-location should fire websiteconf, relative path, private key') @attr('s3website') +@pytest.mark.s3website @attr('x-amz-website-redirect-location') +@pytest.mark.s3website_redirect_location @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore @nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_xredirect_private_relative(): bucket = get_new_bucket() @@ -977,8 +1038,11 @@ def test_website_xredirect_private_relative(): @attr(operation='list') @attr(assertion='x-amz-website-redirect-location should fire websiteconf, absolute, private key') @attr('s3website') +@pytest.mark.s3website @attr('x-amz-website-redirect-location') +@pytest.mark.s3website_redirect_location @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore @nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_xredirect_private_abs(): bucket = get_new_bucket() @@ -1241,8 +1305,11 @@ def routing_check(*args, **kwargs): assert(False) @attr('s3website_RoutingRules') 
+@pytest.mark.s3website_routing_rules @attr('s3website') +@pytest.mark.s3website @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore @nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_routing_generator(): for t in ROUTING_RULES_TESTS: diff --git a/s3tests_boto3/functional/test_headers.py b/s3tests_boto3/functional/test_headers.py index a983c6e..c7fcec6 100644 --- a/s3tests_boto3/functional/test_headers.py +++ b/s3tests_boto3/functional/test_headers.py @@ -2,6 +2,7 @@ import boto3 from nose.tools import eq_ as eq from nose.plugins.attrib import attr import nose +import pytest from botocore.exceptions import ClientError from email.utils import formatdate @@ -242,6 +243,7 @@ def test_object_create_bad_expect_none(): @attr(assertion='fails 400') # TODO: remove 'fails_on_rgw' and once we have learned how to remove the content-length header @attr('fails_on_rgw') +@pytest.mark.fails_on_rgw def test_object_create_bad_contentlength_empty(): e = _add_header_create_bad_object({'Content-Length':''}) status, error_code = _get_status_and_error_code(e.response) @@ -253,6 +255,7 @@ def test_object_create_bad_contentlength_empty(): @attr(operation='create w/negative content length') @attr(assertion='fails 400') @attr('fails_on_mod_proxy_fcgi') +@pytest.mark.fails_on_mod_proxy_fcgi def test_object_create_bad_contentlength_negative(): client = get_client() bucket_name = get_new_bucket() @@ -268,6 +271,7 @@ def test_object_create_bad_contentlength_negative(): @attr(assertion='fails 411') # TODO: remove 'fails_on_rgw' and once we have learned how to remove the content-length header @attr('fails_on_rgw') +@pytest.mark.fails_on_rgw def test_object_create_bad_contentlength_none(): remove = 'Content-Length' e = _remove_header_create_bad_object('Content-Length') @@ -316,6 +320,7 @@ def test_object_create_bad_contenttype_none(): @attr(assertion='fails 403') # TODO: remove 'fails_on_rgw' and once we have learned how to remove the authorization header 
@attr('fails_on_rgw') +@pytest.mark.fails_on_rgw def test_object_create_bad_authorization_empty(): e = _add_header_create_bad_object({'Authorization': ''}) status, error_code = _get_status_and_error_code(e.response) @@ -328,6 +333,7 @@ def test_object_create_bad_authorization_empty(): @attr(assertion='succeeds') # TODO: remove 'fails_on_rgw' and once we have learned how to pass both the 'Date' and 'X-Amz-Date' header during signing and not 'X-Amz-Date' before @attr('fails_on_rgw') +@pytest.mark.fails_on_rgw def test_object_create_date_and_amz_date(): date = formatdate(usegmt=True) bucket_name, key_name = _add_header_create_object({'Date': date, 'X-Amz-Date': date}) @@ -341,6 +347,7 @@ def test_object_create_date_and_amz_date(): @attr(assertion='succeeds') # TODO: remove 'fails_on_rgw' and once we have learned how to pass both the 'Date' and 'X-Amz-Date' header during signing and not 'X-Amz-Date' before @attr('fails_on_rgw') +@pytest.mark.fails_on_rgw def test_object_create_amz_date_and_no_date(): date = formatdate(usegmt=True) bucket_name, key_name = _add_header_create_object({'Date': '', 'X-Amz-Date': date}) @@ -355,6 +362,7 @@ def test_object_create_amz_date_and_no_date(): @attr(assertion='fails 403') # TODO: remove 'fails_on_rgw' and once we have learned how to remove the authorization header @attr('fails_on_rgw') +@pytest.mark.fails_on_rgw def test_object_create_bad_authorization_none(): e = _remove_header_create_bad_object('Authorization') status, error_code = _get_status_and_error_code(e.response) @@ -367,6 +375,7 @@ def test_object_create_bad_authorization_none(): @attr(assertion='succeeds') # TODO: remove 'fails_on_rgw' and once we have learned how to remove the content-length header @attr('fails_on_rgw') +@pytest.mark.fails_on_rgw def test_bucket_create_contentlength_none(): remove = 'Content-Length' _remove_header_create_bucket(remove) @@ -378,6 +387,7 @@ def test_bucket_create_contentlength_none(): @attr(assertion='succeeds') # TODO: remove 
'fails_on_rgw' and once we have learned how to remove the content-length header @attr('fails_on_rgw') +@pytest.mark.fails_on_rgw def test_object_acl_create_contentlength_none(): bucket_name = get_new_bucket() client = get_client() @@ -439,6 +449,7 @@ def test_bucket_create_bad_expect_empty(): # TODO: The request isn't even making it to the RGW past the frontend # This test had 'fails_on_rgw' before the move to boto3 @attr('fails_on_rgw') +@pytest.mark.fails_on_rgw def test_bucket_create_bad_contentlength_empty(): headers = {'Content-Length': ''} e = _add_header_create_bad_bucket(headers) @@ -451,6 +462,7 @@ def test_bucket_create_bad_contentlength_empty(): @attr(operation='create w/negative content length') @attr(assertion='fails 400') @attr('fails_on_mod_proxy_fcgi') +@pytest.mark.fails_on_mod_proxy_fcgi def test_bucket_create_bad_contentlength_negative(): headers = {'Content-Length': '-1'} e = _add_header_create_bad_bucket(headers) @@ -464,6 +476,7 @@ def test_bucket_create_bad_contentlength_negative(): @attr(assertion='succeeds') # TODO: remove 'fails_on_rgw' and once we have learned how to remove the content-length header @attr('fails_on_rgw') +@pytest.mark.fails_on_rgw def test_bucket_create_bad_contentlength_none(): remove = 'Content-Length' _remove_header_create_bucket(remove) @@ -475,6 +488,7 @@ def test_bucket_create_bad_contentlength_none(): @attr(assertion='fails 403') # TODO: remove 'fails_on_rgw' and once we have learned how to manipulate the authorization header @attr('fails_on_rgw') +@pytest.mark.fails_on_rgw def test_bucket_create_bad_authorization_empty(): headers = {'Authorization': ''} e = _add_header_create_bad_bucket(headers) @@ -489,6 +503,7 @@ def test_bucket_create_bad_authorization_empty(): @attr(assertion='fails 403') # TODO: remove 'fails_on_rgw' and once we have learned how to manipulate the authorization header @attr('fails_on_rgw') +@pytest.mark.fails_on_rgw def test_bucket_create_bad_authorization_none(): e = 
_remove_header_create_bad_bucket('Authorization') status, error_code = _get_status_and_error_code(e.response) @@ -515,6 +530,7 @@ def test_object_create_bad_md5_invalid_garbage_aws2(): @attr(assertion='fails 400') # TODO: remove 'fails_on_rgw' and once we have learned how to manipulate the Content-Length header @attr('fails_on_rgw') +@pytest.mark.fails_on_rgw def test_object_create_bad_contentlength_mismatch_below_aws2(): v2_client = get_v2_client() content = 'bar' @@ -532,6 +548,7 @@ def test_object_create_bad_contentlength_mismatch_below_aws2(): @attr(assertion='fails 403') # TODO: remove 'fails_on_rgw' and once we have learned how to manipulate the authorization header @attr('fails_on_rgw') +@pytest.mark.fails_on_rgw def test_object_create_bad_authorization_incorrect_aws2(): v2_client = get_v2_client() headers = {'Authorization': 'AWS AKIAIGR7ZNNBHC5BKSUB:FWeDfwojDSdS2Ztmpfeubhd9isU='} @@ -547,6 +564,7 @@ def test_object_create_bad_authorization_incorrect_aws2(): @attr(assertion='fails 400') # TODO: remove 'fails_on_rgw' and once we have learned how to manipulate the authorization header @attr('fails_on_rgw') +@pytest.mark.fails_on_rgw def test_object_create_bad_authorization_invalid_aws2(): v2_client = get_v2_client() headers = {'Authorization': 'AWS HAHAHA'} @@ -610,6 +628,7 @@ def test_object_create_bad_date_empty_aws2(): @attr(assertion='fails 403') # TODO: remove 'fails_on_rgw' and once we have learned how to remove the date header @attr('fails_on_rgw') +@pytest.mark.fails_on_rgw def test_object_create_bad_date_none_aws2(): v2_client = get_v2_client() remove = 'x-amz-date' @@ -664,6 +683,7 @@ def test_object_create_bad_date_after_end_aws2(): @attr(assertion='fails 400') # TODO: remove 'fails_on_rgw' and once we have learned how to remove the date header @attr('fails_on_rgw') +@pytest.mark.fails_on_rgw def test_bucket_create_bad_authorization_invalid_aws2(): v2_client = get_v2_client() headers = {'Authorization': 'AWS HAHAHA'} @@ -725,6 +745,7 @@ def 
test_bucket_create_bad_date_empty_aws2(): @attr(assertion='fails 403') # TODO: remove 'fails_on_rgw' and once we have learned how to remove the date header @attr('fails_on_rgw') +@pytest.mark.fails_on_rgw def test_bucket_create_bad_date_none_aws2(): v2_client = get_v2_client() remove = 'x-amz-date' diff --git a/s3tests_boto3/functional/test_iam.py b/s3tests_boto3/functional/test_iam.py index e402356..0f6b124 100644 --- a/s3tests_boto3/functional/test_iam.py +++ b/s3tests_boto3/functional/test_iam.py @@ -3,6 +3,7 @@ import json from botocore.exceptions import ClientError from nose.plugins.attrib import attr from nose.tools import eq_ as eq +import pytest from s3tests_boto3.functional.utils import assert_raises from s3tests_boto3.functional.test_s3 import _multipart_upload @@ -24,7 +25,9 @@ from .utils import _get_status, _get_status_and_error_code @attr(operation='Verify Put User Policy') @attr(assertion='succeeds') @attr('user-policy') +@pytest.mark.user_policy @attr('test_of_iam') +@pytest.mark.test_of_iam def test_put_user_policy(): client = get_iam_client() @@ -48,7 +51,9 @@ def test_put_user_policy(): @attr(operation='Verify Put User Policy with invalid user') @attr(assertion='succeeds') @attr('user-policy') +@pytest.mark.user_policy @attr('test_of_iam') +@pytest.mark.test_of_iam def test_put_user_policy_invalid_user(): client = get_iam_client() @@ -70,7 +75,9 @@ def test_put_user_policy_invalid_user(): @attr(operation='Verify Put User Policy using parameter value outside limit') @attr(assertion='succeeds') @attr('user-policy') +@pytest.mark.user_policy @attr('test_of_iam') +@pytest.mark.test_of_iam def test_put_user_policy_parameter_limit(): client = get_iam_client() @@ -93,8 +100,11 @@ def test_put_user_policy_parameter_limit(): @attr(operation='Verify Put User Policy using invalid policy document elements') @attr(assertion='succeeds') @attr('user-policy') +@pytest.mark.user_policy @attr('test_of_iam') +@pytest.mark.test_of_iam @attr('fails_on_rgw') 
+@pytest.mark.fails_on_rgw def test_put_user_policy_invalid_element(): client = get_iam_client() @@ -163,7 +173,9 @@ def test_put_user_policy_invalid_element(): @attr(operation='Verify Put a policy that already exists') @attr(assertion='succeeds') @attr('user-policy') +@pytest.mark.user_policy @attr('test_of_iam') +@pytest.mark.test_of_iam def test_put_existing_user_policy(): client = get_iam_client() @@ -188,7 +200,9 @@ def test_put_existing_user_policy(): @attr(operation='Verify List User policies') @attr(assertion='succeeds') @attr('user-policy') +@pytest.mark.user_policy @attr('test_of_iam') +@pytest.mark.test_of_iam def test_list_user_policy(): client = get_iam_client() @@ -213,7 +227,9 @@ def test_list_user_policy(): @attr(operation='Verify List User policies with invalid user') @attr(assertion='succeeds') @attr('user-policy') +@pytest.mark.user_policy @attr('test_of_iam') +@pytest.mark.test_of_iam def test_list_user_policy_invalid_user(): client = get_iam_client() e = assert_raises(ClientError, client.list_user_policies, UserName="some-non-existing-user-id") @@ -226,7 +242,9 @@ def test_list_user_policy_invalid_user(): @attr(operation='Verify Get User policy') @attr(assertion='succeeds') @attr('user-policy') +@pytest.mark.user_policy @attr('test_of_iam') +@pytest.mark.test_of_iam def test_get_user_policy(): client = get_iam_client() @@ -253,7 +271,9 @@ def test_get_user_policy(): @attr(operation='Verify Get User Policy with invalid user') @attr(assertion='succeeds') @attr('user-policy') +@pytest.mark.user_policy @attr('test_of_iam') +@pytest.mark.test_of_iam def test_get_user_policy_invalid_user(): client = get_iam_client() @@ -279,8 +299,11 @@ def test_get_user_policy_invalid_user(): @attr(operation='Verify Get User Policy with invalid policy name') @attr(assertion='succeeds') @attr('user-policy') +@pytest.mark.user_policy @attr('test_of_iam') +@pytest.mark.test_of_iam @attr('fails_on_rgw') +@pytest.mark.fails_on_rgw def 
test_get_user_policy_invalid_policy_name(): client = get_iam_client() @@ -305,8 +328,11 @@ def test_get_user_policy_invalid_policy_name(): @attr(operation='Verify Get Deleted User Policy') @attr(assertion='succeeds') @attr('user-policy') +@pytest.mark.user_policy @attr('test_of_iam') +@pytest.mark.test_of_iam @attr('fails_on_rgw') +@pytest.mark.fails_on_rgw def test_get_deleted_user_policy(): client = get_iam_client() @@ -331,7 +357,9 @@ def test_get_deleted_user_policy(): @attr(operation='Verify Get a policy from multiple policies for a user') @attr(assertion='succeeds') @attr('user-policy') +@pytest.mark.user_policy @attr('test_of_iam') +@pytest.mark.test_of_iam def test_get_user_policy_from_multiple_policies(): client = get_iam_client() @@ -368,7 +396,9 @@ def test_get_user_policy_from_multiple_policies(): @attr(operation='Verify Delete User Policy') @attr(assertion='succeeds') @attr('user-policy') +@pytest.mark.user_policy @attr('test_of_iam') +@pytest.mark.test_of_iam def test_delete_user_policy(): client = get_iam_client() @@ -394,7 +424,9 @@ def test_delete_user_policy(): @attr(operation='Verify Delete User Policy with invalid user') @attr(assertion='succeeds') @attr('user-policy') +@pytest.mark.user_policy @attr('test_of_iam') +@pytest.mark.test_of_iam def test_delete_user_policy_invalid_user(): client = get_iam_client() @@ -424,7 +456,9 @@ def test_delete_user_policy_invalid_user(): @attr(operation='Verify Delete User Policy with invalid policy name') @attr(assertion='succeeds') @attr('user-policy') +@pytest.mark.user_policy @attr('test_of_iam') +@pytest.mark.test_of_iam def test_delete_user_policy_invalid_policy_name(): client = get_iam_client() @@ -454,7 +488,9 @@ def test_delete_user_policy_invalid_policy_name(): @attr(operation='Verify Delete multiple User policies for a user') @attr(assertion='succeeds') @attr('user-policy') +@pytest.mark.user_policy @attr('test_of_iam') +@pytest.mark.test_of_iam def test_delete_user_policy_from_multiple_policies(): 
client = get_iam_client() @@ -498,7 +534,9 @@ def test_delete_user_policy_from_multiple_policies(): @attr(operation='Verify Allow Bucket Actions in user Policy') @attr(assertion='succeeds') @attr('user-policy') +@pytest.mark.user_policy @attr('test_of_iam') +@pytest.mark.test_of_iam def test_allow_bucket_actions_in_user_policy(): client = get_iam_client() s3_client_alt = get_alt_client() @@ -549,7 +587,9 @@ def test_allow_bucket_actions_in_user_policy(): @attr(operation='Verify Deny Bucket Actions in user Policy') @attr(assertion='succeeds') @attr('user-policy') +@pytest.mark.user_policy @attr('test_of_iam') +@pytest.mark.test_of_iam @attr('fails_on_dbstore') def test_deny_bucket_actions_in_user_policy(): client = get_iam_client() @@ -589,7 +629,9 @@ def test_deny_bucket_actions_in_user_policy(): @attr(operation='Verify Allow Object Actions in user Policy') @attr(assertion='succeeds') @attr('user-policy') +@pytest.mark.user_policy @attr('test_of_iam') +@pytest.mark.test_of_iam def test_allow_object_actions_in_user_policy(): client = get_iam_client() s3_client_alt = get_alt_client() @@ -632,7 +674,9 @@ def test_allow_object_actions_in_user_policy(): @attr(operation='Verify Deny Object Actions in user Policy') @attr(assertion='succeeds') @attr('user-policy') +@pytest.mark.user_policy @attr('test_of_iam') +@pytest.mark.test_of_iam @attr('fails_on_dbstore') def test_deny_object_actions_in_user_policy(): client = get_iam_client() @@ -676,7 +720,9 @@ def test_deny_object_actions_in_user_policy(): @attr(operation='Verify Allow Multipart Actions in user Policy') @attr(assertion='succeeds') @attr('user-policy') +@pytest.mark.user_policy @attr('test_of_iam') +@pytest.mark.test_of_iam def test_allow_multipart_actions_in_user_policy(): client = get_iam_client() s3_client_alt = get_alt_client() @@ -715,7 +761,9 @@ def test_allow_multipart_actions_in_user_policy(): @attr(operation='Verify Deny Multipart Actions in user Policy') @attr(assertion='succeeds') @attr('user-policy') 
+@pytest.mark.user_policy @attr('test_of_iam') +@pytest.mark.test_of_iam @attr('fails_on_dbstore') def test_deny_multipart_actions_in_user_policy(): client = get_iam_client() @@ -762,7 +810,9 @@ def test_deny_multipart_actions_in_user_policy(): @attr(operation='Verify Allow Tagging Actions in user Policy') @attr(assertion='succeeds') @attr('user-policy') +@pytest.mark.user_policy @attr('test_of_iam') +@pytest.mark.test_of_iam @attr('fails_on_dbstore') def test_allow_tagging_actions_in_user_policy(): client = get_iam_client() @@ -812,7 +862,9 @@ def test_allow_tagging_actions_in_user_policy(): @attr(operation='Verify Deny Tagging Actions in user Policy') @attr(assertion='succeeds') @attr('user-policy') +@pytest.mark.user_policy @attr('test_of_iam') +@pytest.mark.test_of_iam @attr('fails_on_dbstore') def test_deny_tagging_actions_in_user_policy(): client = get_iam_client() @@ -868,7 +920,9 @@ def test_deny_tagging_actions_in_user_policy(): @attr(operation='Verify conflicting user policy statements') @attr(assertion='succeeds') @attr('user-policy') +@pytest.mark.user_policy @attr('test_of_iam') +@pytest.mark.test_of_iam @attr('fails_on_dbstore') def test_verify_conflicting_user_policy_statements(): s3client = get_alt_client() @@ -904,7 +958,9 @@ def test_verify_conflicting_user_policy_statements(): @attr(operation='Verify conflicting user policies') @attr(assertion='succeeds') @attr('user-policy') +@pytest.mark.user_policy @attr('test_of_iam') +@pytest.mark.test_of_iam @attr('fails_on_dbstore') def test_verify_conflicting_user_policies(): s3client = get_alt_client() @@ -946,7 +1002,9 @@ def test_verify_conflicting_user_policies(): @attr(operation='Verify Allow Actions for IAM user policies') @attr(assertion='succeeds') @attr('user-policy') +@pytest.mark.user_policy @attr('test_of_iam') +@pytest.mark.test_of_iam def test_verify_allow_iam_actions(): policy1 = json.dumps( {"Version": "2012-10-17", diff --git a/s3tests_boto3/functional/test_s3.py 
b/s3tests_boto3/functional/test_s3.py index 578255e..ce96155 100644 --- a/s3tests_boto3/functional/test_s3.py +++ b/s3tests_boto3/functional/test_s3.py @@ -21,6 +21,7 @@ import xml.etree.ElementTree as ET import time import operator import nose +import pytest import os import string import random @@ -109,6 +110,7 @@ def test_bucket_list_empty(): @attr(operation='list') @attr(assertion='distinct buckets have different contents') @attr('list-objects-v2') +@pytest.mark.list_objects_v2 def test_bucket_list_distinct(): bucket1 = get_new_bucket_resource() bucket2 = get_new_bucket_resource() @@ -156,6 +158,7 @@ def _get_prefixes(response): @attr(operation='list all keys') @attr(assertion='pagination w/max_keys=2, no marker') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_bucket_list_many(): bucket_name = _create_objects(keys=['foo', 'bar', 'baz']) client = get_client() @@ -177,7 +180,9 @@ def test_bucket_list_many(): @attr(operation='list all keys') @attr(assertion='pagination w/max_keys=2, no marker') @attr('list-objects-v2') +@pytest.mark.list_objects_v2 @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_bucket_listv2_many(): bucket_name = _create_objects(keys=['foo', 'bar', 'baz']) client = get_client() @@ -199,6 +204,7 @@ def test_bucket_listv2_many(): @attr(operation='list') @attr(assertion='keycount in listobjectsv2') @attr('list-objects-v2') +@pytest.mark.list_objects_v2 def test_basic_key_count(): client = get_client() bucket_names = [] @@ -231,6 +237,7 @@ def test_bucket_list_delimiter_basic(): @attr(operation='list') @attr(assertion='prefixes in multi-component object names') @attr('list-objects-v2') +@pytest.mark.list_objects_v2 def test_bucket_listv2_delimiter_basic(): bucket_name = _create_objects(keys=['foo/bar', 'foo/bar/xyzzy', 'quux/thud', 'asdf']) client = get_client() @@ -251,6 +258,7 @@ def test_bucket_listv2_delimiter_basic(): @attr(operation='list') @attr(assertion='test url encoding') @attr('list-objects-v2') 
+@pytest.mark.list_objects_v2 def test_bucket_listv2_encoding_basic(): bucket_name = _create_objects(keys=['foo+1/bar', 'foo/bar/xyzzy', 'quux ab/thud', 'asdf+b']) client = get_client() @@ -335,6 +343,7 @@ def validate_bucket_listv2(bucket_name, prefix, delimiter, continuation_token, m @attr(operation='list') @attr(assertion='prefixes in multi-component object names') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_bucket_list_delimiter_prefix(): bucket_name = _create_objects(keys=['asdf', 'boo/bar', 'boo/baz/xyzzy', 'cquux/thud', 'cquux/bla']) @@ -361,7 +370,9 @@ def test_bucket_list_delimiter_prefix(): @attr(operation='list') @attr(assertion='prefixes in multi-component object names') @attr('list-objects-v2') +@pytest.mark.list_objects_v2 @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_bucket_listv2_delimiter_prefix(): bucket_name = _create_objects(keys=['asdf', 'boo/bar', 'boo/baz/xyzzy', 'cquux/thud', 'cquux/bla']) @@ -389,6 +400,7 @@ def test_bucket_listv2_delimiter_prefix(): @attr(operation='list') @attr(assertion='prefix and delimiter handling when object ends with delimiter') @attr('list-objects-v2') +@pytest.mark.list_objects_v2 def test_bucket_listv2_delimiter_prefix_ends_with_delimiter(): bucket_name = _create_objects(keys=['asdf/']) validate_bucket_listv2(bucket_name, 'asdf/', '/', None, 1000, False, ['asdf/'], [], last=True) @@ -425,6 +437,7 @@ def test_bucket_list_delimiter_alt(): @attr(method='get') @attr(assertion='non-slash delimiter characters') @attr('list-objects-v2') +@pytest.mark.list_objects_v2 def test_bucket_listv2_delimiter_alt(): bucket_name = _create_objects(keys=['bar', 'baz', 'cab', 'foo']) client = get_client() @@ -446,6 +459,7 @@ def test_bucket_listv2_delimiter_alt(): @attr(operation='list') @attr(assertion='prefixes starting with underscore') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_bucket_list_delimiter_prefix_underscore(): bucket_name = 
_create_objects(keys=['_obj1_','_under1/bar', '_under1/baz/xyzzy', '_under2/thud', '_under2/bla']) @@ -471,7 +485,9 @@ def test_bucket_list_delimiter_prefix_underscore(): @attr(operation='list') @attr(assertion='prefixes starting with underscore') @attr('list-objects-v2') +@pytest.mark.list_objects_v2 @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_bucket_listv2_delimiter_prefix_underscore(): bucket_name = _create_objects(keys=['_obj1_','_under1/bar', '_under1/baz/xyzzy', '_under2/thud', '_under2/bla']) @@ -516,6 +532,7 @@ def test_bucket_list_delimiter_percentage(): @attr(method='get') @attr(assertion='percentage delimiter characters') @attr('list-objects-v2') +@pytest.mark.list_objects_v2 def test_bucket_listv2_delimiter_percentage(): bucket_name = _create_objects(keys=['b%ar', 'b%az', 'c%ab', 'foo']) client = get_client() @@ -554,6 +571,7 @@ def test_bucket_list_delimiter_whitespace(): @attr(method='get') @attr(assertion='whitespace delimiter characters') @attr('list-objects-v2') +@pytest.mark.list_objects_v2 def test_bucket_listv2_delimiter_whitespace(): bucket_name = _create_objects(keys=['b ar', 'b az', 'c ab', 'foo']) client = get_client() @@ -592,6 +610,7 @@ def test_bucket_list_delimiter_dot(): @attr(method='get') @attr(assertion='dot delimiter characters') @attr('list-objects-v2') +@pytest.mark.list_objects_v2 def test_bucket_listv2_delimiter_dot(): bucket_name = _create_objects(keys=['b.ar', 'b.az', 'c.ab', 'foo']) client = get_client() @@ -628,6 +647,7 @@ def test_bucket_list_delimiter_unreadable(): @attr(method='get') @attr(assertion='non-printable delimiter can be specified') @attr('list-objects-v2') +@pytest.mark.list_objects_v2 def test_bucket_listv2_delimiter_unreadable(): key_names=['bar', 'baz', 'cab', 'foo'] bucket_name = _create_objects(keys=key_names) @@ -663,6 +683,7 @@ def test_bucket_list_delimiter_empty(): @attr(method='get') @attr(assertion='empty delimiter can be specified') @attr('list-objects-v2') 
+@pytest.mark.list_objects_v2 def test_bucket_listv2_delimiter_empty(): key_names = ['bar', 'baz', 'cab', 'foo'] bucket_name = _create_objects(keys=key_names) @@ -699,6 +720,7 @@ def test_bucket_list_delimiter_none(): @attr(method='get') @attr(assertion='unspecified delimiter defaults to none') @attr('list-objects-v2') +@pytest.mark.list_objects_v2 def test_bucket_listv2_delimiter_none(): key_names = ['bar', 'baz', 'cab', 'foo'] bucket_name = _create_objects(keys=key_names) @@ -714,6 +736,7 @@ def test_bucket_listv2_delimiter_none(): eq(prefixes, []) @attr('list-objects-v2') +@pytest.mark.list_objects_v2 def test_bucket_listv2_fetchowner_notempty(): key_names = ['foo/bar', 'foo/baz', 'quux'] bucket_name = _create_objects(keys=key_names) @@ -724,6 +747,7 @@ def test_bucket_listv2_fetchowner_notempty(): eq('Owner' in objs_list[0], True) @attr('list-objects-v2') +@pytest.mark.list_objects_v2 def test_bucket_listv2_fetchowner_defaultempty(): key_names = ['foo/bar', 'foo/baz', 'quux'] bucket_name = _create_objects(keys=key_names) @@ -734,6 +758,7 @@ def test_bucket_listv2_fetchowner_defaultempty(): eq('Owner' in objs_list[0], False) @attr('list-objects-v2') +@pytest.mark.list_objects_v2 def test_bucket_listv2_fetchowner_empty(): key_names = ['foo/bar', 'foo/baz', 'quux'] bucket_name = _create_objects(keys=key_names) @@ -768,6 +793,7 @@ def test_bucket_list_delimiter_not_exist(): @attr(method='get') @attr(assertion='unused delimiter is not found') @attr('list-objects-v2') +@pytest.mark.list_objects_v2 def test_bucket_listv2_delimiter_not_exist(): key_names = ['bar', 'baz', 'cab', 'foo'] bucket_name = _create_objects(keys=key_names) @@ -788,6 +814,7 @@ def test_bucket_listv2_delimiter_not_exist(): @attr(operation='list') @attr(assertion='list with delimiter not skip special keys') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_bucket_list_delimiter_not_skip_special(): key_names = ['0/'] + ['0/%s' % i for i in range(1000, 1999)] key_names2 = ['1999', 
'1999#', '1999+', '2000'] @@ -825,6 +852,7 @@ def test_bucket_list_prefix_basic(): @attr(operation='list under prefix with list-objects-v2') @attr(assertion='returns only objects under prefix') @attr('list-objects-v2') +@pytest.mark.list_objects_v2 def test_bucket_listv2_prefix_basic(): key_names = ['foo/bar', 'foo/baz', 'quux'] bucket_name = _create_objects(keys=key_names) @@ -861,6 +889,7 @@ def test_bucket_list_prefix_alt(): @attr(operation='list under prefix with list-objects-v2') @attr(assertion='prefixes w/o delimiters') @attr('list-objects-v2') +@pytest.mark.list_objects_v2 def test_bucket_listv2_prefix_alt(): key_names = ['bar', 'baz', 'foo'] bucket_name = _create_objects(keys=key_names) @@ -896,6 +925,7 @@ def test_bucket_list_prefix_empty(): @attr(operation='list under prefix with list-objects-v2') @attr(assertion='empty prefix returns everything') @attr('list-objects-v2') +@pytest.mark.list_objects_v2 def test_bucket_listv2_prefix_empty(): key_names = ['foo/bar', 'foo/baz', 'quux'] bucket_name = _create_objects(keys=key_names) @@ -931,6 +961,7 @@ def test_bucket_list_prefix_none(): @attr(operation='list under prefix with list-objects-v2') @attr(assertion='unspecified prefix returns everything') @attr('list-objects-v2') +@pytest.mark.list_objects_v2 def test_bucket_listv2_prefix_none(): key_names = ['foo/bar', 'foo/baz', 'quux'] bucket_name = _create_objects(keys=key_names) @@ -966,6 +997,7 @@ def test_bucket_list_prefix_not_exist(): @attr(operation='list under prefix with list-objects-v2') @attr(assertion='nonexistent prefix returns nothing') @attr('list-objects-v2') +@pytest.mark.list_objects_v2 def test_bucket_listv2_prefix_not_exist(): key_names = ['foo/bar', 'foo/baz', 'quux'] bucket_name = _create_objects(keys=key_names) @@ -1001,6 +1033,7 @@ def test_bucket_list_prefix_unreadable(): @attr(operation='list under prefix with list-objects-v2') @attr(assertion='non-printable prefix can be specified') @attr('list-objects-v2') 
+@pytest.mark.list_objects_v2 def test_bucket_listv2_prefix_unreadable(): key_names = ['foo/bar', 'foo/baz', 'quux'] bucket_name = _create_objects(keys=key_names) @@ -1037,6 +1070,7 @@ def test_bucket_list_prefix_delimiter_basic(): @attr(operation='list-objects-v2 under prefix w/delimiter') @attr(assertion='returns only objects directly under prefix') @attr('list-objects-v2') +@pytest.mark.list_objects_v2 def test_bucket_listv2_prefix_delimiter_basic(): key_names = ['foo/bar', 'foo/baz/xyzzy', 'quux/thud', 'asdf'] bucket_name = _create_objects(keys=key_names) @@ -1070,6 +1104,7 @@ def test_bucket_list_prefix_delimiter_alt(): eq(prefixes, ['baza']) @attr('list-objects-v2') +@pytest.mark.list_objects_v2 def test_bucket_listv2_prefix_delimiter_alt(): key_names = ['bar', 'bazar', 'cab', 'foo'] bucket_name = _create_objects(keys=key_names) @@ -1105,6 +1140,7 @@ def test_bucket_list_prefix_delimiter_prefix_not_exist(): @attr(operation='list-objects-v2 under prefix w/delimiter') @attr(assertion='finds nothing w/unmatched prefix') @attr('list-objects-v2') +@pytest.mark.list_objects_v2 def test_bucket_listv2_prefix_delimiter_prefix_not_exist(): key_names = ['b/a/r', 'b/a/c', 'b/a/g', 'g'] bucket_name = _create_objects(keys=key_names) @@ -1138,6 +1174,7 @@ def test_bucket_list_prefix_delimiter_delimiter_not_exist(): @attr(operation='list-objects-v2 under prefix w/delimiter') @attr(assertion='over-ridden slash ceases to be a delimiter') @attr('list-objects-v2') +@pytest.mark.list_objects_v2 def test_bucket_listv2_prefix_delimiter_delimiter_not_exist(): key_names = ['b/a/c', 'b/a/g', 'b/a/r', 'g'] bucket_name = _create_objects(keys=key_names) @@ -1171,6 +1208,7 @@ def test_bucket_list_prefix_delimiter_prefix_delimiter_not_exist(): @attr(operation='list-objects-v2 under prefix w/delimiter') @attr(assertion='finds nothing w/unmatched prefix and delimiter') @attr('list-objects-v2') +@pytest.mark.list_objects_v2 def test_bucket_listv2_prefix_delimiter_prefix_delimiter_not_exist(): 
key_names = ['b/a/c', 'b/a/g', 'b/a/r', 'g'] bucket_name = _create_objects(keys=key_names) @@ -1188,6 +1226,7 @@ def test_bucket_listv2_prefix_delimiter_prefix_delimiter_not_exist(): @attr(operation='list all keys') @attr(assertion='pagination w/max_keys=1, marker') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_bucket_list_maxkeys_one(): key_names = ['bar', 'baz', 'foo', 'quxx'] bucket_name = _create_objects(keys=key_names) @@ -1210,7 +1249,9 @@ def test_bucket_list_maxkeys_one(): @attr(operation='list all keys with list-objects-v2') @attr(assertion='pagination w/max_keys=1, marker') @attr('list-objects-v2') +@pytest.mark.list_objects_v2 @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_bucket_listv2_maxkeys_one(): key_names = ['bar', 'baz', 'foo', 'quxx'] bucket_name = _create_objects(keys=key_names) @@ -1248,6 +1289,7 @@ def test_bucket_list_maxkeys_zero(): @attr(operation='list all keys with list-objects-v2') @attr(assertion='pagination w/max_keys=0') @attr('list-objects-v2') +@pytest.mark.list_objects_v2 def test_bucket_listv2_maxkeys_zero(): key_names = ['bar', 'baz', 'foo', 'quxx'] bucket_name = _create_objects(keys=key_names) @@ -1279,6 +1321,7 @@ def test_bucket_list_maxkeys_none(): @attr(operation='list all keys with list-objects-v2') @attr(assertion='pagination w/o max_keys') @attr('list-objects-v2') +@pytest.mark.list_objects_v2 def test_bucket_listv2_maxkeys_none(): key_names = ['bar', 'baz', 'foo', 'quxx'] bucket_name = _create_objects(keys=key_names) @@ -1313,6 +1356,7 @@ def parseXmlToJson(xml): @attr(operation='get usage by client') @attr(assertion='account usage api') @attr('fails_on_aws') # allow-unordered is a non-standard extension +@pytest.mark.fails_on_aws def test_account_usage(): # boto3.set_stream_logger(name='botocore') client = get_client() @@ -1336,7 +1380,9 @@ def test_account_usage(): @attr(operation='get usage by client') @attr(assertion='account usage by head bucket') @attr('fails_on_aws') # 
allow-unordered is a non-standard extension +@pytest.mark.fails_on_aws @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_head_bucket_usage(): # boto3.set_stream_logger(name='botocore') client = get_client() @@ -1358,7 +1404,9 @@ def test_head_bucket_usage(): @attr(operation='list all keys') @attr(assertion='bucket list unordered') @attr('fails_on_aws') # allow-unordered is a non-standard extension +@pytest.mark.fails_on_aws @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_bucket_list_unordered(): # boto3.set_stream_logger(name='botocore') keys_in = ['ado', 'bot', 'cob', 'dog', 'emu', 'fez', 'gnu', 'hex', @@ -1415,8 +1463,11 @@ def test_bucket_list_unordered(): @attr(operation='list all keys with list-objects-v2') @attr(assertion='bucket list unordered') @attr('fails_on_aws') # allow-unordered is a non-standard extension +@pytest.mark.fails_on_aws @attr('list-objects-v2') +@pytest.mark.list_objects_v2 @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_bucket_listv2_unordered(): # boto3.set_stream_logger(name='botocore') keys_in = ['ado', 'bot', 'cob', 'dog', 'emu', 'fez', 'gnu', 'hex', @@ -1524,6 +1575,7 @@ def test_bucket_list_marker_empty(): @attr(operation='list all keys with list-objects-v2') @attr(assertion='no pagination, empty continuationtoken') @attr('list-objects-v2') +@pytest.mark.list_objects_v2 def test_bucket_listv2_continuationtoken_empty(): key_names = ['bar', 'baz', 'foo', 'quxx'] bucket_name = _create_objects(keys=key_names) @@ -1540,6 +1592,7 @@ def test_bucket_listv2_continuationtoken_empty(): @attr(operation='list keys with list-objects-v2') @attr(assertion='no pagination, non-empty continuationtoken') @attr('list-objects-v2') +@pytest.mark.list_objects_v2 def test_bucket_listv2_continuationtoken(): key_names = ['bar', 'baz', 'foo', 'quxx'] bucket_name = _create_objects(keys=key_names) @@ -1560,7 +1613,9 @@ def test_bucket_listv2_continuationtoken(): @attr(operation='list keys with 
list-objects-v2') @attr(assertion='no pagination, non-empty continuationtoken and startafter') @attr('list-objects-v2') +@pytest.mark.list_objects_v2 @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_bucket_listv2_both_continuationtoken_startafter(): key_names = ['bar', 'baz', 'foo', 'quxx'] bucket_name = _create_objects(keys=key_names) @@ -1597,6 +1652,7 @@ def test_bucket_list_marker_unreadable(): @attr(operation='list all keys with list-objects-v2') @attr(assertion='non-printing startafter') @attr('list-objects-v2') +@pytest.mark.list_objects_v2 def test_bucket_listv2_startafter_unreadable(): key_names = ['bar', 'baz', 'foo', 'quxx'] bucket_name = _create_objects(keys=key_names) @@ -1627,6 +1683,7 @@ def test_bucket_list_marker_not_in_list(): @attr(operation='list all keys with list-objects-v2') @attr(assertion='startafter not-in-list') @attr('list-objects-v2') +@pytest.mark.list_objects_v2 def test_bucket_listv2_startafter_not_in_list(): key_names = ['bar', 'baz', 'foo', 'quxx'] bucket_name = _create_objects(keys=key_names) @@ -1657,6 +1714,7 @@ def test_bucket_list_marker_after_list(): @attr(operation='list all keys with list-objects-v2') @attr(assertion='startafter after list') @attr('list-objects-v2') +@pytest.mark.list_objects_v2 def test_bucket_listv2_startafter_after_list(): key_names = ['bar', 'baz', 'foo', 'quxx'] bucket_name = _create_objects(keys=key_names) @@ -1682,6 +1740,7 @@ def _compare_dates(datetime1, datetime2): @attr(operation='compare w/bucket list') @attr(assertion='return same metadata') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_bucket_list_return_data(): key_names = ['bar', 'baz', 'foo'] bucket_name = _create_objects(keys=key_names) @@ -1719,6 +1778,7 @@ def test_bucket_list_return_data(): @attr(assertion='return same metadata') @attr('versioning') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_bucket_list_return_data_versioning(): bucket_name = get_new_bucket() 
check_configure_versioning_retry(bucket_name, "Enabled", "Enabled") @@ -1772,6 +1832,7 @@ def test_bucket_list_objects_anonymous(): @attr(operation='list all objects (anonymous) with list-objects-v2') @attr(assertion='succeeds') @attr('list-objects-v2') +@pytest.mark.list_objects_v2 def test_bucket_listv2_objects_anonymous(): bucket_name = get_new_bucket() client = get_client() @@ -1799,6 +1860,7 @@ def test_bucket_list_objects_anonymous_fail(): @attr(operation='list all objects (anonymous) with list-objects-v2') @attr(assertion='fails') @attr('list-objects-v2') +@pytest.mark.list_objects_v2 def test_bucket_listv2_objects_anonymous_fail(): bucket_name = get_new_bucket() @@ -1828,6 +1890,7 @@ def test_bucket_notexist(): @attr(operation='non-existant bucket with list-objects-v2') @attr(assertion='fails 404') @attr('list-objects-v2') +@pytest.mark.list_objects_v2 def test_bucketv2_notexist(): bucket_name = get_new_bucket_name() client = get_client() @@ -1961,6 +2024,7 @@ def get_http_response(**kwargs): @attr(operation='read contents that were never written to raise one error response') @attr(assertion='RequestId appears in the error response') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_requestid_matches_header_on_error(): bucket_name = get_new_bucket() client = get_client() @@ -2014,6 +2078,7 @@ def test_multi_object_delete(): @attr(operation='delete multiple objects with list-objects-v2') @attr(assertion='deletes multiple objects with a single call') @attr('list-objects-v2') +@pytest.mark.list_objects_v2 def test_multi_objectv2_delete(): key_names = ['key0', 'key1', 'key2'] bucket_name = _create_objects(keys=key_names) @@ -2208,6 +2273,7 @@ def test_object_set_get_metadata_overwrite_to_empty(): @attr(assertion='UTF-8 values passed through') # TODO: the decoding of this unicode metadata is not happening properly for unknown reasons @attr('fails_on_rgw') +@pytest.mark.fails_on_rgw def test_object_set_get_unicode_metadata(): bucket_name = 
get_new_bucket() client = get_client() @@ -3467,6 +3533,7 @@ def test_get_object_ifmodifiedsince_good(): @attr(operation='get w/ If-Modified-Since: after') @attr(assertion='fails 304') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_get_object_ifmodifiedsince_failed(): bucket_name = get_new_bucket() client = get_client() @@ -3492,6 +3559,7 @@ def test_get_object_ifmodifiedsince_failed(): @attr(operation='get w/ If-Unmodified-Since: before') @attr(assertion='fails 412') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_get_object_ifunmodifiedsince_good(): bucket_name = get_new_bucket() client = get_client() @@ -3521,6 +3589,7 @@ def test_get_object_ifunmodifiedsince_failed(): @attr(operation='data re-write w/ If-Match: the latest ETag') @attr(assertion='replaces previous data and metadata') @attr('fails_on_aws') +@pytest.mark.fails_on_aws def test_put_object_ifmatch_good(): bucket_name = get_new_bucket() client = get_client() @@ -3546,6 +3615,7 @@ def test_put_object_ifmatch_good(): @attr(operation='get w/ If-Match: bogus ETag') @attr(assertion='fails 412') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_put_object_ifmatch_failed(): bucket_name = get_new_bucket() client = get_client() @@ -3572,6 +3642,7 @@ def test_put_object_ifmatch_failed(): @attr(operation='overwrite existing object w/ If-Match: *') @attr(assertion='replaces previous data and metadata') @attr('fails_on_aws') +@pytest.mark.fails_on_aws def test_put_object_ifmatch_overwrite_existed_good(): bucket_name = get_new_bucket() client = get_client() @@ -3593,7 +3664,9 @@ def test_put_object_ifmatch_overwrite_existed_good(): @attr(operation='overwrite non-existing object w/ If-Match: *') @attr(assertion='fails 412') @attr('fails_on_aws') +@pytest.mark.fails_on_aws @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_put_object_ifmatch_nonexisted_failed(): bucket_name = get_new_bucket() client = get_client() @@ -3615,6 +3688,7 @@ def 
test_put_object_ifmatch_nonexisted_failed(): @attr(operation='overwrite existing object w/ If-None-Match: outdated ETag') @attr(assertion='replaces previous data and metadata') @attr('fails_on_aws') +@pytest.mark.fails_on_aws def test_put_object_ifnonmatch_good(): bucket_name = get_new_bucket() client = get_client() @@ -3636,7 +3710,9 @@ def test_put_object_ifnonmatch_good(): @attr(operation='overwrite existing object w/ If-None-Match: the latest ETag') @attr(assertion='fails 412') @attr('fails_on_aws') +@pytest.mark.fails_on_aws @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_put_object_ifnonmatch_failed(): bucket_name = get_new_bucket() client = get_client() @@ -3665,6 +3741,7 @@ def test_put_object_ifnonmatch_failed(): @attr(operation='overwrite non-existing object w/ If-None-Match: *') @attr(assertion='succeeds') @attr('fails_on_aws') +@pytest.mark.fails_on_aws def test_put_object_ifnonmatch_nonexisted_good(): bucket_name = get_new_bucket() client = get_client() @@ -3682,7 +3759,9 @@ def test_put_object_ifnonmatch_nonexisted_good(): @attr(operation='overwrite existing object w/ If-None-Match: *') @attr(assertion='fails 412') @attr('fails_on_aws') +@pytest.mark.fails_on_aws @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_put_object_ifnonmatch_overwrite_existed_failed(): bucket_name = get_new_bucket() client = get_client() @@ -3819,11 +3898,13 @@ def test_bucket_head_notexist(): #eq(error_code, 'NoSuchKey') @attr('fails_on_aws') +@pytest.mark.fails_on_aws @attr(resource='bucket') @attr(method='head') @attr(operation='read bucket extended information') @attr(assertion='extended information is getting updated') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_bucket_head_extended(): bucket_name = get_new_bucket() client = get_client() @@ -4073,6 +4154,7 @@ def check_bad_bucket_name(bucket_name): # AWS does not enforce all documented bucket restrictions. 
# http://docs.amazonwebservices.com/AmazonS3/2006-03-01/dev/index.html?BucketRestrictions.html @attr('fails_on_aws') +@pytest.mark.fails_on_aws # Breaks DNS with SubdomainCallingFormat @attr('fails_with_subdomain') @attr(resource='bucket') @@ -4168,6 +4250,7 @@ def _test_bucket_create_naming_good_long(length): @attr(operation='create w/60 byte name') @attr(assertion='fails with subdomain') @attr('fails_on_aws') # InvalidBucketNameThe specified bucket is not valid.... +@pytest.mark.fails_on_aws # Should now pass on AWS even though it has 'fails_on_aws' attr. def test_bucket_create_naming_good_long_60(): _test_bucket_create_naming_good_long(60) @@ -4179,6 +4262,7 @@ def test_bucket_create_naming_good_long_60(): @attr(operation='create w/61 byte name') @attr(assertion='fails with subdomain') @attr('fails_on_aws') # InvalidBucketNameThe specified bucket is not valid.... +@pytest.mark.fails_on_aws # Should now pass on AWS even though it has 'fails_on_aws' attr. def test_bucket_create_naming_good_long_61(): _test_bucket_create_naming_good_long(61) @@ -4190,6 +4274,7 @@ def test_bucket_create_naming_good_long_61(): @attr(operation='create w/62 byte name') @attr(assertion='fails with subdomain') @attr('fails_on_aws') # InvalidBucketNameThe specified bucket is not valid.... +@pytest.mark.fails_on_aws # Should now pass on AWS even though it has 'fails_on_aws' attr. def test_bucket_create_naming_good_long_62(): _test_bucket_create_naming_good_long(62) @@ -4212,6 +4297,7 @@ def test_bucket_create_naming_good_long_63(): @attr(operation='list w/61 byte name') @attr(assertion='fails with subdomain') @attr('fails_on_aws') # InvalidBucketNameThe specified bucket is not valid.... +@pytest.mark.fails_on_aws # Should now pass on AWS even though it has 'fails_on_aws' attr. def test_bucket_list_long_name(): prefix = get_new_bucket_name() @@ -4230,6 +4316,7 @@ def test_bucket_list_long_name(): # AWS does not enforce all documented bucket restrictions. 
# http://docs.amazonwebservices.com/AmazonS3/2006-03-01/dev/index.html?BucketRestrictions.html @attr('fails_on_aws') +@pytest.mark.fails_on_aws @attr(resource='bucket') @attr(method='put') @attr(operation='create w/ip address for name') @@ -4243,6 +4330,7 @@ def test_bucket_create_naming_bad_ip(): @attr(operation='create w/underscore in name') @attr(assertion='fails') @attr('fails_on_aws') # InvalidBucketNameThe specified bucket is not valid.... +@pytest.mark.fails_on_aws # Should now pass on AWS even though it has 'fails_on_aws' attr. def test_bucket_create_naming_dns_underscore(): invalid_bucketname = 'foo_bar' @@ -4257,6 +4345,7 @@ def test_bucket_create_naming_dns_underscore(): @attr(operation='create w/100 byte name') @attr(assertion='fails with subdomain') @attr('fails_on_aws') # InvalidBucketNameThe specified bucket is not valid.... +@pytest.mark.fails_on_aws def test_bucket_create_naming_dns_long(): prefix = get_prefix() assert len(prefix) < 50 @@ -4270,6 +4359,7 @@ def test_bucket_create_naming_dns_long(): @attr(operation='create w/dash at end of name') @attr(assertion='fails') @attr('fails_on_aws') # InvalidBucketNameThe specified bucket is not valid.... +@pytest.mark.fails_on_aws # Should now pass on AWS even though it has 'fails_on_aws' attr. def test_bucket_create_naming_dns_dash_at_end(): invalid_bucketname = 'foo-' @@ -4285,6 +4375,7 @@ def test_bucket_create_naming_dns_dash_at_end(): @attr(operation='create w/.. in name') @attr(assertion='fails') @attr('fails_on_aws') # InvalidBucketNameThe specified bucket is not valid.... +@pytest.mark.fails_on_aws # Should now pass on AWS even though it has 'fails_on_aws' attr. def test_bucket_create_naming_dns_dot_dot(): invalid_bucketname = 'foo..bar' @@ -4300,6 +4391,7 @@ def test_bucket_create_naming_dns_dot_dot(): @attr(operation='create w/.- in name') @attr(assertion='fails') @attr('fails_on_aws') # InvalidBucketNameThe specified bucket is not valid.... 
+@pytest.mark.fails_on_aws # Should now pass on AWS even though it has 'fails_on_aws' attr. def test_bucket_create_naming_dns_dot_dash(): invalid_bucketname = 'foo.-bar' @@ -4315,6 +4407,7 @@ def test_bucket_create_naming_dns_dot_dash(): @attr(operation='create w/-. in name') @attr(assertion='fails') @attr('fails_on_aws') # InvalidBucketNameThe specified bucket is not valid.... +@pytest.mark.fails_on_aws # Should now pass on AWS even though it has 'fails_on_aws' attr. def test_bucket_create_naming_dns_dash_dot(): invalid_bucketname = 'foo-.bar' @@ -4343,6 +4436,7 @@ def test_bucket_create_exists(): @attr(method='get') @attr(operation='get location') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_bucket_get_location(): location_constraint = get_main_api_name() if not location_constraint: @@ -4362,6 +4456,7 @@ def test_bucket_get_location(): @attr(operation='re-create by non-owner') @attr(assertion='fails 409') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_bucket_create_exists_nonowner(): # Names are shared across a global namespace. As such, no two # users can create a bucket with that same name. 
@@ -4381,6 +4476,7 @@ def test_bucket_create_exists_nonowner(): @attr(operation='re-create with existing acl') @attr(assertion='fails 409') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_bucket_recreate_overwrite_acl(): bucket_name = get_new_bucket_name() client = get_client() @@ -4396,6 +4492,7 @@ def test_bucket_recreate_overwrite_acl(): @attr(operation='re-create with new acl') @attr(assertion='fails 409') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_bucket_recreate_new_acl(): bucket_name = get_new_bucket_name() client = get_client() @@ -4472,6 +4569,7 @@ def test_bucket_acl_default(): @attr(operation='public-read acl') @attr(assertion='read back expected defaults') @attr('fails_on_aws') # IllegalLocationConstraintExceptionThe unspecified location constraint is incompatible for the region specific endpoint this request was sent to. +@pytest.mark.fails_on_aws def test_bucket_acl_canned_during_create(): bucket_name = get_new_bucket_name() client = get_client() @@ -4941,6 +5039,7 @@ def test_object_acl_canned_bucketownerfullcontrol(): @attr(operation='set write-acp') @attr(assertion='does not modify owner') @attr('fails_on_aws') # InvalidArgumentInvalid idCanonicalUser/ID${ALTUSER} +@pytest.mark.fails_on_aws def test_object_acl_full_control_verify_owner(): bucket_name = get_new_bucket_name() main_client = get_client() @@ -5100,6 +5199,7 @@ def _check_object_acl(permission): @attr(operation='set acl FULL_CONTRO') @attr(assertion='reads back correctly') @attr('fails_on_aws') # InvalidArgumentInvalid idCanonicalUser/ID${USER} +@pytest.mark.fails_on_aws def test_object_acl(): _check_object_acl('FULL_CONTROL') @@ -5108,6 +5208,7 @@ def test_object_acl(): @attr(operation='set acl WRITE') @attr(assertion='reads back correctly') @attr('fails_on_aws') # InvalidArgumentInvalid idCanonicalUser/ID${USER} +@pytest.mark.fails_on_aws def test_object_acl_write(): _check_object_acl('WRITE') @@ -5116,6 +5217,7 @@ def test_object_acl_write(): 
@attr(operation='set acl WRITE_ACP') @attr(assertion='reads back correctly') @attr('fails_on_aws') # InvalidArgumentInvalid idCanonicalUser/ID${USER} +@pytest.mark.fails_on_aws def test_object_acl_writeacp(): _check_object_acl('WRITE_ACP') @@ -5125,6 +5227,7 @@ def test_object_acl_writeacp(): @attr(operation='set acl READ') @attr(assertion='reads back correctly') @attr('fails_on_aws') # InvalidArgumentInvalid idCanonicalUser/ID${USER} +@pytest.mark.fails_on_aws def test_object_acl_read(): _check_object_acl('READ') @@ -5134,6 +5237,7 @@ def test_object_acl_read(): @attr(operation='set acl READ_ACP') @attr(assertion='reads back correctly') @attr('fails_on_aws') # InvalidArgumentInvalid idCanonicalUser/ID${USER} +@pytest.mark.fails_on_aws def test_object_acl_readacp(): _check_object_acl('READ_ACP') @@ -5247,6 +5351,7 @@ def _check_bucket_acl_grant_cant_writeacp(bucket_name): @attr(operation='set acl w/userid FULL_CONTROL') @attr(assertion='can read/write data/acls') @attr('fails_on_aws') # InvalidArgumentInvalid idCanonicalUser/ID${USER} +@pytest.mark.fails_on_aws def test_bucket_acl_grant_userid_fullcontrol(): bucket_name = _bucket_acl_grant_userid('FULL_CONTROL') @@ -5276,6 +5381,7 @@ def test_bucket_acl_grant_userid_fullcontrol(): @attr(operation='set acl w/userid READ') @attr(assertion='can read data, no other r/w') @attr('fails_on_aws') # InvalidArgumentInvalid idCanonicalUser/ID${ALTUSER} +@pytest.mark.fails_on_aws def test_bucket_acl_grant_userid_read(): bucket_name = _bucket_acl_grant_userid('READ') @@ -5293,6 +5399,7 @@ def test_bucket_acl_grant_userid_read(): @attr(operation='set acl w/userid READ_ACP') @attr(assertion='can read acl, no other r/w') @attr('fails_on_aws') # InvalidArgumentInvalid idCanonicalUser/ID${ALTUSER} +@pytest.mark.fails_on_aws def test_bucket_acl_grant_userid_readacp(): bucket_name = _bucket_acl_grant_userid('READ_ACP') @@ -5311,6 +5418,7 @@ def test_bucket_acl_grant_userid_readacp(): @attr(operation='set acl w/userid WRITE') 
@attr(assertion='can write data, no other r/w') @attr('fails_on_aws') # InvalidArgumentInvalid idCanonicalUser/ID${ALTUSER} +@pytest.mark.fails_on_aws def test_bucket_acl_grant_userid_write(): bucket_name = _bucket_acl_grant_userid('WRITE') @@ -5328,6 +5436,7 @@ def test_bucket_acl_grant_userid_write(): @attr(operation='set acl w/userid WRITE_ACP') @attr(assertion='can write acls, no other r/w') @attr('fails_on_aws') # InvalidArgumentInvalid idCanonicalUser/ID${ALTUSER} +@pytest.mark.fails_on_aws def test_bucket_acl_grant_userid_writeacp(): bucket_name = _bucket_acl_grant_userid('WRITE_ACP') @@ -5422,7 +5531,9 @@ def _get_acl_header(user_id=None, perms=None): @attr(operation='add all grants to user through headers') @attr(assertion='adds all grants individually to second user') @attr('fails_on_dho') +@pytest.mark.fails_on_dho @attr('fails_on_aws') # InvalidArgumentInvalid idCanonicalUser/ID${ALTUSER} +@pytest.mark.fails_on_aws def test_object_header_acl_grants(): bucket_name = get_new_bucket() client = get_client() @@ -5494,7 +5605,9 @@ def test_object_header_acl_grants(): @attr(operation='add all grants to user through headers') @attr(assertion='adds all grants individually to second user') @attr('fails_on_dho') +@pytest.mark.fails_on_dho @attr('fails_on_aws') # InvalidArgumentInvalid idCanonicalUser/ID${ALTUSER} +@pytest.mark.fails_on_aws def test_bucket_header_acl_grants(): headers = _get_acl_header() bucket_name = get_new_bucket_name() @@ -5578,6 +5691,7 @@ def test_bucket_header_acl_grants(): @attr(operation='add second FULL_CONTROL user') @attr(assertion='works for S3, fails for DHO') @attr('fails_on_aws') # AmbiguousGrantByEmailAddressThe e-mail address you provided is associated with more than one account. Please retry your request using a different identification method or after resolving the ambiguity. 
+@pytest.mark.fails_on_aws def test_bucket_acl_grant_email(): bucket_name = get_new_bucket() client = get_client() @@ -5678,6 +5792,7 @@ def test_bucket_acl_revoke_all(): @attr(operation='set/enable/disable logging target') @attr(assertion='operations succeed') @attr('fails_on_rgw') +@pytest.mark.fails_on_rgw def test_logging_toggle(): bucket_name = get_new_bucket() client = get_client() @@ -5771,6 +5886,7 @@ def test_access_bucket_private_object_private(): @attr(operation='set bucket/object acls: private/private with list-objects-v2') @attr(assertion='public has no access to bucket or objects') @attr('list-objects-v2') +@pytest.mark.list_objects_v2 def test_access_bucket_private_objectv2_private(): # all the test_access_* tests follow this template bucket_name, key1, key2, newkey = _setup_access(bucket_acl='private', object_acl='private') @@ -5827,6 +5943,7 @@ def test_access_bucket_private_object_publicread(): @attr(operation='set bucket/object acls: private/public-read with list-objects-v2') @attr(assertion='public can only read readable object') @attr('list-objects-v2') +@pytest.mark.list_objects_v2 def test_access_bucket_private_objectv2_publicread(): bucket_name, key1, key2, newkey = _setup_access(bucket_acl='private', object_acl='public-read') @@ -5876,6 +5993,7 @@ def test_access_bucket_private_object_publicreadwrite(): @attr(operation='set bucket/object acls: private/public-read/write with list-objects-v2') @attr(assertion='public can only read the readable object') @attr('list-objects-v2') +@pytest.mark.list_objects_v2 def test_access_bucket_private_objectv2_publicreadwrite(): bucket_name, key1, key2, newkey = _setup_access(bucket_acl='private', object_acl='public-read-write') alt_client = get_alt_client() @@ -6083,6 +6201,7 @@ def test_buckets_list_ctime(): @attr(operation='list all buckets (anonymous)') @attr(assertion='succeeds') @attr('fails_on_aws') +@pytest.mark.fails_on_aws def test_list_buckets_anonymous(): # Get a connection with bad 
authorization, then change it to be our new Anonymous auth mechanism, # emulating standard HTTP access. @@ -6178,6 +6297,7 @@ def test_bucket_recreate_not_overriding(): @attr(operation='create and list objects with special names') @attr(assertion='special names work') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_bucket_create_special_key_names(): key_names = [ ' ', @@ -6228,6 +6348,7 @@ def test_bucket_list_special_prefix(): @attr(operation='copy zero sized object in same bucket') @attr(assertion='works') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_copy_zero_size(): key = 'foo123bar' bucket_name = _create_objects(keys=[key]) @@ -6246,6 +6367,7 @@ def test_object_copy_zero_size(): @attr(operation='copy 16mb object in same bucket') @attr(assertion='works') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_copy_16m(): bucket_name = get_new_bucket() key1 = 'obj1' @@ -6263,6 +6385,7 @@ def test_object_copy_16m(): @attr(operation='copy object in same bucket') @attr(assertion='works') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_copy_same_bucket(): bucket_name = get_new_bucket() client = get_client() @@ -6281,6 +6404,7 @@ def test_object_copy_same_bucket(): @attr(operation='copy object with content-type') @attr(assertion='works') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_copy_verify_contenttype(): bucket_name = get_new_bucket() client = get_client() @@ -6319,6 +6443,7 @@ def test_object_copy_to_itself(): @attr(operation='modify object metadata by copying') @attr(assertion='fails') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_copy_to_itself_with_metadata(): bucket_name = get_new_bucket() client = get_client() @@ -6335,6 +6460,7 @@ def test_object_copy_to_itself_with_metadata(): @attr(operation='copy object from different bucket') @attr(assertion='works') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def 
test_object_copy_diff_bucket(): bucket_name1 = get_new_bucket() bucket_name2 = get_new_bucket() @@ -6400,6 +6526,7 @@ def test_object_copy_not_owned_object_bucket(): @attr(operation='copy object and change acl') @attr(assertion='works') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_copy_canned_acl(): bucket_name = get_new_bucket() client = get_client() @@ -6423,6 +6550,7 @@ def test_object_copy_canned_acl(): @attr(method='put') @attr(operation='copy object and retain metadata') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_copy_retaining_metadata(): for size in [3, 1024 * 1024]: bucket_name = get_new_bucket() @@ -6445,6 +6573,7 @@ def test_object_copy_retaining_metadata(): @attr(method='put') @attr(operation='copy object and replace metadata') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_copy_replacing_metadata(): for size in [3, 1024 * 1024]: bucket_name = get_new_bucket() @@ -6495,6 +6624,7 @@ def test_object_copy_key_not_found(): @attr(assertion='works') @attr('versioning') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_copy_versioned_bucket(): bucket_name = get_new_bucket() client = get_client() @@ -6564,6 +6694,7 @@ def test_object_copy_versioned_bucket(): @attr(assertion='works') @attr('versioning') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_copy_versioned_url_encoding(): bucket = get_new_bucket_resource() check_configure_versioning_retry(bucket.name, "Enabled", "Enabled") @@ -6632,6 +6763,7 @@ def _multipart_upload(bucket_name, key, size, part_size=5*1024*1024, client=None @attr(assertion='successful') @attr('versioning') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_copy_versioning_multipart_upload(): bucket_name = get_new_bucket() client = get_client() @@ -6727,6 +6859,7 @@ def test_multipart_upload_empty(): @attr(method='put') @attr(operation='check multipart uploads with single small part') 
@attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_multipart_upload_small(): bucket_name = get_new_bucket() client = get_client() @@ -6805,6 +6938,7 @@ def _check_key_content(src_key, src_bucket_name, dest_key, dest_bucket_name, ver @attr(method='put') @attr(operation='check multipart copies with single small part') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_multipart_copy_small(): src_key = 'foo' src_bucket_name = _create_key_with_random_content(src_key) @@ -6848,6 +6982,7 @@ def test_multipart_copy_invalid_range(): @attr(operation='check multipart copy with an improperly formatted range') # TODO: remove fails_on_rgw when https://tracker.ceph.com/issues/40795 is resolved @attr('fails_on_rgw') +@pytest.mark.fails_on_rgw def test_multipart_copy_improper_range(): client = get_client() src_key = 'source' @@ -6909,6 +7044,7 @@ def test_multipart_copy_without_range(): @attr(method='put') @attr(operation='check multipart copies with single small part') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_multipart_copy_special_names(): src_bucket_name = get_new_bucket() @@ -6947,7 +7083,9 @@ def _check_content_using_range(key, bucket_name, data, step): @attr(operation='complete multi-part upload') @attr(assertion='successful') @attr('fails_on_aws') +@pytest.mark.fails_on_aws @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_multipart_upload(): bucket_name = get_new_bucket() key="mymultipart" @@ -7013,6 +7151,7 @@ def check_configure_versioning_retry(bucket_name, status, expected_string): @attr(operation='check multipart copies of versioned objects') @attr('versioning') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_multipart_copy_versioned(): src_bucket_name = get_new_bucket() dest_bucket_name = get_new_bucket() @@ -7066,6 +7205,7 @@ def _check_upload_multipart_resend(bucket_name, key, objlen, resend_parts): @attr(operation='complete multi-part upload') @attr(assertion='successful') 
@attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_multipart_upload_resend_part(): bucket_name = get_new_bucket() key="mymultipart" @@ -7109,6 +7249,7 @@ def test_multipart_upload_multiple_sizes(): @attr(assertion='successful') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_multipart_copy_multiple_sizes(): src_key = 'foo' src_bucket_name = _create_key_with_random_content(src_key, 12*1024*1024) @@ -7201,6 +7342,7 @@ def _do_test_multipart_upload_contents(bucket_name, key, num_parts): @attr(operation='check contents of multi-part upload') @attr(assertion='successful') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_multipart_upload_contents(): bucket_name = get_new_bucket() _do_test_multipart_upload_contents(bucket_name, 'mymultipart', 3) @@ -7274,6 +7416,7 @@ def test_abort_multipart_upload_not_found(): @attr(operation='concurrent multi-part uploads') @attr(assertion='successful') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_list_multipart_upload(): bucket_name = get_new_bucket() client = get_client() @@ -7309,6 +7452,7 @@ def test_list_multipart_upload(): @attr(operation='list multipart uploads with different owners') @attr(assertion='successful') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_list_multipart_upload_owner(): bucket_name = get_new_bucket() @@ -7644,6 +7788,7 @@ def test_cors_header_option(): @attr(operation='put tags') @attr(assertion='succeeds') @attr('tagging') +@pytest.mark.tagging def test_set_bucket_tagging(): bucket_name = get_new_bucket() client = get_client() @@ -7945,6 +8090,7 @@ def _test_atomic_conditional_write(file_size): @attr(operation='write atomicity') @attr(assertion='1MB successful') @attr('fails_on_aws') +@pytest.mark.fails_on_aws def test_atomic_conditional_write_1mb(): _test_atomic_conditional_write(1024*1024) @@ -7986,8 +8132,10 @@ def _test_atomic_dual_conditional_write(file_size): @attr(operation='write one or the other') 
@attr(assertion='1MB successful') @attr('fails_on_aws') +@pytest.mark.fails_on_aws # TODO: test not passing with SSL, fix this @attr('fails_on_rgw') +@pytest.mark.fails_on_rgw def test_atomic_dual_conditional_write_1mb(): _test_atomic_dual_conditional_write(1024*1024) @@ -7996,8 +8144,10 @@ def test_atomic_dual_conditional_write_1mb(): @attr(operation='write file in deleted bucket') @attr(assertion='fail 404') @attr('fails_on_aws') +@pytest.mark.fails_on_aws # TODO: test not passing with SSL, fix this @attr('fails_on_rgw') +@pytest.mark.fails_on_rgw def test_atomic_write_bucket_gone(): bucket_name = get_new_bucket() client = get_client() @@ -8120,6 +8270,7 @@ def test_multipart_resend_first_finishes_last(): @attr(operation='range') @attr(assertion='returns correct data, 206') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_ranged_request_response_code(): content = 'testcontent' @@ -8142,6 +8293,7 @@ def _generate_random_string(size): @attr(operation='range') @attr(assertion='returns correct data, 206') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_ranged_big_request_response_code(): content = _generate_random_string(8*1024*1024) @@ -8161,6 +8313,7 @@ def test_ranged_big_request_response_code(): @attr(operation='range') @attr(assertion='returns correct data, 206') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_ranged_request_skip_leading_bytes_response_code(): content = 'testcontent' @@ -8180,6 +8333,7 @@ def test_ranged_request_skip_leading_bytes_response_code(): @attr(operation='range') @attr(assertion='returns correct data, 206') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_ranged_request_return_trailing_bytes_response_code(): content = 'testcontent' @@ -8594,6 +8748,7 @@ def test_versioning_obj_create_versions_remove_special_names(): @attr(assertion='everything works') @attr('versioning') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def 
test_versioning_obj_create_overwrite_multipart(): bucket_name = get_new_bucket() client = get_client() @@ -8688,6 +8843,7 @@ def test_versioning_obj_list_marker(): @attr(assertion='everything works') @attr('versioning') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_versioning_copy_obj_version(): bucket_name = get_new_bucket() client = get_client() @@ -8810,6 +8966,7 @@ def test_versioning_multi_object_delete_with_marker(): @attr(assertion='returns correct marker version id') @attr('versioning') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_versioning_multi_object_delete_with_marker_create(): bucket_name = get_new_bucket() client = get_client() @@ -8906,6 +9063,7 @@ def test_versioned_object_acl(): @attr(assertion='works') @attr('versioning') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_versioned_object_acl_no_version_specified(): bucket_name = get_new_bucket() client = get_client() @@ -8998,6 +9156,7 @@ def _do_clear_versioned_bucket_concurrent(client, bucket_name): @attr(assertion='works') # TODO: remove fails_on_rgw when https://tracker.ceph.com/issues/39142 is resolved @attr('fails_on_rgw') +@pytest.mark.fails_on_rgw @attr('versioning') def test_versioned_concurrent_object_create_concurrent_remove(): bucket_name = get_new_bucket() @@ -9060,6 +9219,7 @@ def test_versioned_concurrent_object_create_and_remove(): @attr(method='put') @attr(operation='set lifecycle config') @attr('lifecycle') +@pytest.mark.lifecycle def test_lifecycle_set(): bucket_name = get_new_bucket() client = get_client() @@ -9073,6 +9233,7 @@ def test_lifecycle_set(): @attr(method='get') @attr(operation='get lifecycle config') @attr('lifecycle') +@pytest.mark.lifecycle def test_lifecycle_get(): bucket_name = get_new_bucket() client = get_client() @@ -9087,6 +9248,7 @@ def test_lifecycle_get(): @attr(method='get') @attr(operation='get lifecycle config no id') @attr('lifecycle') +@pytest.mark.lifecycle def test_lifecycle_get_no_id(): 
bucket_name = get_new_bucket() client = get_client() @@ -9121,9 +9283,13 @@ def test_lifecycle_get_no_id(): @attr(method='put') @attr(operation='test lifecycle expiration') @attr('lifecycle') +@pytest.mark.lifecycle @attr('lifecycle_expiration') +@pytest.mark.lifecycle_expiration @attr('fails_on_aws') +@pytest.mark.fails_on_aws @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_lifecycle_expiration(): bucket_name = _create_objects(keys=['expire1/foo', 'expire1/bar', 'keep2/foo', 'keep2/bar', 'expire3/foo', 'expire3/bar']) @@ -9158,10 +9324,15 @@ def test_lifecycle_expiration(): @attr(method='put') @attr(operation='test lifecycle expiration with list-objects-v2') @attr('lifecycle') +@pytest.mark.lifecycle @attr('lifecycle_expiration') +@pytest.mark.lifecycle_expiration @attr('fails_on_aws') +@pytest.mark.fails_on_aws @attr('list-objects-v2') +@pytest.mark.list_objects_v2 @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_lifecyclev2_expiration(): bucket_name = _create_objects(keys=['expire1/foo', 'expire1/bar', 'keep2/foo', 'keep2/bar', 'expire3/foo', 'expire3/bar']) @@ -9196,8 +9367,11 @@ def test_lifecyclev2_expiration(): @attr(method='put') @attr(operation='test lifecycle expiration on versioning enabled bucket') @attr('lifecycle') +@pytest.mark.lifecycle @attr('lifecycle_expiration') +@pytest.mark.lifecycle_expiration @attr('fails_on_aws') +@pytest.mark.fails_on_aws def test_lifecycle_expiration_versioning_enabled(): bucket_name = get_new_bucket() client = get_client() @@ -9223,8 +9397,11 @@ def test_lifecycle_expiration_versioning_enabled(): @attr(method='put') @attr(operation='test lifecycle expiration with 1 tag') @attr('lifecycle') +@pytest.mark.lifecycle @attr('lifecycle_expiration') +@pytest.mark.lifecycle_expiration @attr('fails_on_aws') +@pytest.mark.fails_on_aws def test_lifecycle_expiration_tags1(): bucket_name = get_new_bucket() client = get_client() @@ -9334,9 +9511,13 @@ def setup_lifecycle_tags2(client, bucket_name): 
@attr(method='put') @attr(operation='test lifecycle expiration with 2 tags') @attr('lifecycle') +@pytest.mark.lifecycle @attr('lifecycle_expiration') +@pytest.mark.lifecycle_expiration @attr('fails_on_aws') +@pytest.mark.fails_on_aws @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_lifecycle_expiration_tags2(): bucket_name = get_new_bucket() client = get_client() @@ -9355,9 +9536,13 @@ def test_lifecycle_expiration_tags2(): @attr(method='put') @attr(operation='test lifecycle expiration with versioning and 2 tags') @attr('lifecycle') +@pytest.mark.lifecycle @attr('lifecycle_expiration') +@pytest.mark.lifecycle_expiration @attr('fails_on_aws') +@pytest.mark.fails_on_aws @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_lifecycle_expiration_versioned_tags2(): bucket_name = get_new_bucket() client = get_client() @@ -9428,9 +9613,13 @@ def verify_lifecycle_expiration_noncur_tags(client, bucket_name, secs): @attr(method='put') @attr(operation='test lifecycle noncurrent expiration with 1 tag filter') @attr('lifecycle') +@pytest.mark.lifecycle @attr('lifecycle_expiration') +@pytest.mark.lifecycle_expiration @attr('fails_on_aws') +@pytest.mark.fails_on_aws @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_lifecycle_expiration_noncur_tags1(): bucket_name = get_new_bucket() client = get_client() @@ -9459,6 +9648,7 @@ def test_lifecycle_expiration_noncur_tags1(): @attr(method='put') @attr(operation='id too long in lifecycle rule') @attr('lifecycle') +@pytest.mark.lifecycle @attr(assertion='fails 400') def test_lifecycle_id_too_long(): bucket_name = get_new_bucket() @@ -9475,6 +9665,7 @@ def test_lifecycle_id_too_long(): @attr(method='put') @attr(operation='same id') @attr('lifecycle') +@pytest.mark.lifecycle @attr(assertion='fails 400') def test_lifecycle_same_id(): bucket_name = get_new_bucket() @@ -9492,6 +9683,7 @@ def test_lifecycle_same_id(): @attr(method='put') @attr(operation='invalid status in lifecycle rule') 
@attr('lifecycle') +@pytest.mark.lifecycle @attr(assertion='fails 400') def test_lifecycle_invalid_status(): bucket_name = get_new_bucket() @@ -9524,6 +9716,7 @@ def test_lifecycle_invalid_status(): @attr(method='put') @attr(operation='set lifecycle config with expiration date') @attr('lifecycle') +@pytest.mark.lifecycle def test_lifecycle_set_date(): bucket_name = get_new_bucket() client = get_client() @@ -9537,6 +9730,7 @@ def test_lifecycle_set_date(): @attr(method='put') @attr(operation='set lifecycle config with not iso8601 date') @attr('lifecycle') +@pytest.mark.lifecycle @attr(assertion='fails 400') def test_lifecycle_set_invalid_date(): bucket_name = get_new_bucket() @@ -9552,9 +9746,13 @@ def test_lifecycle_set_invalid_date(): @attr(method='put') @attr(operation='test lifecycle expiration with date') @attr('lifecycle') +@pytest.mark.lifecycle @attr('lifecycle_expiration') +@pytest.mark.lifecycle_expiration @attr('fails_on_aws') +@pytest.mark.fails_on_aws @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_lifecycle_expiration_date(): bucket_name = _create_objects(keys=['past/foo', 'future/bar']) client = get_client() @@ -9579,7 +9777,9 @@ def test_lifecycle_expiration_date(): @attr(method='put') @attr(operation='test lifecycle expiration days 0') @attr('lifecycle') +@pytest.mark.lifecycle @attr('lifecycle_expiration') +@pytest.mark.lifecycle_expiration def test_lifecycle_expiration_days0(): bucket_name = _create_objects(keys=['days0/foo', 'days0/bar']) client = get_client() @@ -9633,7 +9833,9 @@ def check_lifecycle_expiration_header(response, start_time, rule_id, @attr(method='put') @attr(operation='test lifecycle expiration header put') @attr('lifecycle') +@pytest.mark.lifecycle @attr('lifecycle_expiration') +@pytest.mark.lifecycle_expiration def test_lifecycle_expiration_header_put(): bucket_name = get_new_bucket() client = get_client() @@ -9647,8 +9849,11 @@ def test_lifecycle_expiration_header_put(): @attr(method='head') 
@attr(operation='test lifecycle expiration header head') @attr('lifecycle') +@pytest.mark.lifecycle @attr('lifecycle_expiration') +@pytest.mark.lifecycle_expiration @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_lifecycle_expiration_header_head(): bucket_name = get_new_bucket() client = get_client() @@ -9668,8 +9873,11 @@ def test_lifecycle_expiration_header_head(): @attr(method='head') @attr(operation='test lifecycle expiration header head with tags') @attr('lifecycle') +@pytest.mark.lifecycle @attr('lifecycle_expiration') +@pytest.mark.lifecycle_expiration @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_lifecycle_expiration_header_tags_head(): bucket_name = get_new_bucket() client = get_client() @@ -9727,7 +9935,9 @@ def test_lifecycle_expiration_header_tags_head(): @attr(method='head') @attr(operation='test lifecycle expiration header head with tags and And') @attr('lifecycle') +@pytest.mark.lifecycle @attr('lifecycle_expiration') +@pytest.mark.lifecycle_expiration def test_lifecycle_expiration_header_and_tags_head(): now = datetime.datetime.now(None) bucket_name = get_new_bucket() @@ -9775,6 +9985,7 @@ def test_lifecycle_expiration_header_and_tags_head(): @attr(method='put') @attr(operation='set lifecycle config with noncurrent version expiration') @attr('lifecycle') +@pytest.mark.lifecycle def test_lifecycle_set_noncurrent(): bucket_name = _create_objects(keys=['past/foo', 'future/bar']) client = get_client() @@ -9788,9 +9999,13 @@ def test_lifecycle_set_noncurrent(): @attr(method='put') @attr(operation='test lifecycle non-current version expiration') @attr('lifecycle') +@pytest.mark.lifecycle @attr('lifecycle_expiration') +@pytest.mark.lifecycle_expiration @attr('fails_on_aws') +@pytest.mark.fails_on_aws @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_lifecycle_noncur_expiration(): bucket_name = get_new_bucket() client = get_client() @@ -9820,6 +10035,7 @@ def test_lifecycle_noncur_expiration(): 
@attr(method='put') @attr(operation='set lifecycle config with delete marker expiration') @attr('lifecycle') +@pytest.mark.lifecycle def test_lifecycle_set_deletemarker(): bucket_name = get_new_bucket() client = get_client() @@ -9832,6 +10048,7 @@ def test_lifecycle_set_deletemarker(): @attr(method='put') @attr(operation='set lifecycle config with Filter') @attr('lifecycle') +@pytest.mark.lifecycle def test_lifecycle_set_filter(): bucket_name = get_new_bucket() client = get_client() @@ -9844,6 +10061,7 @@ def test_lifecycle_set_filter(): @attr(method='put') @attr(operation='set lifecycle config with empty Filter') @attr('lifecycle') +@pytest.mark.lifecycle def test_lifecycle_set_empty_filter(): bucket_name = get_new_bucket() client = get_client() @@ -9856,9 +10074,13 @@ def test_lifecycle_set_empty_filter(): @attr(method='put') @attr(operation='test lifecycle delete marker expiration') @attr('lifecycle') +@pytest.mark.lifecycle @attr('lifecycle_expiration') +@pytest.mark.lifecycle_expiration @attr('fails_on_aws') +@pytest.mark.fails_on_aws @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_lifecycle_deletemarker_expiration(): bucket_name = get_new_bucket() client = get_client() @@ -9894,6 +10116,7 @@ def test_lifecycle_deletemarker_expiration(): @attr(method='put') @attr(operation='set lifecycle config with multipart expiration') @attr('lifecycle') +@pytest.mark.lifecycle def test_lifecycle_set_multipart(): bucket_name = get_new_bucket() client = get_client() @@ -9911,9 +10134,13 @@ def test_lifecycle_set_multipart(): @attr(method='put') @attr(operation='test lifecycle multipart expiration') @attr('lifecycle') +@pytest.mark.lifecycle @attr('lifecycle_expiration') +@pytest.mark.lifecycle_expiration @attr('fails_on_aws') +@pytest.mark.fails_on_aws @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_lifecycle_multipart_expiration(): bucket_name = get_new_bucket() client = get_client() @@ -9949,6 +10176,7 @@ def 
test_lifecycle_multipart_expiration(): @attr(method='put') @attr(operation='set lifecycle config transition with not iso8601 date') @attr('lifecycle') +@pytest.mark.lifecycle @attr(assertion='fails 400') def test_lifecycle_transition_set_invalid_date(): bucket_name = get_new_bucket() @@ -9990,8 +10218,11 @@ def _test_encryption_sse_customer_write(file_size): @attr(method='put') @attr(operation='test lifecycle transition') @attr('lifecycle') +@pytest.mark.lifecycle @attr('lifecycle_transition') +@pytest.mark.lifecycle_transition @attr('fails_on_aws') +@pytest.mark.fails_on_aws def test_lifecycle_transition(): sc = configured_storage_classes() if len(sc) < 3: @@ -10038,8 +10269,11 @@ def test_lifecycle_transition(): @attr(method='put') @attr(operation='test lifecycle expiration') @attr('lifecycle') +@pytest.mark.lifecycle @attr('lifecycle_transition') +@pytest.mark.lifecycle_transition @attr('fails_on_aws') +@pytest.mark.fails_on_aws def test_lifecycle_transition_single_rule_multi_trans(): sc = configured_storage_classes() if len(sc) < 3: @@ -10084,7 +10318,9 @@ def test_lifecycle_transition_single_rule_multi_trans(): @attr(method='put') @attr(operation='set lifecycle config with noncurrent version expiration') @attr('lifecycle') +@pytest.mark.lifecycle @attr('lifecycle_transition') +@pytest.mark.lifecycle_transition def test_lifecycle_set_noncurrent_transition(): sc = configured_storage_classes() if len(sc) < 3: @@ -10123,9 +10359,13 @@ def test_lifecycle_set_noncurrent_transition(): @attr(method='put') @attr(operation='test lifecycle non-current version expiration') @attr('lifecycle') +@pytest.mark.lifecycle @attr('lifecycle_expiration') +@pytest.mark.lifecycle_expiration @attr('lifecycle_transition') +@pytest.mark.lifecycle_transition @attr('fails_on_aws') +@pytest.mark.fails_on_aws def test_lifecycle_noncur_transition(): sc = configured_storage_classes() if len(sc) < 3: @@ -10204,10 +10444,15 @@ def verify_object(client, bucket, key, content=None, sc=None): 
@attr(method='put') @attr(operation='test lifecycle transition for cloud') @attr('lifecycle') +@pytest.mark.lifecycle @attr('lifecycle_transition') +@pytest.mark.lifecycle_transition @attr('cloud_transition') +@pytest.mark.cloud_transition @attr('fails_on_aws') +@pytest.mark.fails_on_aws @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_lifecycle_cloud_transition(): cloud_sc = get_cloud_storage_class() if cloud_sc == None: @@ -10289,10 +10534,15 @@ def test_lifecycle_cloud_transition(): @attr(method='put') @attr(operation='test lifecycle transition for cloud') @attr('lifecycle') +@pytest.mark.lifecycle @attr('lifecycle_transition') +@pytest.mark.lifecycle_transition @attr('cloud_transition') +@pytest.mark.cloud_transition @attr('fails_on_aws') +@pytest.mark.fails_on_aws @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_lifecycle_cloud_multiple_transition(): cloud_sc = get_cloud_storage_class() if cloud_sc == None: @@ -10355,11 +10605,17 @@ def test_lifecycle_cloud_multiple_transition(): @attr(method='put') @attr(operation='test lifecycle non-current version expiration on cloud transition') @attr('lifecycle') +@pytest.mark.lifecycle @attr('lifecycle_expiration') +@pytest.mark.lifecycle_expiration @attr('lifecycle_transition') +@pytest.mark.lifecycle_transition @attr('cloud_transition') +@pytest.mark.cloud_transition @attr('fails_on_aws') +@pytest.mark.fails_on_aws @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_lifecycle_noncur_cloud_transition(): cloud_sc = get_cloud_storage_class() if cloud_sc == None: @@ -10448,10 +10704,15 @@ def test_lifecycle_noncur_cloud_transition(): @attr(method='put') @attr(operation='test lifecycle transition for cloud') @attr('lifecycle') +@pytest.mark.lifecycle @attr('lifecycle_transition') +@pytest.mark.lifecycle_transition @attr('cloud_transition') +@pytest.mark.cloud_transition @attr('fails_on_aws') +@pytest.mark.fails_on_aws @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore 
def test_lifecycle_cloud_transition_large_obj(): cloud_sc = get_cloud_storage_class() if cloud_sc == None: @@ -10506,7 +10767,9 @@ def test_lifecycle_cloud_transition_large_obj(): @attr(operation='Test SSE-C encrypted transfer 1 byte') @attr(assertion='success') @attr('encryption') +@pytest.mark.encryption @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_encrypted_transfer_1b(): _test_encryption_sse_customer_write(1) @@ -10516,7 +10779,9 @@ def test_encrypted_transfer_1b(): @attr(operation='Test SSE-C encrypted transfer 1KB') @attr(assertion='success') @attr('encryption') +@pytest.mark.encryption @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_encrypted_transfer_1kb(): _test_encryption_sse_customer_write(1024) @@ -10526,7 +10791,9 @@ def test_encrypted_transfer_1kb(): @attr(operation='Test SSE-C encrypted transfer 1MB') @attr(assertion='success') @attr('encryption') +@pytest.mark.encryption @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_encrypted_transfer_1MB(): _test_encryption_sse_customer_write(1024*1024) @@ -10536,13 +10803,16 @@ def test_encrypted_transfer_1MB(): @attr(operation='Test SSE-C encrypted transfer 13 bytes') @attr(assertion='success') @attr('encryption') +@pytest.mark.encryption @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_encrypted_transfer_13b(): _test_encryption_sse_customer_write(13) @attr(assertion='success') @attr('encryption') +@pytest.mark.encryption def test_encryption_sse_c_method_head(): bucket_name = get_new_bucket() client = get_client() @@ -10572,6 +10842,7 @@ def test_encryption_sse_c_method_head(): @attr(operation='write encrypted with SSE-C and read without SSE-C') @attr(assertion='operation fails') @attr('encryption') +@pytest.mark.encryption def test_encryption_sse_c_present(): bucket_name = get_new_bucket() client = get_client() @@ -10596,6 +10867,7 @@ def test_encryption_sse_c_present(): @attr(operation='write encrypted with SSE-C but read with other 
key') @attr(assertion='operation fails') @attr('encryption') +@pytest.mark.encryption def test_encryption_sse_c_other_key(): bucket_name = get_new_bucket() client = get_client() @@ -10627,6 +10899,7 @@ def test_encryption_sse_c_other_key(): @attr(operation='write encrypted with SSE-C, but md5 is bad') @attr(assertion='operation fails') @attr('encryption') +@pytest.mark.encryption def test_encryption_sse_c_invalid_md5(): bucket_name = get_new_bucket() client = get_client() @@ -10649,6 +10922,7 @@ def test_encryption_sse_c_invalid_md5(): @attr(operation='write encrypted with SSE-C, but dont provide MD5') @attr(assertion='operation fails') @attr('encryption') +@pytest.mark.encryption def test_encryption_sse_c_no_md5(): bucket_name = get_new_bucket() client = get_client() @@ -10668,6 +10942,7 @@ def test_encryption_sse_c_no_md5(): @attr(operation='declare SSE-C but do not provide key') @attr(assertion='operation fails') @attr('encryption') +@pytest.mark.encryption def test_encryption_sse_c_no_key(): bucket_name = get_new_bucket() client = get_client() @@ -10686,6 +10961,7 @@ def test_encryption_sse_c_no_key(): @attr(operation='Do not declare SSE-C but provide key and MD5') @attr(assertion='operation successfull, no encryption') @attr('encryption') +@pytest.mark.encryption def test_encryption_key_no_sse_c(): bucket_name = get_new_bucket() client = get_client() @@ -10758,8 +11034,11 @@ def _check_content_using_range_enc(client, bucket_name, key, data, step, enc_hea @attr(operation='complete multi-part upload') @attr(assertion='successful') @attr('encryption') +@pytest.mark.encryption @attr('fails_on_aws') # allow-unordered is a non-standard extension +@pytest.mark.fails_on_aws @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_encryption_sse_c_multipart_upload(): bucket_name = get_new_bucket() client = get_client() @@ -10808,8 +11087,10 @@ def test_encryption_sse_c_multipart_upload(): @attr(operation='multipart upload with bad key for uploading chunks') 
@attr(assertion='successful') @attr('encryption') +@pytest.mark.encryption # TODO: remove this fails_on_rgw when I fix it @attr('fails_on_rgw') +@pytest.mark.fails_on_rgw def test_encryption_sse_c_multipart_invalid_chunks_1(): bucket_name = get_new_bucket() client = get_client() @@ -10840,8 +11121,10 @@ def test_encryption_sse_c_multipart_invalid_chunks_1(): @attr(operation='multipart upload with bad md5 for chunks') @attr(assertion='successful') @attr('encryption') +@pytest.mark.encryption # TODO: remove this fails_on_rgw when I fix it @attr('fails_on_rgw') +@pytest.mark.fails_on_rgw def test_encryption_sse_c_multipart_invalid_chunks_2(): bucket_name = get_new_bucket() client = get_client() @@ -10872,7 +11155,9 @@ def test_encryption_sse_c_multipart_invalid_chunks_2(): @attr(operation='complete multi-part upload and download with bad key') @attr(assertion='successful') @attr('encryption') +@pytest.mark.encryption @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_encryption_sse_c_multipart_bad_download(): bucket_name = get_new_bucket() client = get_client() @@ -10925,7 +11210,9 @@ def test_encryption_sse_c_multipart_bad_download(): @attr(operation='authenticated browser based upload via POST request') @attr(assertion='succeeds and returns written data') @attr('encryption') +@pytest.mark.encryption @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_encryption_sse_c_post_object_authenticated_request(): bucket_name = get_new_bucket() client = get_client() @@ -10980,7 +11267,9 @@ def test_encryption_sse_c_post_object_authenticated_request(): @attr(assertion='success') @attr('encryption') +@pytest.mark.encryption @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def _test_sse_kms_customer_write(file_size, key_id = 'testkey-1'): """ Tests Create a file of A's, use it to set_contents_from_file. 
@@ -11013,7 +11302,9 @@ def _test_sse_kms_customer_write(file_size, key_id = 'testkey-1'): @attr(operation='Test SSE-KMS encrypted does perform head properly') @attr(assertion='success') @attr('encryption') +@pytest.mark.encryption @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_sse_kms_method_head(): kms_keyid = get_main_kms_keyid() bucket_name = get_new_bucket() @@ -11044,7 +11335,9 @@ def test_sse_kms_method_head(): @attr(operation='write encrypted with SSE-KMS and read without SSE-KMS') @attr(assertion='operation success') @attr('encryption') +@pytest.mark.encryption @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_sse_kms_present(): kms_keyid = get_main_kms_keyid() bucket_name = get_new_bucket() @@ -11069,6 +11362,7 @@ def test_sse_kms_present(): @attr(operation='declare SSE-KMS but do not provide key_id') @attr(assertion='operation fails') @attr('encryption') +@pytest.mark.encryption def test_sse_kms_no_key(): bucket_name = get_new_bucket() client = get_client() @@ -11089,6 +11383,7 @@ def test_sse_kms_no_key(): @attr(operation='Do not declare SSE-KMS but provide key_id') @attr(assertion='operation successfull, no encryption') @attr('encryption') +@pytest.mark.encryption def test_sse_kms_not_declared(): bucket_name = get_new_bucket() client = get_client() @@ -11110,7 +11405,9 @@ def test_sse_kms_not_declared(): @attr(operation='complete KMS multi-part upload') @attr(assertion='successful') @attr('encryption') +@pytest.mark.encryption @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_sse_kms_multipart_upload(): kms_keyid = get_main_kms_keyid() bucket_name = get_new_bucket() @@ -11161,7 +11458,9 @@ def test_sse_kms_multipart_upload(): @attr(operation='multipart KMS upload with bad key_id for uploading chunks') @attr(assertion='successful') @attr('encryption') +@pytest.mark.encryption @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_sse_kms_multipart_invalid_chunks_1(): kms_keyid = 
get_main_kms_keyid() kms_keyid2 = get_secondary_kms_keyid() @@ -11192,7 +11491,9 @@ def test_sse_kms_multipart_invalid_chunks_1(): @attr(operation='multipart KMS upload with unexistent key_id for chunks') @attr(assertion='successful') @attr('encryption') +@pytest.mark.encryption @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_sse_kms_multipart_invalid_chunks_2(): kms_keyid = get_main_kms_keyid() bucket_name = get_new_bucket() @@ -11222,7 +11523,9 @@ def test_sse_kms_multipart_invalid_chunks_2(): @attr(operation='authenticated KMS browser based upload via POST request') @attr(assertion='succeeds and returns written data') @attr('encryption') +@pytest.mark.encryption @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_sse_kms_post_object_authenticated_request(): kms_keyid = get_main_kms_keyid() bucket_name = get_new_bucket() @@ -11272,7 +11575,9 @@ def test_sse_kms_post_object_authenticated_request(): @attr(operation='Test SSE-KMS encrypted transfer 1 byte') @attr(assertion='success') @attr('encryption') +@pytest.mark.encryption @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_sse_kms_transfer_1b(): kms_keyid = get_main_kms_keyid() if kms_keyid is None: @@ -11285,7 +11590,9 @@ def test_sse_kms_transfer_1b(): @attr(operation='Test SSE-KMS encrypted transfer 1KB') @attr(assertion='success') @attr('encryption') +@pytest.mark.encryption @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_sse_kms_transfer_1kb(): kms_keyid = get_main_kms_keyid() if kms_keyid is None: @@ -11298,7 +11605,9 @@ def test_sse_kms_transfer_1kb(): @attr(operation='Test SSE-KMS encrypted transfer 1MB') @attr(assertion='success') @attr('encryption') +@pytest.mark.encryption @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_sse_kms_transfer_1MB(): kms_keyid = get_main_kms_keyid() if kms_keyid is None: @@ -11311,7 +11620,9 @@ def test_sse_kms_transfer_1MB(): @attr(operation='Test SSE-KMS encrypted transfer 13 bytes') 
@attr(assertion='success') @attr('encryption') +@pytest.mark.encryption @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_sse_kms_transfer_13b(): kms_keyid = get_main_kms_keyid() if kms_keyid is None: @@ -11324,6 +11635,7 @@ def test_sse_kms_transfer_13b(): @attr(operation='write encrypted with SSE-KMS and read with SSE-KMS') @attr(assertion='operation fails') @attr('encryption') +@pytest.mark.encryption def test_sse_kms_read_declare(): bucket_name = get_new_bucket() client = get_client() @@ -11347,6 +11659,7 @@ def test_sse_kms_read_declare(): @attr(operation='Test Bucket Policy') @attr(assertion='succeeds') @attr('bucket-policy') +@pytest.mark.bucket_policy def test_bucket_policy(): bucket_name = get_new_bucket() client = get_client() @@ -11376,7 +11689,9 @@ def test_bucket_policy(): eq(len(response['Contents']), 1) @attr('bucket-policy') +@pytest.mark.bucket_policy @attr('list-objects-v2') +@pytest.mark.list_objects_v2 def test_bucketv2_policy(): bucket_name = get_new_bucket() client = get_client() @@ -11410,6 +11725,7 @@ def test_bucketv2_policy(): @attr(operation='Test Bucket Policy and ACL') @attr(assertion='fails') @attr('bucket-policy') +@pytest.mark.bucket_policy def test_bucket_policy_acl(): bucket_name = get_new_bucket() client = get_client() @@ -11449,7 +11765,9 @@ def test_bucket_policy_acl(): @attr(operation='Test Bucket Policy and ACL with list-objects-v2') @attr(assertion='fails') @attr('bucket-policy') +@pytest.mark.bucket_policy @attr('list-objects-v2') +@pytest.mark.list_objects_v2 def test_bucketv2_policy_acl(): bucket_name = get_new_bucket() client = get_client() @@ -11489,8 +11807,10 @@ def test_bucketv2_policy_acl(): @attr(operation='Test Bucket Policy for a user belonging to a different tenant') @attr(assertion='succeeds') @attr('bucket-policy') +@pytest.mark.bucket_policy # TODO: remove this fails_on_rgw when I fix it @attr('fails_on_rgw') +@pytest.mark.fails_on_rgw def test_bucket_policy_different_tenant(): bucket_name = 
get_new_bucket() client = get_client() @@ -11537,9 +11857,12 @@ def test_bucket_policy_different_tenant(): @attr(operation='Test Bucket Policy for a user belonging to a different tenant') @attr(assertion='succeeds') @attr('bucket-policy') +@pytest.mark.bucket_policy # TODO: remove this fails_on_rgw when I fix it @attr('fails_on_rgw') +@pytest.mark.fails_on_rgw @attr('list-objects-v2') +@pytest.mark.list_objects_v2 def test_bucketv2_policy_different_tenant(): bucket_name = get_new_bucket() client = get_client() @@ -11586,6 +11909,7 @@ def test_bucketv2_policy_different_tenant(): @attr(operation='Test Bucket Policy on another bucket') @attr(assertion='succeeds') @attr('bucket-policy') +@pytest.mark.bucket_policy def test_bucket_policy_another_bucket(): bucket_name = get_new_bucket() bucket_name2 = get_new_bucket() @@ -11627,7 +11951,9 @@ def test_bucket_policy_another_bucket(): @attr(operation='Test Bucket Policy on another bucket with list-objects-v2') @attr(assertion='succeeds') @attr('bucket-policy') +@pytest.mark.bucket_policy @attr('list-objects-v2') +@pytest.mark.list_objects_v2 def test_bucketv2_policy_another_bucket(): bucket_name = get_new_bucket() bucket_name2 = get_new_bucket() @@ -11668,8 +11994,10 @@ def test_bucketv2_policy_another_bucket(): @attr(method='put') @attr(operation='Test put condition operator end with ifExists') @attr('bucket-policy') +@pytest.mark.bucket_policy # TODO: remove this fails_on_rgw when I fix it @attr('fails_on_rgw') +@pytest.mark.fails_on_rgw def test_bucket_policy_set_condition_operator_end_with_IfExists(): bucket_name = get_new_bucket() client = get_client() @@ -11743,7 +12071,9 @@ def _make_random_string(size): @attr(operation='Test Get/PutObjTagging output') @attr(assertion='success') @attr('tagging') +@pytest.mark.tagging @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_get_obj_tagging(): key = 'testputtags' bucket_name = _create_key_with_random_content(key) @@ -11762,6 +12092,7 @@ def 
test_get_obj_tagging(): @attr(operation='Test HEAD obj tagging output') @attr(assertion='success') @attr('tagging') +@pytest.mark.tagging def test_get_obj_head_tagging(): key = 'testputtags' bucket_name = _create_key_with_random_content(key) @@ -11781,7 +12112,9 @@ def test_get_obj_head_tagging(): @attr(operation='Test Put max allowed tags') @attr(assertion='success') @attr('tagging') +@pytest.mark.tagging @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_put_max_tags(): key = 'testputmaxtags' bucket_name = _create_key_with_random_content(key) @@ -11799,6 +12132,7 @@ def test_put_max_tags(): @attr(operation='Test Put max allowed tags') @attr(assertion='fails') @attr('tagging') +@pytest.mark.tagging def test_put_excess_tags(): key = 'testputmaxtags' bucket_name = _create_key_with_random_content(key) @@ -11818,6 +12152,7 @@ def test_put_excess_tags(): @attr(operation='Test Put max allowed k-v size') @attr(assertion='success') @attr('tagging') +@pytest.mark.tagging def test_put_max_kvsize_tags(): key = 'testputmaxkeysize' bucket_name = _create_key_with_random_content(key) @@ -11843,6 +12178,7 @@ def test_put_max_kvsize_tags(): @attr(operation='Test exceed key size') @attr(assertion='success') @attr('tagging') +@pytest.mark.tagging def test_put_excess_key_tags(): key = 'testputexcesskeytags' bucket_name = _create_key_with_random_content(key) @@ -11869,6 +12205,7 @@ def test_put_excess_key_tags(): @attr(operation='Test exceed val size') @attr(assertion='success') @attr('tagging') +@pytest.mark.tagging def test_put_excess_val_tags(): key = 'testputexcesskeytags' bucket_name = _create_key_with_random_content(key) @@ -11895,7 +12232,9 @@ def test_put_excess_val_tags(): @attr(operation='Test PUT modifies existing tags') @attr(assertion='success') @attr('tagging') +@pytest.mark.tagging @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_put_modify_tags(): key = 'testputmodifytags' bucket_name = _create_key_with_random_content(key) @@ -11929,7 
+12268,9 @@ def test_put_modify_tags(): @attr(operation='Test Delete tags') @attr(assertion='success') @attr('tagging') +@pytest.mark.tagging @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_put_delete_tags(): key = 'testputmodifytags' bucket_name = _create_key_with_random_content(key) @@ -11952,8 +12293,10 @@ def test_put_delete_tags(): @attr(method='post') @attr(operation='anonymous browser based upload via POST request') @attr('tagging') +@pytest.mark.tagging @attr(assertion='succeeds and returns written data') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_post_object_tags_anonymous_request(): bucket_name = get_new_bucket_name() client = get_client() @@ -11988,6 +12331,7 @@ def test_post_object_tags_anonymous_request(): @attr(method='post') @attr(operation='authenticated browser based upload via POST request') @attr('tagging') +@pytest.mark.tagging @attr(assertion='succeeds and returns written data') def test_post_object_tags_authenticated_request(): bucket_name = get_new_bucket() @@ -12039,7 +12383,9 @@ def test_post_object_tags_authenticated_request(): @attr(operation='Test PutObj with tagging headers') @attr(assertion='success') @attr('tagging') +@pytest.mark.tagging @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_put_obj_with_tags(): bucket_name = get_new_bucket() client = get_client() @@ -12075,8 +12421,11 @@ def _make_arn_resource(path="*"): @attr(operation='Test GetObjTagging public read') @attr(assertion='success') @attr('tagging') +@pytest.mark.tagging @attr('bucket-policy') +@pytest.mark.bucket_policy @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_get_tags_acl_public(): key = 'testputtagsacl' bucket_name = _create_key_with_random_content(key) @@ -12102,8 +12451,11 @@ def test_get_tags_acl_public(): @attr(operation='Test PutObjTagging public wrote') @attr(assertion='success') @attr('tagging') +@pytest.mark.tagging @attr('bucket-policy') +@pytest.mark.bucket_policy 
@attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_put_tags_acl_public(): key = 'testputtagsacl' bucket_name = _create_key_with_random_content(key) @@ -12128,7 +12480,9 @@ def test_put_tags_acl_public(): @attr(operation='test deleteobjtagging public') @attr(assertion='success') @attr('tagging') +@pytest.mark.tagging @attr('bucket-policy') +@pytest.mark.bucket_policy def test_delete_tags_obj_public(): key = 'testputtagsacl' bucket_name = _create_key_with_random_content(key) @@ -12237,8 +12591,11 @@ def test_versioning_bucket_multipart_upload_return_version_id(): @attr(operation='Test ExistingObjectTag conditional on get object') @attr(assertion='success') @attr('tagging') +@pytest.mark.tagging @attr('bucket-policy') +@pytest.mark.bucket_policy @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_bucket_policy_get_obj_existing_tag(): bucket_name = _create_objects(keys=['publictag', 'privatetag', 'invalidtag']) client = get_client() @@ -12296,8 +12653,11 @@ def test_bucket_policy_get_obj_existing_tag(): @attr(operation='Test ExistingObjectTag conditional on get object tagging') @attr(assertion='success') @attr('tagging') +@pytest.mark.tagging @attr('bucket-policy') +@pytest.mark.bucket_policy @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_bucket_policy_get_obj_tagging_existing_tag(): bucket_name = _create_objects(keys=['publictag', 'privatetag', 'invalidtag']) client = get_client() @@ -12362,8 +12722,11 @@ def test_bucket_policy_get_obj_tagging_existing_tag(): @attr(operation='Test ExistingObjectTag conditional on put object tagging') @attr(assertion='success') @attr('tagging') +@pytest.mark.tagging @attr('bucket-policy') +@pytest.mark.bucket_policy @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_bucket_policy_put_obj_tagging_existing_tag(): bucket_name = _create_objects(keys=['publictag', 'privatetag', 'invalidtag']) client = get_client() @@ -12435,8 +12798,11 @@ def 
test_bucket_policy_put_obj_tagging_existing_tag(): @attr(operation='Test copy-source conditional on put obj') @attr(assertion='success') @attr('tagging') +@pytest.mark.tagging @attr('bucket-policy') +@pytest.mark.bucket_policy @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_bucket_policy_put_obj_copy_source(): bucket_name = _create_objects(keys=['public/foo', 'public/bar', 'private/foo']) client = get_client() @@ -12486,8 +12852,11 @@ def test_bucket_policy_put_obj_copy_source(): @attr(operation='Test copy-source conditional on put obj') @attr(assertion='success') @attr('tagging') +@pytest.mark.tagging @attr('bucket-policy') +@pytest.mark.bucket_policy @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_bucket_policy_put_obj_copy_source_meta(): src_bucket_name = _create_objects(keys=['public/foo', 'public/bar']) client = get_client() @@ -12541,7 +12910,9 @@ def test_bucket_policy_put_obj_copy_source_meta(): @attr(operation='Test put obj with canned-acl not to be public') @attr(assertion='success') @attr('tagging') +@pytest.mark.tagging @attr('bucket-policy') +@pytest.mark.bucket_policy def test_bucket_policy_put_obj_acl(): bucket_name = get_new_bucket() client = get_client() @@ -12586,6 +12957,7 @@ def test_bucket_policy_put_obj_acl(): @attr(operation='Test put obj with amz-grant back to bucket-owner') @attr(assertion='success') @attr('bucket-policy') +@pytest.mark.bucket_policy def test_bucket_policy_put_obj_grant(): bucket_name = get_new_bucket() @@ -12652,6 +13024,7 @@ def test_bucket_policy_put_obj_grant(): @attr(operation='Deny put obj specifying both sse-c and sse-s3') @attr(assertion='success') @attr('encryption') +@pytest.mark.encryption def test_put_obj_enc_conflict_c_s3(): bucket_name = get_new_bucket() client = get_v2_client() @@ -12679,6 +13052,7 @@ def test_put_obj_enc_conflict_c_s3(): @attr(operation='Deny put obj specifying both sse-c and sse-kms') @attr(assertion='success') @attr('encryption') +@pytest.mark.encryption 
def test_put_obj_enc_conflict_c_kms(): kms_keyid = get_main_kms_keyid() if kms_keyid is None: @@ -12710,6 +13084,7 @@ def test_put_obj_enc_conflict_c_kms(): @attr(operation='Deny put obj specifying sse-s3 with kms key id') @attr(assertion='success') @attr('encryption') +@pytest.mark.encryption def test_put_obj_enc_conflict_s3_kms(): kms_keyid = get_main_kms_keyid() if kms_keyid is None: @@ -12738,6 +13113,7 @@ def test_put_obj_enc_conflict_s3_kms(): @attr(operation='Deny put obj specifying invalid algorithm' ) @attr(assertion='success') @attr('encryption') +@pytest.mark.encryption def test_put_obj_enc_conflict_bad_enc_kms(): kms_keyid = get_main_kms_keyid() if kms_keyid is None: @@ -12765,9 +13141,13 @@ def test_put_obj_enc_conflict_bad_enc_kms(): @attr(operation='Deny put obj requests if not sse-s3: without encryption') @attr(assertion='success') @attr('encryption') +@pytest.mark.encryption @attr('bucket-policy') +@pytest.mark.bucket_policy @attr('sse-s3') +@pytest.mark.sse_s3 @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_bucket_policy_put_obj_s3_noenc(): bucket_name = get_new_bucket() client = get_v2_client() @@ -12823,8 +13203,11 @@ def test_bucket_policy_put_obj_s3_noenc(): @attr(operation='Deny put obj requests if not sse-s3: kms') @attr(assertion='success') @attr('encryption') +@pytest.mark.encryption @attr('bucket-policy') +@pytest.mark.bucket_policy @attr('sse-s3') +@pytest.mark.sse_s3 def test_bucket_policy_put_obj_s3_kms(): kms_keyid = get_main_kms_keyid() if kms_keyid is None: @@ -12873,8 +13256,11 @@ def test_bucket_policy_put_obj_s3_kms(): @attr(operation='Deny put obj requests if not sse-kms: without encryption') @attr(assertion='success') @attr('encryption') +@pytest.mark.encryption @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore @attr('bucket-policy') +@pytest.mark.bucket_policy def test_bucket_policy_put_obj_kms_noenc(): kms_keyid = get_main_kms_keyid() if kms_keyid is None: @@ -12924,7 +13310,9 @@ def 
test_bucket_policy_put_obj_kms_noenc(): @attr(operation='Deny put obj requests if not sse-kms: s3') @attr(assertion='success') @attr('encryption') +@pytest.mark.encryption @attr('bucket-policy') +@pytest.mark.bucket_policy def test_bucket_policy_put_obj_kms_s3(): bucket_name = get_new_bucket() client = get_v2_client() @@ -12969,9 +13357,12 @@ def test_bucket_policy_put_obj_kms_s3(): @attr(operation='put obj with RequestObjectTag') @attr(assertion='success') @attr('tagging') +@pytest.mark.tagging @attr('bucket-policy') +@pytest.mark.bucket_policy # TODO: remove this fails_on_rgw when I fix it @attr('fails_on_rgw') +@pytest.mark.fails_on_rgw def test_bucket_policy_put_obj_request_obj_tag(): bucket_name = get_new_bucket() client = get_client() @@ -13003,8 +13394,11 @@ def test_bucket_policy_put_obj_request_obj_tag(): @attr(operation='Test ExistingObjectTag conditional on get object acl') @attr(assertion='success') @attr('tagging') +@pytest.mark.tagging @attr('bucket-policy') +@pytest.mark.bucket_policy @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_bucket_policy_get_obj_acl_existing_tag(): bucket_name = _create_objects(keys=['publictag', 'privatetag', 'invalidtag']) client = get_client() @@ -13069,6 +13463,7 @@ def test_bucket_policy_get_obj_acl_existing_tag(): @attr(assertion='success') @attr('object-lock') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_lock_put_obj_lock(): bucket_name = get_new_bucket_name() client = get_client() @@ -13129,6 +13524,7 @@ def test_object_lock_put_obj_lock_invalid_bucket(): @attr(assertion='fails') @attr('object-lock') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_lock_put_obj_lock_with_days_and_years(): bucket_name = get_new_bucket_name() client = get_client() @@ -13153,6 +13549,7 @@ def test_object_lock_put_obj_lock_with_days_and_years(): @attr(assertion='fails') @attr('object-lock') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def 
test_object_lock_put_obj_lock_invalid_days(): bucket_name = get_new_bucket_name() client = get_client() @@ -13176,6 +13573,7 @@ def test_object_lock_put_obj_lock_invalid_days(): @attr(assertion='fails') @attr('object-lock') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_lock_put_obj_lock_invalid_years(): bucket_name = get_new_bucket_name() client = get_client() @@ -13199,6 +13597,7 @@ def test_object_lock_put_obj_lock_invalid_years(): @attr(assertion='fails') @attr('object-lock') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_lock_put_obj_lock_invalid_mode(): bucket_name = get_new_bucket_name() client = get_client() @@ -13234,6 +13633,7 @@ attr(resource='bucket') @attr(assertion='fails') @attr('object-lock') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_lock_put_obj_lock_invalid_status(): bucket_name = get_new_bucket_name() client = get_client() @@ -13257,6 +13657,7 @@ attr(resource='bucket') @attr(assertion='fails') @attr('object-lock') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_lock_suspend_versioning(): bucket_name = get_new_bucket_name() client = get_client() @@ -13273,6 +13674,7 @@ def test_object_lock_suspend_versioning(): @attr(assertion='success') @attr('object-lock') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_lock_get_obj_lock(): bucket_name = get_new_bucket_name() client = get_client() @@ -13312,6 +13714,7 @@ def test_object_lock_get_obj_lock_invalid_bucket(): @attr(assertion='success') @attr('object-lock') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_lock_put_obj_retention(): bucket_name = get_new_bucket_name() client = get_client() @@ -13350,6 +13753,7 @@ def test_object_lock_put_obj_retention_invalid_bucket(): @attr(assertion='fails') @attr('object-lock') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_lock_put_obj_retention_invalid_mode(): bucket_name = 
get_new_bucket_name() client = get_client() @@ -13375,6 +13779,7 @@ def test_object_lock_put_obj_retention_invalid_mode(): @attr(assertion='success') @attr('object-lock') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_lock_get_obj_retention(): bucket_name = get_new_bucket_name() client = get_client() @@ -13395,6 +13800,7 @@ def test_object_lock_get_obj_retention(): @attr(assertion='success') @attr('object-lock') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_lock_get_obj_retention_iso8601(): bucket_name = get_new_bucket_name() client = get_client() @@ -13435,6 +13841,7 @@ def test_object_lock_get_obj_retention_invalid_bucket(): @attr(assertion='success') @attr('object-lock') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_lock_put_obj_retention_versionid(): bucket_name = get_new_bucket_name() client = get_client() @@ -13456,6 +13863,7 @@ def test_object_lock_put_obj_retention_versionid(): @attr(assertion='success') @attr('object-lock') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_lock_put_obj_retention_override_default_retention(): bucket_name = get_new_bucket_name() client = get_client() @@ -13486,6 +13894,7 @@ def test_object_lock_put_obj_retention_override_default_retention(): @attr(assertion='success') @attr('object-lock') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_lock_put_obj_retention_increase_period(): bucket_name = get_new_bucket_name() client = get_client() @@ -13508,6 +13917,7 @@ def test_object_lock_put_obj_retention_increase_period(): @attr(assertion='fails') @attr('object-lock') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_lock_put_obj_retention_shorten_period(): bucket_name = get_new_bucket_name() client = get_client() @@ -13531,6 +13941,7 @@ def test_object_lock_put_obj_retention_shorten_period(): @attr(assertion='success') @attr('object-lock') @attr('fails_on_dbstore') 
+@pytest.mark.fails_on_dbstore def test_object_lock_put_obj_retention_shorten_period_bypass(): bucket_name = get_new_bucket_name() client = get_client() @@ -13553,6 +13964,7 @@ def test_object_lock_put_obj_retention_shorten_period_bypass(): @attr(assertion='retention period make effects') @attr('object-lock') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_lock_delete_object_with_retention(): bucket_name = get_new_bucket_name() client = get_client() @@ -13576,6 +13988,7 @@ def test_object_lock_delete_object_with_retention(): @attr(assertion='retention period make effects') @attr('object-lock') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_lock_delete_object_with_retention_and_marker(): bucket_name = get_new_bucket_name() client = get_client() @@ -13606,6 +14019,7 @@ def test_object_lock_delete_object_with_retention_and_marker(): @attr(assertion='retention period make effects') @attr('object-lock') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_lock_multi_delete_object_with_retention(): bucket_name = get_new_bucket_name() client = get_client() @@ -13678,6 +14092,7 @@ def test_object_lock_multi_delete_object_with_retention(): @attr(assertion='success') @attr('object-lock') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_lock_put_legal_hold(): bucket_name = get_new_bucket_name() client = get_client() @@ -13715,6 +14130,7 @@ def test_object_lock_put_legal_hold_invalid_bucket(): @attr(assertion='fails') @attr('object-lock') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_lock_put_legal_hold_invalid_status(): bucket_name = get_new_bucket_name() client = get_client() @@ -13734,6 +14150,7 @@ def test_object_lock_put_legal_hold_invalid_status(): @attr(assertion='success') @attr('object-lock') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_lock_get_legal_hold(): bucket_name = get_new_bucket_name() client = get_client() @@ 
-13773,6 +14190,7 @@ def test_object_lock_get_legal_hold_invalid_bucket(): @attr(assertion='fails') @attr('object-lock') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_lock_delete_object_with_legal_hold_on(): bucket_name = get_new_bucket_name() client = get_client() @@ -13793,6 +14211,7 @@ def test_object_lock_delete_object_with_legal_hold_on(): @attr(assertion='fails') @attr('object-lock') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_lock_delete_object_with_legal_hold_off(): bucket_name = get_new_bucket_name() client = get_client() @@ -13810,6 +14229,7 @@ def test_object_lock_delete_object_with_legal_hold_off(): @attr(assertion='success') @attr('object-lock') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_lock_get_obj_metadata(): bucket_name = get_new_bucket_name() client = get_client() @@ -13835,6 +14255,7 @@ def test_object_lock_get_obj_metadata(): @attr(assertion='success') @attr('object-lock') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_lock_uploading_obj(): bucket_name = get_new_bucket_name() client = get_client() @@ -13856,6 +14277,7 @@ def test_object_lock_uploading_obj(): @attr(assertion='succeeds') @attr('object-lock') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_lock_changing_mode_from_governance_with_bypass(): bucket_name = get_new_bucket_name() key = 'file1' @@ -13875,6 +14297,7 @@ def test_object_lock_changing_mode_from_governance_with_bypass(): @attr(assertion='fails') @attr('object-lock') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_lock_changing_mode_from_governance_without_bypass(): bucket_name = get_new_bucket_name() key = 'file1' @@ -13897,6 +14320,7 @@ def test_object_lock_changing_mode_from_governance_without_bypass(): @attr(assertion='fails') @attr('object-lock') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_object_lock_changing_mode_from_compliance(): 
bucket_name = get_new_bucket_name() key = 'file1' @@ -13918,6 +14342,7 @@ def test_object_lock_changing_mode_from_compliance(): @attr(operation='copy w/ x-amz-copy-source-if-match: the latest ETag') @attr(assertion='succeeds') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_copy_object_ifmatch_good(): bucket_name = get_new_bucket() client = get_client() @@ -13934,6 +14359,7 @@ def test_copy_object_ifmatch_good(): @attr(assertion='fails 412') # TODO: remove fails_on_rgw when https://tracker.ceph.com/issues/40808 is resolved @attr('fails_on_rgw') +@pytest.mark.fails_on_rgw def test_copy_object_ifmatch_failed(): bucket_name = get_new_bucket() client = get_client() @@ -13950,6 +14376,7 @@ def test_copy_object_ifmatch_failed(): @attr(assertion='fails 412') # TODO: remove fails_on_rgw when https://tracker.ceph.com/issues/40808 is resolved @attr('fails_on_rgw') +@pytest.mark.fails_on_rgw def test_copy_object_ifnonematch_good(): bucket_name = get_new_bucket() client = get_client() @@ -13965,6 +14392,7 @@ def test_copy_object_ifnonematch_good(): @attr(operation='copy w/ x-amz-copy-source-if-none-match: bogus ETag') @attr(assertion='succeeds') @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_copy_object_ifnonematch_failed(): bucket_name = get_new_bucket() client = get_client() @@ -13981,6 +14409,7 @@ def test_copy_object_ifnonematch_failed(): @attr(assertion='fails 400') # TODO: results in a 404 instead of 400 on the RGW @attr('fails_on_rgw') +@pytest.mark.fails_on_rgw def test_object_read_unreadable(): bucket_name = get_new_bucket() client = get_client() @@ -14387,6 +14816,7 @@ def _put_bucket_encryption_kms(client, bucket_name): @attr(operation='put bucket encryption on bucket - s3') @attr(assertion='succeeds') @attr('sse-s3') +@pytest.mark.sse_s3 def test_put_bucket_encryption_s3(): bucket_name = get_new_bucket() client = get_client() @@ -14397,6 +14827,7 @@ def test_put_bucket_encryption_s3(): @attr(operation='put bucket encryption on 
bucket - kms') @attr(assertion='succeeds') @attr('encryption') +@pytest.mark.encryption def test_put_bucket_encryption_kms(): bucket_name = get_new_bucket() client = get_client() @@ -14408,6 +14839,7 @@ def test_put_bucket_encryption_kms(): @attr(operation='get bucket encryption on bucket - s3') @attr(assertion='succeeds') @attr('sse-s3') +@pytest.mark.sse_s3 def test_get_bucket_encryption_s3(): bucket_name = get_new_bucket() client = get_client() @@ -14432,6 +14864,7 @@ def test_get_bucket_encryption_s3(): @attr(operation='get bucket encryption on bucket - kms') @attr(assertion='succeeds') @attr('encryption') +@pytest.mark.encryption def test_get_bucket_encryption_kms(): kms_keyid = get_main_kms_keyid() if kms_keyid is None: @@ -14460,6 +14893,7 @@ def test_get_bucket_encryption_kms(): @attr(operation='delete bucket encryption on bucket - s3') @attr(assertion='succeeds') @attr('sse-s3') +@pytest.mark.sse_s3 def test_delete_bucket_encryption_s3(): bucket_name = get_new_bucket() client = get_client() @@ -14486,6 +14920,7 @@ def test_delete_bucket_encryption_s3(): @attr(operation='delete bucket encryption on bucket - kms') @attr(assertion='succeeds') @attr('encryption') +@pytest.mark.encryption def test_delete_bucket_encryption_kms(): bucket_name = get_new_bucket() client = get_client() @@ -14530,9 +14965,13 @@ def _test_sse_s3_default_upload(file_size): @attr(operation='Test 1 byte upload to SSE-S3 default-encrypted bucket') @attr(assertion='success') @attr('encryption') +@pytest.mark.encryption @attr('bucket-encryption') +@pytest.mark.bucket_encryption @attr('sse-s3') +@pytest.mark.sse_s3 @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_sse_s3_default_upload_1b(): _test_sse_s3_default_upload(1) @@ -14541,9 +14980,13 @@ def test_sse_s3_default_upload_1b(): @attr(operation='Test 1KB upload to SSE-S3 default-encrypted bucket') @attr(assertion='success') @attr('encryption') +@pytest.mark.encryption @attr('bucket-encryption') 
+@pytest.mark.bucket_encryption @attr('sse-s3') +@pytest.mark.sse_s3 @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_sse_s3_default_upload_1kb(): _test_sse_s3_default_upload(1024) @@ -14552,9 +14995,13 @@ def test_sse_s3_default_upload_1kb(): @attr(operation='Test 1MB upload to SSE-S3 default-encrypted bucket') @attr(assertion='success') @attr('encryption') +@pytest.mark.encryption @attr('bucket-encryption') +@pytest.mark.bucket_encryption @attr('sse-s3') +@pytest.mark.sse_s3 @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_sse_s3_default_upload_1mb(): _test_sse_s3_default_upload(1024*1024) @@ -14563,9 +15010,13 @@ def test_sse_s3_default_upload_1mb(): @attr(operation='Test 8MB upload to SSE-S3 default-encrypted bucket') @attr(assertion='success') @attr('encryption') +@pytest.mark.encryption @attr('bucket-encryption') +@pytest.mark.bucket_encryption @attr('sse-s3') +@pytest.mark.sse_s3 @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_sse_s3_default_upload_8mb(): _test_sse_s3_default_upload(8*1024*1024) @@ -14598,9 +15049,13 @@ def _test_sse_kms_default_upload(file_size): @attr(operation='Test 1 byte upload to SSE-KMS default-encrypted bucket') @attr(assertion='success') @attr('encryption') +@pytest.mark.encryption @attr('bucket-encryption') +@pytest.mark.bucket_encryption @attr('sse-s3') +@pytest.mark.sse_s3 @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_sse_kms_default_upload_1b(): _test_sse_kms_default_upload(1) @@ -14609,9 +15064,13 @@ def test_sse_kms_default_upload_1b(): @attr(operation='Test 1KB upload to SSE-KMS default-encrypted bucket') @attr(assertion='success') @attr('encryption') +@pytest.mark.encryption @attr('bucket-encryption') +@pytest.mark.bucket_encryption @attr('sse-s3') +@pytest.mark.sse_s3 @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_sse_kms_default_upload_1kb(): _test_sse_kms_default_upload(1024) @@ -14620,9 +15079,13 @@ def 
test_sse_kms_default_upload_1kb(): @attr(operation='Test 1MB upload to SSE-KMS default-encrypted bucket') @attr(assertion='success') @attr('encryption') +@pytest.mark.encryption @attr('bucket-encryption') +@pytest.mark.bucket_encryption @attr('sse-s3') +@pytest.mark.sse_s3 @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_sse_kms_default_upload_1mb(): _test_sse_kms_default_upload(1024*1024) @@ -14631,9 +15094,13 @@ def test_sse_kms_default_upload_1mb(): @attr(operation='Test 8MB upload to SSE-KMS default-encrypted bucket') @attr(assertion='success') @attr('encryption') +@pytest.mark.encryption @attr('bucket-encryption') +@pytest.mark.bucket_encryption @attr('sse-s3') +@pytest.mark.sse_s3 @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_sse_kms_default_upload_8mb(): _test_sse_kms_default_upload(8*1024*1024) @@ -14644,9 +15111,13 @@ def test_sse_kms_default_upload_8mb(): @attr(operation='Test head operation on SSE-S3 default-encrypted object') @attr(assertion='success') @attr('encryption') +@pytest.mark.encryption @attr('bucket-encryption') +@pytest.mark.bucket_encryption @attr('sse-s3') +@pytest.mark.sse_s3 @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_sse_s3_default_method_head(): bucket_name = get_new_bucket() client = get_client() @@ -14673,9 +15144,13 @@ def test_sse_s3_default_method_head(): @attr(operation='complete SSE-S3 multi-part upload') @attr(assertion='successful') @attr('encryption') +@pytest.mark.encryption @attr('bucket-encryption') +@pytest.mark.bucket_encryption @attr('sse-s3') +@pytest.mark.sse_s3 @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_sse_s3_default_multipart_upload(): bucket_name = get_new_bucket() client = get_client() @@ -14724,9 +15199,13 @@ def test_sse_s3_default_multipart_upload(): @attr(operation='authenticated SSE-S3 browser based upload via POST request') @attr(assertion='succeeds and returns written data') @attr('encryption') +@pytest.mark.encryption 
@attr('bucket-encryption') +@pytest.mark.bucket_encryption @attr('sse-s3') +@pytest.mark.sse_s3 @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_sse_s3_default_post_object_authenticated_request(): bucket_name = get_new_bucket() client = get_client() @@ -14774,9 +15253,11 @@ def test_sse_s3_default_post_object_authenticated_request(): @attr(operation='authenticated SSE-kMS browser based upload via POST request') @attr(assertion='succeeds and returns written data') @attr('encryption') +@pytest.mark.encryption @attr('bucket-encryption') -@attr('encryption') +@pytest.mark.bucket_encryption @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_sse_kms_default_post_object_authenticated_request(): kms_keyid = get_main_kms_keyid() if kms_keyid is None: @@ -14845,8 +15326,11 @@ def _test_sse_s3_encrypted_upload(file_size): @attr(operation='Test 1 byte upload with SSE-S3 encryption') @attr(assertion='success') @attr('encryption') +@pytest.mark.encryption @attr('sse-s3') +@pytest.mark.sse_s3 @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_sse_s3_encrypted_upload_1b(): _test_sse_s3_encrypted_upload(1) @@ -14855,8 +15339,11 @@ def test_sse_s3_encrypted_upload_1b(): @attr(operation='Test 1Kb upload with SSE-S3 encryption') @attr(assertion='success') @attr('encryption') +@pytest.mark.encryption @attr('sse-s3') +@pytest.mark.sse_s3 @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_sse_s3_encrypted_upload_1kb(): _test_sse_s3_encrypted_upload(1024) @@ -14865,8 +15352,11 @@ def test_sse_s3_encrypted_upload_1kb(): @attr(operation='Test 1MB upload with SSE-S3 encryption') @attr(assertion='success') @attr('encryption') +@pytest.mark.encryption @attr('sse-s3') +@pytest.mark.sse_s3 @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_sse_s3_encrypted_upload_1mb(): _test_sse_s3_encrypted_upload(1024*1024) @@ -14875,7 +15365,10 @@ def test_sse_s3_encrypted_upload_1mb(): @attr(operation='Test 8MB upload with SSE-S3 
encryption') @attr(assertion='success') @attr('encryption') +@pytest.mark.encryption @attr('sse-s3') +@pytest.mark.sse_s3 @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_sse_s3_encrypted_upload_8mb(): _test_sse_s3_encrypted_upload(8*1024*1024) diff --git a/s3tests_boto3/functional/test_s3select.py b/s3tests_boto3/functional/test_s3select.py index 0b5ccda..5fa4f1c 100644 --- a/s3tests_boto3/functional/test_s3select.py +++ b/s3tests_boto3/functional/test_s3select.py @@ -1,4 +1,5 @@ import nose +import pytest import random import string import re @@ -79,11 +80,13 @@ def generate_s3select_expression_projection(bucket_name,obj_name): assert( abs(float(res.split("\n")[1]) - eval(e)) < epsilon ) @attr('s3select') +@pytest.mark.s3select def get_random_string(): return uuid.uuid4().hex[:6].upper() @attr('s3select') +@pytest.mark.s3select def test_generate_where_clause(): # create small csv file for testing the random expressions @@ -96,6 +99,7 @@ def test_generate_where_clause(): generate_s3select_where_clause(bucket_name,obj_name) @attr('s3select') +@pytest.mark.s3select def test_generate_projection(): # create small csv file for testing the random expressions @@ -312,6 +316,7 @@ def create_list_of_int(column_pos,obj,field_split=",",row_split="\n"): return list_of_int @attr('s3select') +@pytest.mark.s3select def test_count_operation(): csv_obj_name = get_random_string() bucket_name = "test" @@ -323,6 +328,7 @@ def test_count_operation(): s3select_assert_result( num_of_rows, int( res )) @attr('s3select') +@pytest.mark.s3select def test_column_sum_min_max(): csv_obj = create_random_csv_object(10000,10) @@ -388,6 +394,7 @@ def test_column_sum_min_max(): s3select_assert_result( int(count)*4 , int(sum1)-int(sum2) ) @attr('s3select') +@pytest.mark.s3select def test_nullif_expressions(): csv_obj = create_random_csv_object(10000,10) @@ -443,6 +450,7 @@ def test_nullif_expressions(): s3select_assert_result( res_s3select_nullif, res_s3select) @attr('s3select') 
+@pytest.mark.s3select def test_nulliftrue_expressions(): csv_obj = create_random_csv_object(10000,10) @@ -470,6 +478,7 @@ def test_nulliftrue_expressions(): s3select_assert_result( res_s3select_nullif, res_s3select) @attr('s3select') +@pytest.mark.s3select def test_is_not_null_expressions(): csv_obj = create_random_csv_object(10000,10) @@ -491,6 +500,7 @@ def test_is_not_null_expressions(): s3select_assert_result( res_s3select_null, res_s3select) @attr('s3select') +@pytest.mark.s3select def test_lowerupper_expressions(): csv_obj = create_random_csv_object(1,10) @@ -508,6 +518,7 @@ def test_lowerupper_expressions(): s3select_assert_result( res_s3select, "AB12CD$$") @attr('s3select') +@pytest.mark.s3select def test_in_expressions(): # purpose of test: engine is process correctly several projections containing aggregation-functions @@ -578,6 +589,7 @@ def test_in_expressions(): s3select_assert_result( res_s3select_in, res_s3select ) @attr('s3select') +@pytest.mark.s3select def test_true_false_in_expressions(): csv_obj = create_random_csv_object(10000,10) @@ -623,6 +635,7 @@ def test_true_false_in_expressions(): s3select_assert_result( res_s3select_in, res_s3select ) @attr('s3select') +@pytest.mark.s3select def test_like_expressions(): csv_obj = create_random_csv_object_string(1000,10) @@ -710,6 +723,7 @@ def test_like_expressions(): s3select_assert_result( res_s3select_like, res_s3select ) @attr('s3select') +@pytest.mark.s3select def test_truefalselike_expressions(): csv_obj = create_random_csv_object_string(1000,10) @@ -755,6 +769,7 @@ def test_truefalselike_expressions(): s3select_assert_result( res_s3select_like, res_s3select ) @attr('s3select') +@pytest.mark.s3select def test_nullif_expressions(): csv_obj = create_random_csv_object(10000,10) @@ -782,6 +797,7 @@ def test_nullif_expressions(): nose.tools.assert_equal( res_s3select_nullif, res_s3select) @attr('s3select') +@pytest.mark.s3select def test_lowerupper_expressions(): csv_obj = 
create_random_csv_object(1,10) @@ -799,6 +815,7 @@ def test_lowerupper_expressions(): nose.tools.assert_equal( res_s3select, "AB12CD$$") @attr('s3select') +@pytest.mark.s3select def test_in_expressions(): # purpose of test: engine is process correctly several projections containing aggregation-functions @@ -839,6 +856,7 @@ def test_in_expressions(): nose.tools.assert_equal( res_s3select_in, res_s3select ) @attr('s3select') +@pytest.mark.s3select def test_like_expressions(): csv_obj = create_random_csv_object_string(10000,10) @@ -885,6 +903,7 @@ def test_like_expressions(): @attr('s3select') +@pytest.mark.s3select def test_complex_expressions(): # purpose of test: engine is process correctly several projections containing aggregation-functions @@ -917,6 +936,7 @@ def test_complex_expressions(): s3select_assert_result( res_s3select_between_numbers, res_s3select_eq_modolu) @attr('s3select') +@pytest.mark.s3select def test_alias(): # purpose: test is comparing result of exactly the same queries , one with alias the other without. 
@@ -938,6 +958,7 @@ def test_alias(): @attr('s3select') +@pytest.mark.s3select def test_alias_cyclic_refernce(): number_of_rows = 10000 @@ -956,6 +977,7 @@ def test_alias_cyclic_refernce(): assert int(find_res) >= 0 @attr('s3select') +@pytest.mark.s3select def test_datetime(): # purpose of test is to validate date-time functionality is correct, @@ -987,6 +1009,7 @@ def test_datetime(): s3select_assert_result( res_s3select_date_time_to_timestamp, res_s3select_substring) @attr('s3select') +@pytest.mark.s3select def test_true_false_datetime(): # purpose of test is to validate date-time functionality is correct, @@ -1021,6 +1044,7 @@ def test_true_false_datetime(): s3select_assert_result( res_s3select_date_time_utcnow, res_s3select_count) @attr('s3select') +@pytest.mark.s3select def test_csv_parser(): # purpuse: test default csv values(, \n " \ ), return value may contain meta-char @@ -1061,6 +1085,7 @@ def test_csv_parser(): s3select_assert_result( res_s3select_alias, 'null') @attr('s3select') +@pytest.mark.s3select def test_csv_definition(): number_of_rows = 10000 @@ -1091,6 +1116,7 @@ def test_csv_definition(): @attr('s3select') +@pytest.mark.s3select def test_schema_definition(): number_of_rows = 10000 @@ -1126,6 +1152,7 @@ def test_schema_definition(): assert ((res_multiple_defintion.find("multiple definition of column {c4} as schema-column and alias")) >= 0) @attr('s3select') +@pytest.mark.s3select def test_when_then_else_expressions(): csv_obj = create_random_csv_object(10000,10) @@ -1155,6 +1182,7 @@ def test_when_then_else_expressions(): s3select_assert_result( str(count3) , res2) @attr('s3select') +@pytest.mark.s3select def test_coalesce_expressions(): csv_obj = create_random_csv_object(10000,10) @@ -1177,6 +1205,7 @@ def test_coalesce_expressions(): @attr('s3select') +@pytest.mark.s3select def test_cast_expressions(): csv_obj = create_random_csv_object(10000,10) @@ -1198,6 +1227,7 @@ def test_cast_expressions(): s3select_assert_result( res_s3select, res) 
@attr('s3select') +@pytest.mark.s3select def test_version(): return @@ -1216,6 +1246,7 @@ def test_version(): s3select_assert_result( res_version, "41.a," ) @attr('s3select') +@pytest.mark.s3select def test_trim_expressions(): csv_obj = create_random_csv_object_trim(10000,10) @@ -1255,6 +1286,7 @@ def test_trim_expressions(): s3select_assert_result( res_s3select_trim, res_s3select ) @attr('s3select') +@pytest.mark.s3select def test_truefalse_trim_expressions(): csv_obj = create_random_csv_object_trim(10000,10) @@ -1294,6 +1326,7 @@ def test_truefalse_trim_expressions(): s3select_assert_result( res_s3select_trim, res_s3select ) @attr('s3select') +@pytest.mark.s3select def test_escape_expressions(): csv_obj = create_random_csv_object_escape(10000,10) @@ -1315,6 +1348,7 @@ def test_escape_expressions(): s3select_assert_result( res_s3select_escape, res_s3select ) @attr('s3select') +@pytest.mark.s3select def test_case_value_expressions(): csv_obj = create_random_csv_object(10000,10) @@ -1330,6 +1364,7 @@ def test_case_value_expressions(): s3select_assert_result( res_s3select_case, res_s3select ) @attr('s3select') +@pytest.mark.s3select def test_bool_cast_expressions(): csv_obj = create_random_csv_object(10000,10) @@ -1345,6 +1380,7 @@ def test_bool_cast_expressions(): s3select_assert_result( res_s3select_cast, res_s3select ) @attr('s3select') +@pytest.mark.s3select def test_progress_expressions(): csv_obj = create_random_csv_object(1000000,10) @@ -1372,6 +1408,7 @@ def test_progress_expressions(): s3select_assert_result({}, res_s3select_response[total_response-1]) @attr('s3select') +@pytest.mark.s3select def test_output_serial_expressions(): return # TODO fix test diff --git a/s3tests_boto3/functional/test_sts.py b/s3tests_boto3/functional/test_sts.py index 16b967c..1f8bbdc 100644 --- a/s3tests_boto3/functional/test_sts.py +++ b/s3tests_boto3/functional/test_sts.py @@ -5,6 +5,7 @@ from botocore.exceptions import ParamValidationError from nose.tools import eq_ as eq from 
nose.plugins.attrib import attr from nose.plugins.skip import SkipTest +import pytest import isodate import email.utils import datetime @@ -156,7 +157,9 @@ def get_s3_resource_using_iam_creds(): @attr(operation='check') @attr(assertion='s3 ops only accessible by temporary credentials') @attr('test_of_sts') +@pytest.mark.test_of_sts @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_get_session_token(): iam_client=get_iam_client() sts_client=get_sts_client() @@ -190,7 +193,9 @@ def test_get_session_token(): @attr(operation='check') @attr(assertion='s3 ops denied by permanent credentials') @attr('test_of_sts') +@pytest.mark.test_of_sts @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_get_session_token_permanent_creds_denied(): s3bucket_error=None iam_client=get_iam_client() @@ -227,7 +232,9 @@ def test_get_session_token_permanent_creds_denied(): @attr(operation='check') @attr(assertion='role policy allows all s3 ops') @attr('test_of_sts') +@pytest.mark.test_of_sts @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_assume_role_allow(): iam_client=get_iam_client() sts_client=get_sts_client() @@ -264,7 +271,9 @@ def test_assume_role_allow(): @attr(operation='check') @attr(assertion='role policy denies all s3 ops') @attr('test_of_sts') +@pytest.mark.test_of_sts @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_assume_role_deny(): s3bucket_error=None iam_client=get_iam_client() @@ -303,7 +312,9 @@ def test_assume_role_deny(): @attr(operation='check') @attr(assertion='creds expire so all s3 ops fails') @attr('test_of_sts') +@pytest.mark.test_of_sts @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_assume_role_creds_expiry(): iam_client=get_iam_client() sts_client=get_sts_client() @@ -342,7 +353,9 @@ def test_assume_role_creds_expiry(): @attr(operation='check') @attr(assertion='HEAD fails with 403 when role policy denies s3:ListBucket') @attr('test_of_sts') +@pytest.mark.test_of_sts 
@attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_assume_role_deny_head_nonexistent(): # create a bucket with the normal s3 client bucket_name = get_new_bucket_name() @@ -384,7 +397,9 @@ def test_assume_role_deny_head_nonexistent(): @attr(operation='check') @attr(assertion='HEAD fails with 404 when role policy allows s3:ListBucket') @attr('test_of_sts') +@pytest.mark.test_of_sts @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_assume_role_allow_head_nonexistent(): # create a bucket with the normal s3 client bucket_name = get_new_bucket_name() @@ -427,8 +442,11 @@ def test_assume_role_allow_head_nonexistent(): @attr(operation='check') @attr(assertion='assuming role through web token') @attr('webidentity_test') +@pytest.mark.webidentity_test @attr('token_claims_trust_policy_test') +@pytest.mark.token_claims_trust_policy_test @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_assume_role_with_web_identity(): check_webidentity() iam_client=get_iam_client() @@ -481,6 +499,7 @@ def test_assume_role_with_web_identity(): @attr(operation='check') @attr(assertion='assume_role_with_web_token creds expire') @attr('webidentity_test') +@pytest.mark.webidentity_test def test_assume_role_with_web_identity_invalid_webtoken(): resp_error=None iam_client=get_iam_client() @@ -531,8 +550,11 @@ def test_assume_role_with_web_identity_invalid_webtoken(): @attr(operation='check') @attr(assertion='checking session policy working for two different buckets') @attr('webidentity_test') +@pytest.mark.webidentity_test @attr('session_policy') +@pytest.mark.session_policy @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_session_policy_check_on_different_buckets(): check_webidentity() iam_client=get_iam_client() @@ -604,8 +626,11 @@ def test_session_policy_check_on_different_buckets(): @attr(operation='check') @attr(assertion='checking session policy working for same bucket') @attr('webidentity_test') +@pytest.mark.webidentity_test 
@attr('session_policy') +@pytest.mark.session_policy @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_session_policy_check_on_same_bucket(): check_webidentity() iam_client=get_iam_client() @@ -665,8 +690,11 @@ def test_session_policy_check_on_same_bucket(): @attr(operation='check') @attr(assertion='checking put_obj op denial') @attr('webidentity_test') +@pytest.mark.webidentity_test @attr('session_policy') +@pytest.mark.session_policy @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_session_policy_check_put_obj_denial(): check_webidentity() iam_client=get_iam_client() @@ -731,8 +759,11 @@ def test_session_policy_check_put_obj_denial(): @attr(operation='check') @attr(assertion='checking put_obj working by swapping policies') @attr('webidentity_test') +@pytest.mark.webidentity_test @attr('session_policy') +@pytest.mark.session_policy @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_swapping_role_policy_and_session_policy(): check_webidentity() iam_client=get_iam_client() @@ -792,8 +823,11 @@ def test_swapping_role_policy_and_session_policy(): @attr(operation='check') @attr(assertion='checking put_obj working by setting different permissions to role and session policy') @attr('webidentity_test') +@pytest.mark.webidentity_test @attr('session_policy') +@pytest.mark.session_policy @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_session_policy_check_different_op_permissions(): check_webidentity() iam_client=get_iam_client() @@ -858,8 +892,11 @@ def test_session_policy_check_different_op_permissions(): @attr(operation='check') @attr(assertion='checking op behaviour with deny effect') @attr('webidentity_test') +@pytest.mark.webidentity_test @attr('session_policy') +@pytest.mark.session_policy @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_session_policy_check_with_deny_effect(): check_webidentity() iam_client=get_iam_client() @@ -923,8 +960,11 @@ def 
test_session_policy_check_with_deny_effect(): @attr(operation='check') @attr(assertion='checking put_obj working with deny and allow on same op') @attr('webidentity_test') +@pytest.mark.webidentity_test @attr('session_policy') +@pytest.mark.session_policy @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_session_policy_check_with_deny_on_same_op(): check_webidentity() iam_client=get_iam_client() @@ -988,8 +1028,11 @@ def test_session_policy_check_with_deny_on_same_op(): @attr(operation='check') @attr(assertion='checking op when bucket policy has role arn') @attr('webidentity_test') +@pytest.mark.webidentity_test @attr('session_policy') +@pytest.mark.session_policy @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_session_policy_bucket_policy_role_arn(): check_webidentity() iam_client=get_iam_client() @@ -1068,8 +1111,11 @@ def test_session_policy_bucket_policy_role_arn(): @attr(operation='check') @attr(assertion='checking op when bucket policy has session arn') @attr('webidentity_test') +@pytest.mark.webidentity_test @attr('session_policy') +@pytest.mark.session_policy @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_session_policy_bucket_policy_session_arn(): check_webidentity() iam_client=get_iam_client() @@ -1146,8 +1192,11 @@ def test_session_policy_bucket_policy_session_arn(): @attr(operation='check') @attr(assertion='checking copy object op with role, session and bucket policy') @attr('webidentity_test') +@pytest.mark.webidentity_test @attr('session_policy') +@pytest.mark.session_policy @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_session_policy_copy_object(): check_webidentity() iam_client=get_iam_client() @@ -1231,8 +1280,11 @@ def test_session_policy_copy_object(): @attr(operation='check') @attr(assertion='checking op is denied when no role policy') @attr('webidentity_test') +@pytest.mark.webidentity_test @attr('session_policy') +@pytest.mark.session_policy @attr('fails_on_dbstore') 
+@pytest.mark.fails_on_dbstore def test_session_policy_no_bucket_role_policy(): check_webidentity() iam_client=get_iam_client() @@ -1287,8 +1339,11 @@ def test_session_policy_no_bucket_role_policy(): @attr(operation='check') @attr(assertion='checking op is denied when resource policy denies') @attr('webidentity_test') +@pytest.mark.webidentity_test @attr('session_policy') +@pytest.mark.session_policy @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_session_policy_bucket_policy_deny(): check_webidentity() iam_client=get_iam_client() @@ -1365,8 +1420,11 @@ def test_session_policy_bucket_policy_deny(): @attr(operation='check') @attr(assertion='assuming role using web token using sub in trust policy') @attr('webidentity_test') +@pytest.mark.webidentity_test @attr('token_claims_trust_policy_test') +@pytest.mark.token_claims_trust_policy_test @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_assume_role_with_web_identity_with_sub(): check_webidentity() iam_client=get_iam_client() @@ -1418,8 +1476,11 @@ def test_assume_role_with_web_identity_with_sub(): @attr(operation='check') @attr(assertion='assuming role using web token using azp in trust policy') @attr('webidentity_test') +@pytest.mark.webidentity_test @attr('token_claims_trust_policy_test') +@pytest.mark.token_claims_trust_policy_test @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_assume_role_with_web_identity_with_azp(): check_webidentity() iam_client=get_iam_client() @@ -1471,9 +1532,13 @@ def test_assume_role_with_web_identity_with_azp(): @attr(operation='check') @attr(assertion='assuming role using web token using aws:RequestTag in trust policy') @attr('webidentity_test') +@pytest.mark.webidentity_test @attr('abac_test') +@pytest.mark.abac_test @attr('token_request_tag_trust_policy_test') +@pytest.mark.token_request_tag_trust_policy_test @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_assume_role_with_web_identity_with_request_tag(): 
check_webidentity() iam_client=get_iam_client() @@ -1524,9 +1589,13 @@ def test_assume_role_with_web_identity_with_request_tag(): @attr(operation='check') @attr(assertion='assuming role using web token with aws:PrincipalTag in role policy') @attr('webidentity_test') +@pytest.mark.webidentity_test @attr('abac_test') +@pytest.mark.abac_test @attr('token_principal_tag_role_policy_test') +@pytest.mark.token_principal_tag_role_policy_test @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_assume_role_with_web_identity_with_principal_tag(): check_webidentity() iam_client=get_iam_client() @@ -1577,9 +1646,13 @@ def test_assume_role_with_web_identity_with_principal_tag(): @attr(operation='check') @attr(assertion='assuming role using web token with aws:PrincipalTag in role policy') @attr('webidentity_test') +@pytest.mark.webidentity_test @attr('abac_test') +@pytest.mark.abac_test @attr('token_principal_tag_role_policy_test') +@pytest.mark.token_principal_tag_role_policy_test @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_assume_role_with_web_identity_for_all_values(): check_webidentity() iam_client=get_iam_client() @@ -1630,9 +1703,13 @@ def test_assume_role_with_web_identity_for_all_values(): @attr(operation='check') @attr(assertion='assuming role using web token with aws:PrincipalTag in role policy') @attr('webidentity_test') +@pytest.mark.webidentity_test @attr('abac_test') +@pytest.mark.abac_test @attr('token_principal_tag_role_policy_test') +@pytest.mark.token_principal_tag_role_policy_test @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_assume_role_with_web_identity_for_all_values_deny(): check_webidentity() iam_client=get_iam_client() @@ -1685,9 +1762,13 @@ def test_assume_role_with_web_identity_for_all_values_deny(): @attr(operation='check') @attr(assertion='assuming role using web token with aws:TagKeys in trust policy') @attr('webidentity_test') +@pytest.mark.webidentity_test @attr('abac_test') 
+@pytest.mark.abac_test @attr('token_tag_keys_test') +@pytest.mark.token_tag_keys_test @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_assume_role_with_web_identity_tag_keys_trust_policy(): check_webidentity() iam_client=get_iam_client() @@ -1738,9 +1819,13 @@ def test_assume_role_with_web_identity_tag_keys_trust_policy(): @attr(operation='check') @attr(assertion='assuming role using web token with aws:TagKeys in role permission policy') @attr('webidentity_test') +@pytest.mark.webidentity_test @attr('abac_test') +@pytest.mark.abac_test @attr('token_tag_keys_test') +@pytest.mark.token_tag_keys_test @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_assume_role_with_web_identity_tag_keys_role_policy(): check_webidentity() iam_client=get_iam_client() @@ -1791,9 +1876,13 @@ def test_assume_role_with_web_identity_tag_keys_role_policy(): @attr(operation='check') @attr(assertion='assuming role using web token with s3:ResourceTag in role permission policy') @attr('webidentity_test') +@pytest.mark.webidentity_test @attr('abac_test') +@pytest.mark.abac_test @attr('token_resource_tags_test') +@pytest.mark.token_resource_tags_test @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_assume_role_with_web_identity_resource_tag(): check_webidentity() iam_client=get_iam_client() @@ -1854,9 +1943,13 @@ def test_assume_role_with_web_identity_resource_tag(): @attr(operation='check') @attr(assertion='assuming role using web token with s3:ResourceTag with missing tags on bucket') @attr('webidentity_test') +@pytest.mark.webidentity_test @attr('abac_test') +@pytest.mark.abac_test @attr('token_resource_tags_test') +@pytest.mark.token_resource_tags_test @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_assume_role_with_web_identity_resource_tag_deny(): check_webidentity() iam_client=get_iam_client() @@ -1917,9 +2010,13 @@ def test_assume_role_with_web_identity_resource_tag_deny(): @attr(operation='check') 
@attr(assertion='assuming role using web token with s3:ResourceTag with wrong resource tag in policy') @attr('webidentity_test') +@pytest.mark.webidentity_test @attr('abac_test') +@pytest.mark.abac_test @attr('token_resource_tags_test') +@pytest.mark.token_resource_tags_test @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_assume_role_with_web_identity_wrong_resource_tag_deny(): check_webidentity() iam_client=get_iam_client() @@ -1983,9 +2080,13 @@ def test_assume_role_with_web_identity_wrong_resource_tag_deny(): @attr(operation='check') @attr(assertion='assuming role using web token with s3:ResourceTag matching aws:PrincipalTag in role permission policy') @attr('webidentity_test') +@pytest.mark.webidentity_test @attr('abac_test') +@pytest.mark.abac_test @attr('token_resource_tags_test') +@pytest.mark.token_resource_tags_test @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_assume_role_with_web_identity_resource_tag_princ_tag(): check_webidentity() iam_client=get_iam_client() @@ -2051,9 +2152,13 @@ def test_assume_role_with_web_identity_resource_tag_princ_tag(): @attr(operation='check') @attr(assertion='assuming role using web token with s3:ResourceTag used to test copy object') @attr('webidentity_test') +@pytest.mark.webidentity_test @attr('abac_test') +@pytest.mark.abac_test @attr('token_resource_tags_test') +@pytest.mark.token_resource_tags_test @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore def test_assume_role_with_web_identity_resource_tag_copy_obj(): check_webidentity() iam_client=get_iam_client() @@ -2146,9 +2251,13 @@ def test_assume_role_with_web_identity_resource_tag_copy_obj(): @attr(operation='check') @attr(assertion='assuming role using web token with iam:ResourceTag in role trust policy') @attr('webidentity_test') +@pytest.mark.webidentity_test @attr('abac_test') +@pytest.mark.abac_test @attr('token_role_tags_test') +@pytest.mark.token_role_tags_test @attr('fails_on_dbstore') +@pytest.mark.fails_on_dbstore 
def test_assume_role_with_web_identity_role_resource_tag(): check_webidentity() iam_client=get_iam_client() From c80e9d2118ca5bcdec5285898a532f8e18faa6b6 Mon Sep 17 00:00:00 2001 From: Casey Bodley Date: Fri, 20 Jan 2023 17:04:30 -0500 Subject: [PATCH 05/12] pytest: replace @nose.with_setup with fixtures Signed-off-by: Casey Bodley --- s3tests/functional/test_headers.py | 59 ++------------------ s3tests/functional/test_s3_website.py | 79 +++++++-------------------- s3tests_boto3/functional/test_s3.py | 24 ++++---- 3 files changed, 37 insertions(+), 125 deletions(-) diff --git a/s3tests/functional/test_headers.py b/s3tests/functional/test_headers.py index c9ba342..ecc90ae 100644 --- a/s3tests/functional/test_headers.py +++ b/s3tests/functional/test_headers.py @@ -147,6 +147,10 @@ def _clear_custom_headers(): _custom_headers = {} _remove_headers = [] +@pytest.fixture(autouse=True) +def clear_custom_headers(setup_teardown): + yield + _clear_custom_headers() # clear headers before teardown() def _add_custom_headers(headers=None, remove=None): """ Define header customizations (additions, replacements, removals) @@ -187,7 +191,6 @@ def tag(*tags): @attr(assertion='fails 411') @attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore -@nose.with_setup(teardown=_clear_custom_headers) def test_object_create_bad_contentlength_none(): key = _setup_bad_object(remove=('Content-Length',)) @@ -202,7 +205,6 @@ def test_object_create_bad_contentlength_none(): @attr(method='put') @attr(operation='create w/content length too long') @attr(assertion='fails 400') -@nose.with_setup(teardown=_clear_custom_headers) @attr('fails_on_rgw') @pytest.mark.fails_on_rgw def test_object_create_bad_contentlength_mismatch_above(): @@ -229,7 +231,6 @@ def test_object_create_bad_contentlength_mismatch_above(): @attr(assertion='fails 403') @attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore -@nose.with_setup(teardown=_clear_custom_headers) def test_object_create_bad_authorization_empty(): key = 
_setup_bad_object({'Authorization': ''}) @@ -245,7 +246,6 @@ def test_object_create_bad_authorization_empty(): @attr(assertion='succeeds') @attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore -@nose.with_setup(teardown=_clear_custom_headers) def test_object_create_date_and_amz_date(): date = formatdate(usegmt=True) key = _setup_bad_object({'Date': date, 'X-Amz-Date': date}) @@ -258,7 +258,6 @@ def test_object_create_date_and_amz_date(): @attr(assertion='succeeds') @attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore -@nose.with_setup(teardown=_clear_custom_headers) def test_object_create_amz_date_and_no_date(): date = formatdate(usegmt=True) key = _setup_bad_object({'X-Amz-Date': date}, ('Date',)) @@ -273,7 +272,6 @@ def test_object_create_amz_date_and_no_date(): @attr(assertion='fails 403') @attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore -@nose.with_setup(teardown=_clear_custom_headers) def test_object_create_bad_authorization_none(): key = _setup_bad_object(remove=('Authorization',)) @@ -290,7 +288,6 @@ def test_object_create_bad_authorization_none(): @attr(assertion='succeeds') @attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore -@nose.with_setup(teardown=_clear_custom_headers) def test_bucket_create_contentlength_none(): _add_custom_headers(remove=('Content-Length',)) get_new_bucket() @@ -303,7 +300,6 @@ def test_bucket_create_contentlength_none(): @attr(assertion='succeeds') @attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore -@nose.with_setup(teardown=_clear_custom_headers) def test_object_acl_create_contentlength_none(): bucket = get_new_bucket() key = bucket.new_key('foo') @@ -331,7 +327,6 @@ def _create_new_connection(): @attr(method='put') @attr(operation='create w/empty content length') @attr(assertion='fails 400') -@nose.with_setup(teardown=_clear_custom_headers) @attr('fails_on_rgw') @pytest.mark.fails_on_rgw def test_bucket_create_bad_contentlength_empty(): @@ -349,7 +344,6 @@ def test_bucket_create_bad_contentlength_empty(): 
@attr(assertion='succeeds') @attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore -@nose.with_setup(teardown=_clear_custom_headers) def test_bucket_create_bad_contentlength_none(): _add_custom_headers(remove=('Content-Length',)) bucket = get_new_bucket() @@ -362,7 +356,6 @@ def test_bucket_create_bad_contentlength_none(): @attr(assertion='fails 403') @attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore -@nose.with_setup(teardown=_clear_custom_headers) def test_bucket_create_bad_authorization_empty(): _add_custom_headers({'Authorization': ''}) e = assert_raises(boto.exception.S3ResponseError, get_new_bucket) @@ -379,7 +372,6 @@ def test_bucket_create_bad_authorization_empty(): @attr(assertion='fails 403') @attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore -@nose.with_setup(teardown=_clear_custom_headers) def test_bucket_create_bad_authorization_none(): _add_custom_headers(remove=('Authorization',)) e = assert_raises(boto.exception.S3ResponseError, get_new_bucket) @@ -398,7 +390,6 @@ def test_bucket_create_bad_authorization_none(): @attr(assertion='fails 400') @attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore -@nose.with_setup(teardown=_clear_custom_headers) def test_object_create_bad_contentlength_mismatch_below_aws2(): check_aws2_support() content = 'bar' @@ -417,7 +408,6 @@ def test_object_create_bad_contentlength_mismatch_below_aws2(): @attr(assertion='fails 403') @attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore -@nose.with_setup(teardown=_clear_custom_headers) def test_object_create_bad_authorization_incorrect_aws2(): check_aws2_support() key = _setup_bad_object({'Authorization': 'AWS AKIAIGR7ZNNBHC5BKSUB:FWeDfwojDSdS2Ztmpfeubhd9isU='}) @@ -428,7 +418,6 @@ def test_object_create_bad_authorization_incorrect_aws2(): @tag('auth_aws2') -@nose.with_setup(teardown=_clear_custom_headers) @attr(resource='object') @attr(method='put') @attr(operation='create w/invalid authorization') @@ -450,7 +439,6 @@ def 
test_object_create_bad_authorization_invalid_aws2(): @attr(assertion='fails 403') @attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore -@nose.with_setup(teardown=_clear_custom_headers) def test_object_create_bad_date_none_aws2(): check_aws2_support() key = _setup_bad_object(remove=('Date',)) @@ -465,7 +453,6 @@ def test_object_create_bad_date_none_aws2(): @attr(method='put') @attr(operation='create w/invalid authorization') @attr(assertion='fails 400') -@nose.with_setup(teardown=_clear_custom_headers) def test_bucket_create_bad_authorization_invalid_aws2(): check_aws2_support() _add_custom_headers({'Authorization': 'AWS HAHAHA'}) @@ -481,7 +468,6 @@ def test_bucket_create_bad_authorization_invalid_aws2(): @attr(assertion='fails 403') @attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore -@nose.with_setup(teardown=_clear_custom_headers) def test_bucket_create_bad_date_none_aws2(): check_aws2_support() _add_custom_headers(remove=('Date',)) @@ -508,7 +494,6 @@ def check_aws2_support(): @attr(method='put') @attr(operation='create w/invalid MD5') @attr(assertion='fails 400') -@nose.with_setup(teardown=_clear_custom_headers) def test_object_create_bad_md5_invalid_garbage_aws4(): check_aws4_support() key = _setup_bad_object({'Content-MD5':'AWS4 HAHAHA'}) @@ -524,7 +509,6 @@ def test_object_create_bad_md5_invalid_garbage_aws4(): @attr(method='put') @attr(operation='create w/content length too short') @attr(assertion='fails 400') -@nose.with_setup(teardown=_clear_custom_headers) def test_object_create_bad_contentlength_mismatch_below_aws4(): check_aws4_support() content = 'bar' @@ -542,7 +526,6 @@ def test_object_create_bad_contentlength_mismatch_below_aws4(): @attr(method='put') @attr(operation='create w/incorrect authorization') @attr(assertion='fails 403') -@nose.with_setup(teardown=_clear_custom_headers) def test_object_create_bad_authorization_incorrect_aws4(): check_aws4_support() key = _setup_bad_object({'Authorization': 'AWS4-HMAC-SHA256 
Credential=AKIAIGR7ZNNBHC5BKSUB/20150930/us-east-1/s3/aws4_request,SignedHeaders=host;user-agent,Signature=FWeDfwojDSdS2Ztmpfeubhd9isU='}) @@ -554,7 +537,6 @@ def test_object_create_bad_authorization_incorrect_aws4(): @tag('auth_aws4') -@nose.with_setup(teardown=_clear_custom_headers) @attr(resource='object') @attr(method='put') @attr(operation='create w/invalid authorization') @@ -574,7 +556,6 @@ def test_object_create_bad_authorization_invalid_aws4(): @attr(method='put') @attr(operation='create w/empty user agent') @attr(assertion='fails 403') -@nose.with_setup(teardown=_clear_custom_headers) def test_object_create_bad_ua_empty_aws4(): check_aws4_support() key = _setup_bad_object({'User-Agent': ''}) @@ -590,7 +571,6 @@ def test_object_create_bad_ua_empty_aws4(): @attr(method='put') @attr(operation='create w/no user agent') @attr(assertion='fails 403') -@nose.with_setup(teardown=_clear_custom_headers) def test_object_create_bad_ua_none_aws4(): check_aws4_support() key = _setup_bad_object(remove=('User-Agent',)) @@ -606,7 +586,6 @@ def test_object_create_bad_ua_none_aws4(): @attr(method='put') @attr(operation='create w/invalid date') @attr(assertion='succeeds') -@nose.with_setup(teardown=_clear_custom_headers) def test_object_create_bad_date_invalid_aws4(): check_aws4_support() key = _setup_bad_object({'Date': 'Bad Date'}) @@ -618,7 +597,6 @@ def test_object_create_bad_date_invalid_aws4(): @attr(method='put') @attr(operation='create w/invalid x-amz-date') @attr(assertion='fails 403') -@nose.with_setup(teardown=_clear_custom_headers) def test_object_create_bad_amz_date_invalid_aws4(): check_aws4_support() key = _setup_bad_object({'X-Amz-Date': 'Bad Date'}) @@ -634,7 +612,6 @@ def test_object_create_bad_amz_date_invalid_aws4(): @attr(method='put') @attr(operation='create w/empty date') @attr(assertion='succeeds') -@nose.with_setup(teardown=_clear_custom_headers) def test_object_create_bad_date_empty_aws4(): check_aws4_support() key = _setup_bad_object({'Date': ''}) 
@@ -646,7 +623,6 @@ def test_object_create_bad_date_empty_aws4(): @attr(method='put') @attr(operation='create w/empty x-amz-date') @attr(assertion='fails 403') -@nose.with_setup(teardown=_clear_custom_headers) def test_object_create_bad_amz_date_empty_aws4(): check_aws4_support() key = _setup_bad_object({'X-Amz-Date': ''}) @@ -662,7 +638,6 @@ def test_object_create_bad_amz_date_empty_aws4(): @attr(method='put') @attr(operation='create w/no date') @attr(assertion='succeeds') -@nose.with_setup(teardown=_clear_custom_headers) def test_object_create_bad_date_none_aws4(): check_aws4_support() key = _setup_bad_object(remove=('Date',)) @@ -674,7 +649,6 @@ def test_object_create_bad_date_none_aws4(): @attr(method='put') @attr(operation='create w/no x-amz-date') @attr(assertion='fails 403') -@nose.with_setup(teardown=_clear_custom_headers) def test_object_create_bad_amz_date_none_aws4(): check_aws4_support() key = _setup_bad_object(remove=('X-Amz-Date',)) @@ -690,7 +664,6 @@ def test_object_create_bad_amz_date_none_aws4(): @attr(method='put') @attr(operation='create w/date in past') @attr(assertion='succeeds') -@nose.with_setup(teardown=_clear_custom_headers) def test_object_create_bad_date_before_today_aws4(): check_aws4_support() key = _setup_bad_object({'Date': 'Tue, 07 Jul 2010 21:53:04 GMT'}) @@ -702,7 +675,6 @@ def test_object_create_bad_date_before_today_aws4(): @attr(method='put') @attr(operation='create w/x-amz-date in past') @attr(assertion='fails 403') -@nose.with_setup(teardown=_clear_custom_headers) def test_object_create_bad_amz_date_before_today_aws4(): check_aws4_support() key = _setup_bad_object({'X-Amz-Date': '20100707T215304Z'}) @@ -718,7 +690,6 @@ def test_object_create_bad_amz_date_before_today_aws4(): @attr(method='put') @attr(operation='create w/date in future') @attr(assertion='succeeds') -@nose.with_setup(teardown=_clear_custom_headers) def test_object_create_bad_date_after_today_aws4(): check_aws4_support() key = _setup_bad_object({'Date': 'Tue, 07 
Jul 2030 21:53:04 GMT'}) @@ -730,7 +701,6 @@ def test_object_create_bad_date_after_today_aws4(): @attr(method='put') @attr(operation='create w/x-amz-date in future') @attr(assertion='fails 403') -@nose.with_setup(teardown=_clear_custom_headers) def test_object_create_bad_amz_date_after_today_aws4(): check_aws4_support() key = _setup_bad_object({'X-Amz-Date': '20300707T215304Z'}) @@ -746,7 +716,6 @@ def test_object_create_bad_amz_date_after_today_aws4(): @attr(method='put') @attr(operation='create w/date before epoch') @attr(assertion='succeeds') -@nose.with_setup(teardown=_clear_custom_headers) def test_object_create_bad_date_before_epoch_aws4(): check_aws4_support() key = _setup_bad_object({'Date': 'Tue, 07 Jul 1950 21:53:04 GMT'}) @@ -758,7 +727,6 @@ def test_object_create_bad_date_before_epoch_aws4(): @attr(method='put') @attr(operation='create w/x-amz-date before epoch') @attr(assertion='fails 403') -@nose.with_setup(teardown=_clear_custom_headers) def test_object_create_bad_amz_date_before_epoch_aws4(): check_aws4_support() key = _setup_bad_object({'X-Amz-Date': '19500707T215304Z'}) @@ -774,7 +742,6 @@ def test_object_create_bad_amz_date_before_epoch_aws4(): @attr(method='put') @attr(operation='create w/date after 9999') @attr(assertion='fails 403') -@nose.with_setup(teardown=_clear_custom_headers) def test_object_create_bad_date_after_end_aws4(): check_aws4_support() key = _setup_bad_object({'Date': 'Tue, 07 Jul 9999 21:53:04 GMT'}) @@ -786,7 +753,6 @@ def test_object_create_bad_date_after_end_aws4(): @attr(method='put') @attr(operation='create w/x-amz-date after 9999') @attr(assertion='fails 403') -@nose.with_setup(teardown=_clear_custom_headers) def test_object_create_bad_amz_date_after_end_aws4(): check_aws4_support() key = _setup_bad_object({'X-Amz-Date': '99990707T215304Z'}) @@ -802,7 +768,6 @@ def test_object_create_bad_amz_date_after_end_aws4(): @attr(method='put') @attr(operation='create with missing signed custom header') @attr(assertion='fails 403') 
-@nose.with_setup(teardown=_clear_custom_headers) def test_object_create_missing_signed_custom_header_aws4(): check_aws4_support() method='PUT' @@ -833,7 +798,6 @@ def test_object_create_missing_signed_custom_header_aws4(): @attr(method='put') @attr(opearation='create with missing signed header') @attr(assertion='fails 403') -@nose.with_setup(teardown=_clear_custom_headers) def test_object_create_missing_signed_header_aws4(): check_aws4_support() method='PUT' @@ -865,7 +829,6 @@ def test_object_create_missing_signed_header_aws4(): @attr(method='put') @attr(operation='create w/invalid authorization') @attr(assertion='fails 400') -@nose.with_setup(teardown=_clear_custom_headers) def test_bucket_create_bad_authorization_invalid_aws4(): check_aws4_support() _add_custom_headers({'Authorization': 'AWS4 HAHAHA'}) @@ -881,7 +844,6 @@ def test_bucket_create_bad_authorization_invalid_aws4(): @attr(method='put') @attr(operation='create w/empty user agent') @attr(assertion='fails 403') -@nose.with_setup(teardown=_clear_custom_headers) def test_bucket_create_bad_ua_empty_aws4(): check_aws4_support() _add_custom_headers({'User-Agent': ''}) @@ -896,7 +858,6 @@ def test_bucket_create_bad_ua_empty_aws4(): @attr(method='put') @attr(operation='create w/no user agent') @attr(assertion='fails 403') -@nose.with_setup(teardown=_clear_custom_headers) def test_bucket_create_bad_ua_none_aws4(): check_aws4_support() _add_custom_headers(remove=('User-Agent',)) @@ -912,7 +873,6 @@ def test_bucket_create_bad_ua_none_aws4(): @attr(method='put') @attr(operation='create w/invalid date') @attr(assertion='succeeds') -@nose.with_setup(teardown=_clear_custom_headers) def test_bucket_create_bad_date_invalid_aws4(): check_aws4_support() _add_custom_headers({'Date': 'Bad Date'}) @@ -924,7 +884,6 @@ def test_bucket_create_bad_date_invalid_aws4(): @attr(method='put') @attr(operation='create w/invalid x-amz-date') @attr(assertion='fails 403') -@nose.with_setup(teardown=_clear_custom_headers) def 
test_bucket_create_bad_amz_date_invalid_aws4(): check_aws4_support() _add_custom_headers({'X-Amz-Date': 'Bad Date'}) @@ -940,7 +899,6 @@ def test_bucket_create_bad_amz_date_invalid_aws4(): @attr(method='put') @attr(operation='create w/empty date') @attr(assertion='succeeds') -@nose.with_setup(teardown=_clear_custom_headers) def test_bucket_create_bad_date_empty_aws4(): check_aws4_support() _add_custom_headers({'Date': ''}) @@ -952,7 +910,6 @@ def test_bucket_create_bad_date_empty_aws4(): @attr(method='put') @attr(operation='create w/empty x-amz-date') @attr(assertion='fails 403') -@nose.with_setup(teardown=_clear_custom_headers) def test_bucket_create_bad_amz_date_empty_aws4(): check_aws4_support() _add_custom_headers({'X-Amz-Date': ''}) @@ -967,7 +924,6 @@ def test_bucket_create_bad_amz_date_empty_aws4(): @attr(method='put') @attr(operation='create w/no date') @attr(assertion='succeeds') -@nose.with_setup(teardown=_clear_custom_headers) def test_bucket_create_bad_date_none_aws4(): check_aws4_support() _add_custom_headers(remove=('Date',)) @@ -979,7 +935,6 @@ def test_bucket_create_bad_date_none_aws4(): @attr(method='put') @attr(operation='create w/no x-amz-date') @attr(assertion='fails 403') -@nose.with_setup(teardown=_clear_custom_headers) def test_bucket_create_bad_amz_date_none_aws4(): check_aws4_support() _add_custom_headers(remove=('X-Amz-Date',)) @@ -995,7 +950,6 @@ def test_bucket_create_bad_amz_date_none_aws4(): @attr(method='put') @attr(operation='create w/date in past') @attr(assertion='succeeds') -@nose.with_setup(teardown=_clear_custom_headers) def test_bucket_create_bad_date_before_today_aws4(): check_aws4_support() _add_custom_headers({'Date': 'Tue, 07 Jul 2010 21:53:04 GMT'}) @@ -1007,7 +961,6 @@ def test_bucket_create_bad_date_before_today_aws4(): @attr(method='put') @attr(operation='create w/x-amz-date in past') @attr(assertion='fails 403') -@nose.with_setup(teardown=_clear_custom_headers) def test_bucket_create_bad_amz_date_before_today_aws4(): 
check_aws4_support() _add_custom_headers({'X-Amz-Date': '20100707T215304Z'}) @@ -1023,7 +976,6 @@ def test_bucket_create_bad_amz_date_before_today_aws4(): @attr(method='put') @attr(operation='create w/date in future') @attr(assertion='succeeds') -@nose.with_setup(teardown=_clear_custom_headers) def test_bucket_create_bad_date_after_today_aws4(): check_aws4_support() _add_custom_headers({'Date': 'Tue, 07 Jul 2030 21:53:04 GMT'}) @@ -1035,7 +987,6 @@ def test_bucket_create_bad_date_after_today_aws4(): @attr(method='put') @attr(operation='create w/x-amz-date in future') @attr(assertion='fails 403') -@nose.with_setup(teardown=_clear_custom_headers) def test_bucket_create_bad_amz_date_after_today_aws4(): check_aws4_support() _add_custom_headers({'X-Amz-Date': '20300707T215304Z'}) @@ -1051,7 +1002,6 @@ def test_bucket_create_bad_amz_date_after_today_aws4(): @attr(method='put') @attr(operation='create w/date before epoch') @attr(assertion='succeeds') -@nose.with_setup(teardown=_clear_custom_headers) def test_bucket_create_bad_date_before_epoch_aws4(): check_aws4_support() _add_custom_headers({'Date': 'Tue, 07 Jul 1950 21:53:04 GMT'}) @@ -1063,7 +1013,6 @@ def test_bucket_create_bad_date_before_epoch_aws4(): @attr(method='put') @attr(operation='create w/x-amz-date before epoch') @attr(assertion='fails 403') -@nose.with_setup(teardown=_clear_custom_headers) def test_bucket_create_bad_amz_date_before_epoch_aws4(): check_aws4_support() _add_custom_headers({'X-Amz-Date': '19500707T215304Z'}) diff --git a/s3tests/functional/test_s3_website.py b/s3tests/functional/test_s3_website.py index 79646c0..7d5cd99 100644 --- a/s3tests/functional/test_s3_website.py +++ b/s3tests/functional/test_s3_website.py @@ -46,37 +46,27 @@ ERRORDOC_TEMPLATE = '

ErrorDoc

{random}' CAN_WEBSITE = None +@pytest.fixture(autouse=True, scope="module") def check_can_test_website(): - global CAN_WEBSITE - # This is a bit expensive, so we cache this - if CAN_WEBSITE is None: - bucket = get_new_bucket() - try: - wsconf = bucket.get_website_configuration() - CAN_WEBSITE = True - except boto.exception.S3ResponseError as e: - if e.status == 404 and e.reason == 'Not Found' and e.error_code in ['NoSuchWebsiteConfiguration', 'NoSuchKey']: - CAN_WEBSITE = True - elif e.status == 405 and e.reason == 'Method Not Allowed' and e.error_code == 'MethodNotAllowed': - # rgw_enable_static_website is false - CAN_WEBSITE = False - elif e.status == 403 and e.reason == 'SignatureDoesNotMatch' and e.error_code == 'Forbidden': - # This is older versions that do not support the website code - CAN_WEBSITE = False - elif e.status == 501 and e.error_code == 'NotImplemented': - CAN_WEBSITE = False - else: - raise RuntimeError("Unknown response in checking if WebsiteConf is supported", e) - finally: - bucket.delete() - - if CAN_WEBSITE is True: + bucket = get_new_bucket() + try: + wsconf = bucket.get_website_configuration() return True - elif CAN_WEBSITE is False: - raise SkipTest - else: - raise RuntimeError("Unknown cached response in checking if WebsiteConf is supported") - + except boto.exception.S3ResponseError as e: + if e.status == 404 and e.reason == 'Not Found' and e.error_code in ['NoSuchWebsiteConfiguration', 'NoSuchKey']: + return True + elif e.status == 405 and e.reason == 'Method Not Allowed' and e.error_code == 'MethodNotAllowed': + # rgw_enable_static_website is false + raise SkipTest + elif e.status == 403 and e.reason == 'SignatureDoesNotMatch' and e.error_code == 'Forbidden': + # This is older versions that do not support the website code + raise SkipTest + elif e.status == 501 and e.error_code == 'NotImplemented': + raise SkipTest + else: + raise RuntimeError("Unknown response in checking if WebsiteConf is supported", e) + finally: + bucket.delete() 
def make_website_config(xml_fragment): """ @@ -257,7 +247,6 @@ def _website_request(bucket_name, path, connect_hostname=None, method='GET', tim @pytest.mark.s3website @attr('fails_on_rgw') @pytest.mark.fails_on_rgw -@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_nonexistant_bucket_s3(): bucket_name = get_new_bucket_name() res = _website_request(bucket_name, '') @@ -274,7 +263,6 @@ def test_website_nonexistant_bucket_s3(): @pytest.mark.fails_on_s3 @attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore -@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_nonexistant_bucket_rgw(): bucket_name = get_new_bucket_name() res = _website_request(bucket_name, '') @@ -290,7 +278,6 @@ def test_website_nonexistant_bucket_rgw(): @pytest.mark.s3website @attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore -@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) @timed(10) def test_website_public_bucket_list_public_index(): bucket = get_new_bucket() @@ -321,7 +308,6 @@ def test_website_public_bucket_list_public_index(): @pytest.mark.s3website @attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore -@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_private_bucket_list_public_index(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) @@ -354,7 +340,6 @@ def test_website_private_bucket_list_public_index(): @pytest.mark.s3website @attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore -@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_private_bucket_list_empty(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) @@ -373,7 +358,6 @@ def test_website_private_bucket_list_empty(): @pytest.mark.s3website @attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore -@nose.with_setup(setup=check_can_test_website, 
teardown=common.teardown) def test_website_public_bucket_list_empty(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) @@ -391,7 +375,6 @@ def test_website_public_bucket_list_empty(): @pytest.mark.s3website @attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore -@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_public_bucket_list_private_index(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) @@ -419,7 +402,6 @@ def test_website_public_bucket_list_private_index(): @pytest.mark.s3website @attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore -@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_private_bucket_list_private_index(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) @@ -448,7 +430,6 @@ def test_website_private_bucket_list_private_index(): @pytest.mark.s3website @attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore -@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_private_bucket_list_empty_missingerrordoc(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) @@ -467,7 +448,6 @@ def test_website_private_bucket_list_empty_missingerrordoc(): @pytest.mark.s3website @attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore -@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_public_bucket_list_empty_missingerrordoc(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) @@ -485,7 +465,6 @@ def test_website_public_bucket_list_empty_missingerrordoc(): @pytest.mark.s3website @attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore -@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_public_bucket_list_private_index_missingerrordoc(): 
bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) @@ -512,7 +491,6 @@ def test_website_public_bucket_list_private_index_missingerrordoc(): @pytest.mark.s3website @attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore -@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_private_bucket_list_private_index_missingerrordoc(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) @@ -540,7 +518,6 @@ def test_website_private_bucket_list_private_index_missingerrordoc(): @pytest.mark.s3website @attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore -@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_private_bucket_list_empty_blockederrordoc(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) @@ -571,7 +548,6 @@ def test_website_private_bucket_list_empty_blockederrordoc(): @pytest.mark.s3website @attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore -@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_public_bucket_list_pubilc_errordoc(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) @@ -625,7 +601,6 @@ def test_website_public_bucket_list_pubilc_errordoc(): @pytest.mark.s3website @attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore -@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_public_bucket_list_empty_blockederrordoc(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) @@ -655,7 +630,6 @@ def test_website_public_bucket_list_empty_blockederrordoc(): @pytest.mark.s3website @attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore -@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def 
test_website_public_bucket_list_private_index_blockederrordoc(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) @@ -691,7 +665,6 @@ def test_website_public_bucket_list_private_index_blockederrordoc(): @pytest.mark.s3website @attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore -@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_private_bucket_list_private_index_blockederrordoc(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) @@ -726,7 +699,6 @@ def test_website_private_bucket_list_private_index_blockederrordoc(): @attr(assertion='empty private buckets via s3website return a 403 for /, good errordoc') @attr('s3website') @pytest.mark.s3website -@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) @attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_website_private_bucket_list_empty_gooderrordoc(): @@ -754,7 +726,6 @@ def test_website_private_bucket_list_empty_gooderrordoc(): @pytest.mark.s3website @attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore -@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_public_bucket_list_empty_gooderrordoc(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) @@ -781,7 +752,6 @@ def test_website_public_bucket_list_empty_gooderrordoc(): @pytest.mark.s3website @attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore -@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_public_bucket_list_private_index_gooderrordoc(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) @@ -813,7 +783,6 @@ def test_website_public_bucket_list_private_index_gooderrordoc(): @pytest.mark.s3website @attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore -@nose.with_setup(setup=check_can_test_website, 
teardown=common.teardown) def test_website_private_bucket_list_private_index_gooderrordoc(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) @@ -846,7 +815,6 @@ def test_website_private_bucket_list_private_index_gooderrordoc(): @pytest.mark.s3website @attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore -@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_bucket_private_redirectall_base(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['RedirectAll']) @@ -866,7 +834,6 @@ def test_website_bucket_private_redirectall_base(): @pytest.mark.s3website @attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore -@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_bucket_private_redirectall_path(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['RedirectAll']) @@ -888,7 +855,6 @@ def test_website_bucket_private_redirectall_path(): @pytest.mark.s3website @attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore -@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_bucket_private_redirectall_path_upgrade(): bucket = get_new_bucket() x = string.Template(WEBSITE_CONFIGS_XMLFRAG['RedirectAll+Protocol']).safe_substitute(RedirectAllRequestsTo_Protocol='https') @@ -914,7 +880,6 @@ def test_website_bucket_private_redirectall_path_upgrade(): @pytest.mark.s3website_redirect_location @attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore -@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_xredirect_nonwebsite(): bucket = get_new_bucket() #f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['RedirectAll']) @@ -950,7 +915,6 @@ def test_website_xredirect_nonwebsite(): @pytest.mark.s3website_redirect_location @attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore -@nose.with_setup(setup=check_can_test_website, 
teardown=common.teardown) def test_website_xredirect_public_relative(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) @@ -981,7 +945,6 @@ def test_website_xredirect_public_relative(): @pytest.mark.s3website_redirect_location @attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore -@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_xredirect_public_abs(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) @@ -1012,7 +975,6 @@ def test_website_xredirect_public_abs(): @pytest.mark.s3website_redirect_location @attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore -@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_xredirect_private_relative(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) @@ -1043,7 +1005,6 @@ def test_website_xredirect_private_relative(): @pytest.mark.s3website_redirect_location @attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore -@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_xredirect_private_abs(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) @@ -1268,7 +1229,6 @@ def routing_teardown(**kwargs): print('Deleting', str(o)) o.delete() -@common.with_setup_kwargs(setup=routing_setup, teardown=routing_teardown) #@timed(10) def routing_check(*args, **kwargs): bucket = kwargs['bucket'] @@ -1310,7 +1270,6 @@ def routing_check(*args, **kwargs): @pytest.mark.s3website @attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore -@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_routing_generator(): for t in ROUTING_RULES_TESTS: if 'xml' in t and 'RoutingRules' in t['xml'] and len(t['xml']['RoutingRules']) > 0: diff --git a/s3tests_boto3/functional/test_s3.py b/s3tests_boto3/functional/test_s3.py index ce96155..665528a 100644 
--- a/s3tests_boto3/functional/test_s3.py +++ b/s3tests_boto3/functional/test_s3.py @@ -6235,30 +6235,34 @@ def test_list_buckets_bad_auth(): eq(status, 403) eq(error_code, 'SignatureDoesNotMatch') +@pytest.fixture +def override_prefix_a(): + nuke_prefixed_buckets(prefix='a'+get_prefix()) + yield + nuke_prefixed_buckets(prefix='a'+get_prefix()) + @attr(resource='bucket') @attr(method='put') @attr(operation='create bucket') @attr(assertion='name starts with alphabetic works') # this test goes outside the user-configure prefix because it needs to # control the initial character of the bucket name -@nose.with_setup( - setup=lambda: nuke_prefixed_buckets(prefix='a'+get_prefix()), - teardown=lambda: nuke_prefixed_buckets(prefix='a'+get_prefix()), - ) -def test_bucket_create_naming_good_starts_alpha(): +def test_bucket_create_naming_good_starts_alpha(override_prefix_a): check_good_bucket_name('foo', _prefix='a'+get_prefix()) +@pytest.fixture +def override_prefix_0(): + nuke_prefixed_buckets(prefix='0'+get_prefix()) + yield + nuke_prefixed_buckets(prefix='0'+get_prefix()) + @attr(resource='bucket') @attr(method='put') @attr(operation='create bucket') @attr(assertion='name starts with numeric works') # this test goes outside the user-configure prefix because it needs to # control the initial character of the bucket name -@nose.with_setup( - setup=lambda: nuke_prefixed_buckets(prefix='0'+get_prefix()), - teardown=lambda: nuke_prefixed_buckets(prefix='0'+get_prefix()), - ) -def test_bucket_create_naming_good_starts_digit(): +def test_bucket_create_naming_good_starts_digit(override_prefix_0): check_good_bucket_name('foo', _prefix='0'+get_prefix()) @attr(resource='bucket') From 7e7e8d5a4237414d7cec891fbb6573215a236597 Mon Sep 17 00:00:00 2001 From: Casey Bodley Date: Sat, 21 Jan 2023 13:37:40 -0500 Subject: [PATCH 06/12] pytest: replace nose SkipTest with pytest.skip() Signed-off-by: Casey Bodley --- s3tests/functional/test_headers.py | 5 ++-- s3tests/functional/test_s3.py | 9 
+++---- s3tests/functional/test_s3_website.py | 8 +++--- s3tests_boto3/functional/test_s3.py | 38 +++++++++++++-------------- s3tests_boto3/functional/test_sts.py | 1 - 5 files changed, 27 insertions(+), 34 deletions(-) diff --git a/s3tests/functional/test_headers.py b/s3tests/functional/test_headers.py index ecc90ae..eb3090c 100644 --- a/s3tests/functional/test_headers.py +++ b/s3tests/functional/test_headers.py @@ -21,7 +21,6 @@ from boto.s3.connection import S3Connection from nose.tools import eq_ as eq from nose.plugins.attrib import attr -from nose.plugins.skip import SkipTest from .utils import assert_raises @@ -482,11 +481,11 @@ def test_bucket_create_bad_date_none_aws2(): def check_aws4_support(): if 'S3_USE_SIGV4' not in os.environ: - raise SkipTest + pytest.skip('sigv4 tests not enabled by S3_USE_SIGV4') def check_aws2_support(): if 'S3_USE_SIGV4' in os.environ: - raise SkipTest + pytest.skip('sigv2 tests disabled by S3_USE_SIGV4') @tag('auth_aws4') diff --git a/s3tests/functional/test_s3.py b/s3tests/functional/test_s3.py index 5fca0ca..b15d6a2 100644 --- a/s3tests/functional/test_s3.py +++ b/s3tests/functional/test_s3.py @@ -30,7 +30,6 @@ from urllib.parse import urlparse from nose.tools import eq_ as eq from nose.plugins.attrib import attr -from nose.plugins.skip import SkipTest from . 
import utils from .utils import assert_raises @@ -432,7 +431,7 @@ def lc_transitions(transitions=None): def test_object_storage_class(): sc = configured_storage_classes() if len(sc) < 2: - raise SkipTest + pytest.skip('requires multiple storage classes') bucket = get_new_bucket() @@ -454,7 +453,7 @@ def test_object_storage_class(): def test_object_storage_class_multipart(): sc = configured_storage_classes() if len(sc) < 2: - raise SkipTest + pytest.skip('requires multiple storage classes') bucket = get_new_bucket() size = 11 * 1024 * 1024 @@ -470,7 +469,7 @@ def test_object_storage_class_multipart(): def _do_test_object_modify_storage_class(obj_write_func, size): sc = configured_storage_classes() if len(sc) < 2: - raise SkipTest + pytest.skip('requires multiple storage classes') bucket = get_new_bucket() @@ -515,7 +514,7 @@ def test_object_modify_storage_class_multipart(): def _do_test_object_storage_class_copy(obj_write_func, size): sc = configured_storage_classes() if len(sc) < 2: - raise SkipTest + pytest.skip('requires multiple storage classes') src_bucket = get_new_bucket() dest_bucket = get_new_bucket() diff --git a/s3tests/functional/test_s3_website.py b/s3tests/functional/test_s3_website.py index 7d5cd99..4d3b110 100644 --- a/s3tests/functional/test_s3_website.py +++ b/s3tests/functional/test_s3_website.py @@ -15,7 +15,6 @@ from urllib.parse import urlparse from nose.tools import eq_ as eq, ok_ as ok from nose.plugins.attrib import attr from nose.tools import timed -from nose.plugins.skip import SkipTest from .. 
import common @@ -56,13 +55,12 @@ def check_can_test_website(): if e.status == 404 and e.reason == 'Not Found' and e.error_code in ['NoSuchWebsiteConfiguration', 'NoSuchKey']: return True elif e.status == 405 and e.reason == 'Method Not Allowed' and e.error_code == 'MethodNotAllowed': - # rgw_enable_static_website is false - raise SkipTest + pytest.skip('rgw_enable_static_website is false') elif e.status == 403 and e.reason == 'SignatureDoesNotMatch' and e.error_code == 'Forbidden': # This is older versions that do not support the website code - raise SkipTest + pytest.skip('static website is not implemented') elif e.status == 501 and e.error_code == 'NotImplemented': - raise SkipTest + pytest.skip('static website is not implemented') else: raise RuntimeError("Unknown response in checking if WebsiteConf is supported", e) finally: diff --git a/s3tests_boto3/functional/test_s3.py b/s3tests_boto3/functional/test_s3.py index 665528a..c3d8148 100644 --- a/s3tests_boto3/functional/test_s3.py +++ b/s3tests_boto3/functional/test_s3.py @@ -4,7 +4,6 @@ from botocore.exceptions import ClientError from botocore.exceptions import ParamValidationError from nose.tools import eq_ as eq from nose.plugins.attrib import attr -from nose.plugins.skip import SkipTest import isodate import email.utils import datetime @@ -4440,7 +4439,7 @@ def test_bucket_create_exists(): def test_bucket_get_location(): location_constraint = get_main_api_name() if not location_constraint: - raise SkipTest + pytest.skip('no api_name configured') bucket_name = get_new_bucket_name() client = get_client() @@ -10230,7 +10229,7 @@ def _test_encryption_sse_customer_write(file_size): def test_lifecycle_transition(): sc = configured_storage_classes() if len(sc) < 3: - raise SkipTest + pytest.skip('requires 3 or more storage classes') bucket_name = _create_objects(keys=['expire1/foo', 'expire1/bar', 'keep2/foo', 'keep2/bar', 'expire3/foo', 'expire3/bar']) @@ -10281,7 +10280,7 @@ def test_lifecycle_transition(): def 
test_lifecycle_transition_single_rule_multi_trans(): sc = configured_storage_classes() if len(sc) < 3: - raise SkipTest + pytest.skip('requires 3 or more storage classes') bucket_name = _create_objects(keys=['expire1/foo', 'expire1/bar', 'keep2/foo', 'keep2/bar', 'expire3/foo', 'expire3/bar']) @@ -10328,7 +10327,7 @@ def test_lifecycle_transition_single_rule_multi_trans(): def test_lifecycle_set_noncurrent_transition(): sc = configured_storage_classes() if len(sc) < 3: - raise SkipTest + pytest.skip('requires 3 or more storage classes') bucket = get_new_bucket() client = get_client() @@ -10373,7 +10372,7 @@ def test_lifecycle_set_noncurrent_transition(): def test_lifecycle_noncur_transition(): sc = configured_storage_classes() if len(sc) < 3: - raise SkipTest + pytest.skip('requires 3 or more storage classes') bucket = get_new_bucket() client = get_client() @@ -10460,7 +10459,7 @@ def verify_object(client, bucket, key, content=None, sc=None): def test_lifecycle_cloud_transition(): cloud_sc = get_cloud_storage_class() if cloud_sc == None: - raise SkipTest + pytest.skip('no cloud_storage_class configured') retain_head_object = get_cloud_retain_head_object() target_path = get_cloud_target_path() @@ -10550,7 +10549,7 @@ def test_lifecycle_cloud_transition(): def test_lifecycle_cloud_multiple_transition(): cloud_sc = get_cloud_storage_class() if cloud_sc == None: - raise SkipTest + pytest.skip('[s3 cloud] section missing cloud_storage_class') retain_head_object = get_cloud_retain_head_object() target_path = get_cloud_target_path() @@ -10559,7 +10558,7 @@ def test_lifecycle_cloud_multiple_transition(): sc1 = get_cloud_regular_storage_class() if (sc1 == None): - raise SkipTest + pytest.skip('[s3 cloud] section missing storage_class') sc = ['STANDARD', sc1, cloud_sc] @@ -10623,16 +10622,15 @@ def test_lifecycle_cloud_multiple_transition(): def test_lifecycle_noncur_cloud_transition(): cloud_sc = get_cloud_storage_class() if cloud_sc == None: - raise SkipTest + 
pytest.skip('[s3 cloud] section missing cloud_storage_class') retain_head_object = get_cloud_retain_head_object() target_path = get_cloud_target_path() target_sc = get_cloud_target_storage_class() sc1 = get_cloud_regular_storage_class() - if (sc1 == None): - raise SkipTest + pytest.skip('[s3 cloud] section missing storage_class') sc = ['STANDARD', sc1, cloud_sc] @@ -10720,7 +10718,7 @@ def test_lifecycle_noncur_cloud_transition(): def test_lifecycle_cloud_transition_large_obj(): cloud_sc = get_cloud_storage_class() if cloud_sc == None: - raise SkipTest + pytest.skip('[s3 cloud] section missing cloud_storage_class') retain_head_object = get_cloud_retain_head_object() target_path = get_cloud_target_path() @@ -11585,7 +11583,7 @@ def test_sse_kms_post_object_authenticated_request(): def test_sse_kms_transfer_1b(): kms_keyid = get_main_kms_keyid() if kms_keyid is None: - raise SkipTest + pytest.skip('[s3 main] section missing kms_keyid') _test_sse_kms_customer_write(1, key_id = kms_keyid) @@ -11600,7 +11598,7 @@ def test_sse_kms_transfer_1b(): def test_sse_kms_transfer_1kb(): kms_keyid = get_main_kms_keyid() if kms_keyid is None: - raise SkipTest + pytest.skip('[s3 main] section missing kms_keyid') _test_sse_kms_customer_write(1024, key_id = kms_keyid) @@ -11615,7 +11613,7 @@ def test_sse_kms_transfer_1kb(): def test_sse_kms_transfer_1MB(): kms_keyid = get_main_kms_keyid() if kms_keyid is None: - raise SkipTest + pytest.skip('[s3 main] section missing kms_keyid') _test_sse_kms_customer_write(1024*1024, key_id = kms_keyid) @@ -11630,7 +11628,7 @@ def test_sse_kms_transfer_1MB(): def test_sse_kms_transfer_13b(): kms_keyid = get_main_kms_keyid() if kms_keyid is None: - raise SkipTest + pytest.skip('[s3 main] section missing kms_keyid') _test_sse_kms_customer_write(13, key_id = kms_keyid) @@ -13268,7 +13266,7 @@ def test_bucket_policy_put_obj_s3_kms(): def test_bucket_policy_put_obj_kms_noenc(): kms_keyid = get_main_kms_keyid() if kms_keyid is None: - raise SkipTest + 
pytest.skip('[s3 main] section missing kms_keyid') bucket_name = get_new_bucket() client = get_v2_client() @@ -15032,7 +15030,7 @@ def _test_sse_kms_default_upload(file_size): """ kms_keyid = get_main_kms_keyid() if kms_keyid is None: - raise SkipTest + pytest.skip('[s3 main] section missing kms_keyid') bucket_name = get_new_bucket() client = get_client() _put_bucket_encryption_kms(client, bucket_name) @@ -15265,7 +15263,7 @@ def test_sse_s3_default_post_object_authenticated_request(): def test_sse_kms_default_post_object_authenticated_request(): kms_keyid = get_main_kms_keyid() if kms_keyid is None: - raise SkipTest + pytest.skip('[s3 main] section missing kms_keyid') bucket_name = get_new_bucket() client = get_client() _put_bucket_encryption_kms(client, bucket_name) diff --git a/s3tests_boto3/functional/test_sts.py b/s3tests_boto3/functional/test_sts.py index 1f8bbdc..f926403 100644 --- a/s3tests_boto3/functional/test_sts.py +++ b/s3tests_boto3/functional/test_sts.py @@ -4,7 +4,6 @@ from botocore.exceptions import ClientError from botocore.exceptions import ParamValidationError from nose.tools import eq_ as eq from nose.plugins.attrib import attr -from nose.plugins.skip import SkipTest import pytest import isodate import email.utils From f5d0bc9be3a24e91142ad8359b1c64e655aaee48 Mon Sep 17 00:00:00 2001 From: Casey Bodley Date: Sat, 21 Jan 2023 14:05:46 -0500 Subject: [PATCH 07/12] pytest: replace nose eq() with assert == Signed-off-by: Casey Bodley --- s3tests/functional/test_headers.py | 185 +- s3tests/functional/test_s3.py | 73 +- s3tests/functional/test_s3_website.py | 43 +- s3tests/functional/test_utils.py | 12 +- s3tests/functional/utils.py | 4 +- s3tests_boto3/functional/test_headers.py | 107 +- s3tests_boto3/functional/test_iam.py | 227 ++- s3tests_boto3/functional/test_s3.py | 2251 ++++++++++----------- s3tests_boto3/functional/test_s3select.py | 40 +- s3tests_boto3/functional/test_sts.py | 343 ++-- s3tests_boto3/functional/test_utils.py | 12 +- 
s3tests_boto3/functional/utils.py | 2 - 12 files changed, 1639 insertions(+), 1660 deletions(-) diff --git a/s3tests/functional/test_headers.py b/s3tests/functional/test_headers.py index eb3090c..2ca16c5 100644 --- a/s3tests/functional/test_headers.py +++ b/s3tests/functional/test_headers.py @@ -19,7 +19,6 @@ from urllib.parse import urlparse from boto.s3.connection import S3Connection -from nose.tools import eq_ as eq from nose.plugins.attrib import attr from .utils import assert_raises @@ -194,9 +193,9 @@ def test_object_create_bad_contentlength_none(): key = _setup_bad_object(remove=('Content-Length',)) e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar') - eq(e.status, 411) - eq(e.reason, 'Length Required') - eq(e.error_code,'MissingContentLength') + assert e.status == 411 + assert e.reason == 'Length Required' + assert e.error_code == 'MissingContentLength' @tag('auth_common') @@ -218,9 +217,9 @@ def test_object_create_bad_contentlength_mismatch_above(): key.should_retry = no_retry e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, content) - eq(e.status, 400) - eq(e.reason.lower(), 'bad request') # some proxies vary the case - eq(e.error_code, 'RequestTimeout') + assert e.status == 400 + assert e.reason.lower() == 'bad request' # some proxies vary the case + assert e.error_code == 'RequestTimeout' @tag('auth_common') @@ -234,9 +233,9 @@ def test_object_create_bad_authorization_empty(): key = _setup_bad_object({'Authorization': ''}) e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar') - eq(e.status, 403) - eq(e.reason, 'Forbidden') - eq(e.error_code, 'AccessDenied') + assert e.status == 403 + assert e.reason == 'Forbidden' + assert e.error_code == 'AccessDenied' @tag('auth_common') @attr(resource='object') @@ -275,9 +274,9 @@ def test_object_create_bad_authorization_none(): key = _setup_bad_object(remove=('Authorization',)) e = 
assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar') - eq(e.status, 403) - eq(e.reason, 'Forbidden') - eq(e.error_code, 'AccessDenied') + assert e.status == 403 + assert e.reason == 'Forbidden' + assert e.error_code == 'AccessDenied' @tag('auth_common') @@ -332,8 +331,8 @@ def test_bucket_create_bad_contentlength_empty(): conn = _create_new_connection() _add_custom_headers({'Content-Length': ''}) e = assert_raises(boto.exception.S3ResponseError, get_new_bucket, conn) - eq(e.status, 400) - eq(e.reason.lower(), 'bad request') # some proxies vary the case + assert e.status == 400 + assert e.reason.lower() == 'bad request' # some proxies vary the case @tag('auth_common') @@ -358,9 +357,9 @@ def test_bucket_create_bad_contentlength_none(): def test_bucket_create_bad_authorization_empty(): _add_custom_headers({'Authorization': ''}) e = assert_raises(boto.exception.S3ResponseError, get_new_bucket) - eq(e.status, 403) - eq(e.reason, 'Forbidden') - eq(e.error_code, 'AccessDenied') + assert e.status == 403 + assert e.reason == 'Forbidden' + assert e.error_code == 'AccessDenied' # the teardown is really messed up here. 
check it out @@ -374,9 +373,9 @@ def test_bucket_create_bad_authorization_empty(): def test_bucket_create_bad_authorization_none(): _add_custom_headers(remove=('Authorization',)) e = assert_raises(boto.exception.S3ResponseError, get_new_bucket) - eq(e.status, 403) - eq(e.reason, 'Forbidden') - eq(e.error_code, 'AccessDenied') + assert e.status == 403 + assert e.reason == 'Forbidden' + assert e.error_code == 'AccessDenied' # # AWS2 specific tests @@ -395,9 +394,9 @@ def test_object_create_bad_contentlength_mismatch_below_aws2(): length = len(content) - 1 key = _setup_bad_object({'Content-Length': length}) e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, content) - eq(e.status, 400) - eq(e.reason.lower(), 'bad request') # some proxies vary the case - eq(e.error_code, 'BadDigest') + assert e.status == 400 + assert e.reason.lower() == 'bad request' # some proxies vary the case + assert e.error_code == 'BadDigest' @tag('auth_aws2') @@ -411,8 +410,8 @@ def test_object_create_bad_authorization_incorrect_aws2(): check_aws2_support() key = _setup_bad_object({'Authorization': 'AWS AKIAIGR7ZNNBHC5BKSUB:FWeDfwojDSdS2Ztmpfeubhd9isU='}) e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar') - eq(e.status, 403) - eq(e.reason, 'Forbidden') + assert e.status == 403 + assert e.reason == 'Forbidden' assert e.error_code in ('AccessDenied', 'SignatureDoesNotMatch', 'InvalidAccessKeyId') @@ -427,9 +426,9 @@ def test_object_create_bad_authorization_invalid_aws2(): check_aws2_support() key = _setup_bad_object({'Authorization': 'AWS HAHAHA'}) e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar') - eq(e.status, 400) - eq(e.reason.lower(), 'bad request') # some proxies vary the case - eq(e.error_code, 'InvalidArgument') + assert e.status == 400 + assert e.reason.lower() == 'bad request' # some proxies vary the case + assert e.error_code == 'InvalidArgument' @tag('auth_aws2') 
@attr(resource='object') @@ -442,9 +441,9 @@ def test_object_create_bad_date_none_aws2(): check_aws2_support() key = _setup_bad_object(remove=('Date',)) e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar') - eq(e.status, 403) - eq(e.reason, 'Forbidden') - eq(e.error_code, 'AccessDenied') + assert e.status == 403 + assert e.reason == 'Forbidden' + assert e.error_code == 'AccessDenied' @tag('auth_aws2') @@ -456,9 +455,9 @@ def test_bucket_create_bad_authorization_invalid_aws2(): check_aws2_support() _add_custom_headers({'Authorization': 'AWS HAHAHA'}) e = assert_raises(boto.exception.S3ResponseError, get_new_bucket) - eq(e.status, 400) - eq(e.reason.lower(), 'bad request') # some proxies vary the case - eq(e.error_code, 'InvalidArgument') + assert e.status == 400 + assert e.reason.lower() == 'bad request' # some proxies vary the case + assert e.error_code == 'InvalidArgument' @tag('auth_aws2') @attr(resource='bucket') @@ -471,9 +470,9 @@ def test_bucket_create_bad_date_none_aws2(): check_aws2_support() _add_custom_headers(remove=('Date',)) e = assert_raises(boto.exception.S3ResponseError, get_new_bucket) - eq(e.status, 403) - eq(e.reason, 'Forbidden') - eq(e.error_code, 'AccessDenied') + assert e.status == 403 + assert e.reason == 'Forbidden' + assert e.error_code == 'AccessDenied' # # AWS4 specific tests @@ -498,9 +497,9 @@ def test_object_create_bad_md5_invalid_garbage_aws4(): key = _setup_bad_object({'Content-MD5':'AWS4 HAHAHA'}) e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar') - eq(e.status, 400) - eq(e.reason.lower(), 'bad request') # some proxies vary the case - eq(e.error_code, 'InvalidDigest') + assert e.status == 400 + assert e.reason.lower() == 'bad request' # some proxies vary the case + assert e.error_code == 'InvalidDigest' @tag('auth_aws4') @@ -515,9 +514,9 @@ def test_object_create_bad_contentlength_mismatch_below_aws4(): key = _setup_bad_object({'Content-Length': length}) e = 
assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, content) - eq(e.status, 400) - eq(e.reason.lower(), 'bad request') # some proxies vary the case - eq(e.error_code, 'XAmzContentSHA256Mismatch') + assert e.status == 400 + assert e.reason.lower() == 'bad request' # some proxies vary the case + assert e.error_code == 'XAmzContentSHA256Mismatch' @tag('auth_aws4') @@ -530,8 +529,8 @@ def test_object_create_bad_authorization_incorrect_aws4(): key = _setup_bad_object({'Authorization': 'AWS4-HMAC-SHA256 Credential=AKIAIGR7ZNNBHC5BKSUB/20150930/us-east-1/s3/aws4_request,SignedHeaders=host;user-agent,Signature=FWeDfwojDSdS2Ztmpfeubhd9isU='}) e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar') - eq(e.status, 403) - eq(e.reason, 'Forbidden') + assert e.status == 403 + assert e.reason == 'Forbidden' assert e.error_code in ('AccessDenied', 'SignatureDoesNotMatch', 'InvalidAccessKeyId') @@ -545,8 +544,8 @@ def test_object_create_bad_authorization_invalid_aws4(): key = _setup_bad_object({'Authorization': 'AWS4-HMAC-SHA256 Credential=HAHAHA'}) e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar') - eq(e.status, 400) - eq(e.reason.lower(), 'bad request') # some proxies vary the case + assert e.status == 400 + assert e.reason.lower() == 'bad request' # some proxies vary the case assert e.error_code in ('AuthorizationHeaderMalformed', 'InvalidArgument') @@ -560,9 +559,9 @@ def test_object_create_bad_ua_empty_aws4(): key = _setup_bad_object({'User-Agent': ''}) e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar') - eq(e.status, 403) - eq(e.reason, 'Forbidden') - eq(e.error_code, 'SignatureDoesNotMatch') + assert e.status == 403 + assert e.reason == 'Forbidden' + assert e.error_code == 'SignatureDoesNotMatch' @tag('auth_aws4') @@ -575,9 +574,9 @@ def test_object_create_bad_ua_none_aws4(): key = _setup_bad_object(remove=('User-Agent',)) e = 
assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar') - eq(e.status, 403) - eq(e.reason, 'Forbidden') - eq(e.error_code, 'SignatureDoesNotMatch') + assert e.status == 403 + assert e.reason == 'Forbidden' + assert e.error_code == 'SignatureDoesNotMatch' @tag('auth_aws4') @@ -601,8 +600,8 @@ def test_object_create_bad_amz_date_invalid_aws4(): key = _setup_bad_object({'X-Amz-Date': 'Bad Date'}) e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar') - eq(e.status, 403) - eq(e.reason, 'Forbidden') + assert e.status == 403 + assert e.reason == 'Forbidden' assert e.error_code in ('AccessDenied', 'SignatureDoesNotMatch') @@ -627,8 +626,8 @@ def test_object_create_bad_amz_date_empty_aws4(): key = _setup_bad_object({'X-Amz-Date': ''}) e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar') - eq(e.status, 403) - eq(e.reason, 'Forbidden') + assert e.status == 403 + assert e.reason == 'Forbidden' assert e.error_code in ('AccessDenied', 'SignatureDoesNotMatch') @@ -653,8 +652,8 @@ def test_object_create_bad_amz_date_none_aws4(): key = _setup_bad_object(remove=('X-Amz-Date',)) e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar') - eq(e.status, 403) - eq(e.reason, 'Forbidden') + assert e.status == 403 + assert e.reason == 'Forbidden' assert e.error_code in ('AccessDenied', 'SignatureDoesNotMatch') @@ -679,8 +678,8 @@ def test_object_create_bad_amz_date_before_today_aws4(): key = _setup_bad_object({'X-Amz-Date': '20100707T215304Z'}) e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar') - eq(e.status, 403) - eq(e.reason, 'Forbidden') + assert e.status == 403 + assert e.reason == 'Forbidden' assert e.error_code in ('RequestTimeTooSkewed', 'SignatureDoesNotMatch') @@ -705,8 +704,8 @@ def test_object_create_bad_amz_date_after_today_aws4(): key = _setup_bad_object({'X-Amz-Date': '20300707T215304Z'}) e = 
assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar') - eq(e.status, 403) - eq(e.reason, 'Forbidden') + assert e.status == 403 + assert e.reason == 'Forbidden' assert e.error_code in ('RequestTimeTooSkewed', 'SignatureDoesNotMatch') @@ -731,8 +730,8 @@ def test_object_create_bad_amz_date_before_epoch_aws4(): key = _setup_bad_object({'X-Amz-Date': '19500707T215304Z'}) e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar') - eq(e.status, 403) - eq(e.reason, 'Forbidden') + assert e.status == 403 + assert e.reason == 'Forbidden' assert e.error_code in ('AccessDenied', 'SignatureDoesNotMatch') @@ -757,8 +756,8 @@ def test_object_create_bad_amz_date_after_end_aws4(): key = _setup_bad_object({'X-Amz-Date': '99990707T215304Z'}) e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar') - eq(e.status, 403) - eq(e.reason, 'Forbidden') + assert e.status == 403 + assert e.reason == 'Forbidden' assert e.error_code in ('RequestTimeTooSkewed', 'SignatureDoesNotMatch') @@ -788,8 +787,8 @@ def test_object_create_missing_signed_custom_header_aws4(): res =_make_raw_request(host=s3.main.host, port=s3.main.port, method=method, path=path, body=body, request_headers=request_headers, secure=s3.main.is_secure) - eq(res.status, 403) - eq(res.reason, 'Forbidden') + assert res.status == 403 + assert res.reason == 'Forbidden' @tag('auth_aws4') @@ -819,8 +818,8 @@ def test_object_create_missing_signed_header_aws4(): res =_make_raw_request(host=s3.main.host, port=s3.main.port, method=method, path=path, body=body, request_headers=request_headers, secure=s3.main.is_secure) - eq(res.status, 403) - eq(res.reason, 'Forbidden') + assert res.status == 403 + assert res.reason == 'Forbidden' @tag('auth_aws4') @@ -833,9 +832,9 @@ def test_bucket_create_bad_authorization_invalid_aws4(): _add_custom_headers({'Authorization': 'AWS4 HAHAHA'}) e = assert_raises(boto.exception.S3ResponseError, get_new_bucket) - eq(e.status, 
400) - eq(e.reason.lower(), 'bad request') # some proxies vary the case - eq(e.error_code, 'InvalidArgument') + assert e.status == 400 + assert e.reason.lower() == 'bad request' # some proxies vary the case + assert e.error_code == 'InvalidArgument' @tag('auth_aws4') @@ -848,9 +847,9 @@ def test_bucket_create_bad_ua_empty_aws4(): _add_custom_headers({'User-Agent': ''}) e = assert_raises(boto.exception.S3ResponseError, get_new_bucket) - eq(e.status, 403) - eq(e.reason, 'Forbidden') - eq(e.error_code, 'SignatureDoesNotMatch') + assert e.status == 403 + assert e.reason == 'Forbidden' + assert e.error_code == 'SignatureDoesNotMatch' @tag('auth_aws4') @attr(resource='bucket') @@ -862,9 +861,9 @@ def test_bucket_create_bad_ua_none_aws4(): _add_custom_headers(remove=('User-Agent',)) e = assert_raises(boto.exception.S3ResponseError, get_new_bucket) - eq(e.status, 403) - eq(e.reason, 'Forbidden') - eq(e.error_code, 'SignatureDoesNotMatch') + assert e.status == 403 + assert e.reason == 'Forbidden' + assert e.error_code == 'SignatureDoesNotMatch' @tag('auth_aws4') @@ -888,8 +887,8 @@ def test_bucket_create_bad_amz_date_invalid_aws4(): _add_custom_headers({'X-Amz-Date': 'Bad Date'}) e = assert_raises(boto.exception.S3ResponseError, get_new_bucket) - eq(e.status, 403) - eq(e.reason, 'Forbidden') + assert e.status == 403 + assert e.reason == 'Forbidden' assert e.error_code in ('AccessDenied', 'SignatureDoesNotMatch') @@ -914,8 +913,8 @@ def test_bucket_create_bad_amz_date_empty_aws4(): _add_custom_headers({'X-Amz-Date': ''}) e = assert_raises(boto.exception.S3ResponseError, get_new_bucket) - eq(e.status, 403) - eq(e.reason, 'Forbidden') + assert e.status == 403 + assert e.reason == 'Forbidden' assert e.error_code in ('AccessDenied', 'SignatureDoesNotMatch') @tag('auth_aws4') @@ -939,8 +938,8 @@ def test_bucket_create_bad_amz_date_none_aws4(): _add_custom_headers(remove=('X-Amz-Date',)) e = assert_raises(boto.exception.S3ResponseError, get_new_bucket) - eq(e.status, 403) - 
eq(e.reason, 'Forbidden') + assert e.status == 403 + assert e.reason == 'Forbidden' assert e.error_code in ('AccessDenied', 'SignatureDoesNotMatch') @@ -965,8 +964,8 @@ def test_bucket_create_bad_amz_date_before_today_aws4(): _add_custom_headers({'X-Amz-Date': '20100707T215304Z'}) e = assert_raises(boto.exception.S3ResponseError, get_new_bucket) - eq(e.status, 403) - eq(e.reason, 'Forbidden') + assert e.status == 403 + assert e.reason == 'Forbidden' assert e.error_code in ('RequestTimeTooSkewed', 'SignatureDoesNotMatch') @@ -991,8 +990,8 @@ def test_bucket_create_bad_amz_date_after_today_aws4(): _add_custom_headers({'X-Amz-Date': '20300707T215304Z'}) e = assert_raises(boto.exception.S3ResponseError, get_new_bucket) - eq(e.status, 403) - eq(e.reason, 'Forbidden') + assert e.status == 403 + assert e.reason == 'Forbidden' assert e.error_code in ('RequestTimeTooSkewed', 'SignatureDoesNotMatch') @@ -1017,6 +1016,6 @@ def test_bucket_create_bad_amz_date_before_epoch_aws4(): _add_custom_headers({'X-Amz-Date': '19500707T215304Z'}) e = assert_raises(boto.exception.S3ResponseError, get_new_bucket) - eq(e.status, 403) - eq(e.reason, 'Forbidden') + assert e.status == 403 + assert e.reason == 'Forbidden' assert e.error_code in ('AccessDenied', 'SignatureDoesNotMatch') diff --git a/s3tests/functional/test_s3.py b/s3tests/functional/test_s3.py index b15d6a2..fc84220 100644 --- a/s3tests/functional/test_s3.py +++ b/s3tests/functional/test_s3.py @@ -28,7 +28,6 @@ import re from collections import defaultdict from urllib.parse import urlparse -from nose.tools import eq_ as eq from nose.plugins.attrib import attr from . import utils @@ -55,9 +54,9 @@ from . 
import ( def check_access_denied(fn, *args, **kwargs): e = assert_raises(boto.exception.S3ResponseError, fn, *args, **kwargs) - eq(e.status, 403) - eq(e.reason, 'Forbidden') - eq(e.error_code, 'AccessDenied') + assert e.status == 403 + assert e.reason == 'Forbidden' + assert e.error_code == 'AccessDenied' def check_bad_bucket_name(name): """ @@ -65,9 +64,9 @@ def check_bad_bucket_name(name): that the request fails because of an invalid bucket name. """ e = assert_raises(boto.exception.S3ResponseError, get_new_bucket, targets.main.default, name) - eq(e.status, 400) - eq(e.reason.lower(), 'bad request') # some proxies vary the case - eq(e.error_code, 'InvalidBucketName') + assert e.status == 400 + assert e.reason.lower() == 'bad request' # some proxies vary the case + assert e.error_code == 'InvalidBucketName' def _create_keys(bucket=None, keys=[]): """ @@ -108,9 +107,9 @@ def test_bucket_create_naming_bad_punctuation(): def check_versioning(bucket, status): try: - eq(bucket.get_versioning_status()['Versioning'], status) + assert bucket.get_versioning_status()['Versioning'] == status except KeyError: - eq(status, None) + assert status == None # amazon is eventual consistent, retry a bit if failed def check_configure_versioning_retry(bucket, status, expected_string): @@ -129,7 +128,7 @@ def check_configure_versioning_retry(bucket, status, expected_string): time.sleep(1) - eq(expected_string, read_status) + assert expected_string == read_status @attr(resource='object') @attr(method='create') @@ -152,7 +151,7 @@ def test_versioning_obj_read_not_exist_null(): key.set_contents_from_string(content) key = bucket.get_key(objname, version_id='null') - eq(key, None) + assert key == None @attr(resource='object') @attr(method='put') @@ -177,11 +176,11 @@ def test_append_object(): res = _make_raw_request(host=s3.main.host, port=s3.main.port, method='PUT', path=path1, body='abc', secure=s3.main.is_secure) path2 = path + '&append&position=3' res = 
_make_raw_request(host=s3.main.host, port=s3.main.port, method='PUT', path=path2, body='abc', secure=s3.main.is_secure) - eq(res.status, 200) - eq(res.reason, 'OK') + assert res.status == 200 + assert res.reason == 'OK' key = bucket.get_key('foo') - eq(key.size, 6) + assert key.size == 6 @attr(resource='object') @attr(method='put') @@ -205,7 +204,7 @@ def test_append_normal_object(): path = o.path + '?' + o.query path = path + '&append&position=3' res = _make_raw_request(host=s3.main.host, port=s3.main.port, method='PUT', path=path, body='abc', secure=s3.main.is_secure) - eq(res.status, 409) + assert res.status == 409 @attr(resource='object') @@ -231,8 +230,8 @@ def test_append_object_position_wrong(): res = _make_raw_request(host=s3.main.host, port=s3.main.port, method='PUT', path=path1, body='abc', secure=s3.main.is_secure) path2 = path + '&append&position=9' res = _make_raw_request(host=s3.main.host, port=s3.main.port, method='PUT', path=path2, body='abc', secure=s3.main.is_secure) - eq(res.status, 409) - eq(int(res.getheader('x-rgw-next-append-position')), 3) + assert res.status == 409 + assert int(res.getheader('x-rgw-next-append-position')) == 3 # TODO rgw log_bucket.set_as_logging_target() gives 403 Forbidden @@ -329,13 +328,13 @@ def gen_rand_string(size, chars=string.ascii_uppercase + string.digits): def verify_object(bucket, k, data=None, storage_class=None): if storage_class: - eq(k.storage_class, storage_class) + assert k.storage_class == storage_class if data: read_data = k.get_contents_as_string() equal = data == read_data # avoid spamming log if data not equal - eq(equal, True) + assert equal == True def copy_object_storage_class(src_bucket, src_key, dest_bucket, dest_key, storage_class): query_args=None @@ -351,7 +350,7 @@ def copy_object_storage_class(src_bucket, src_key, dest_bucket, dest_key, storag res = dest_bucket.connection.make_request('PUT', dest_bucket.name, dest_key.name, query_args=query_args, headers=headers) - eq(res.status, 200) + 
assert res.status == 200 def _populate_multipart_key(bucket, kname, size, storage_class=None): (upload, data) = _multipart_upload(bucket, kname, size, storage_class=storage_class) @@ -463,8 +462,8 @@ def test_object_storage_class_multipart(): (upload, data) = _multipart_upload(bucket, key, size, storage_class=storage_class) upload.complete_upload() key2 = bucket.get_key(key) - eq(key2.size, size) - eq(key2.storage_class, storage_class) + assert key2.size == size + assert key2.storage_class == storage_class def _do_test_object_modify_storage_class(obj_write_func, size): sc = configured_storage_classes() @@ -609,7 +608,7 @@ class FakeFileVerifier(object): if self.char == None: self.char = data[0] self.size += size - eq(data.decode(), self.char*size) + assert data.decode() == self.char*size def _verify_atomic_key_data(key, size=-1, char=None): """ @@ -618,7 +617,7 @@ def _verify_atomic_key_data(key, size=-1, char=None): fp_verify = FakeFileVerifier(char) key.get_contents_to_file(fp_verify) if size >= 0: - eq(fp_verify.size, size) + assert fp_verify.size == size def _test_atomic_dual_conditional_write(file_size): """ @@ -647,9 +646,9 @@ def _test_atomic_dual_conditional_write(file_size): # key.set_contents_from_file(fp_c, headers={'If-Match': etag_fp_a}) e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_file, fp_c, headers={'If-Match': etag_fp_a}) - eq(e.status, 412) - eq(e.reason, 'Precondition Failed') - eq(e.error_code, 'PreconditionFailed') + assert e.status == 412 + assert e.reason == 'Precondition Failed' + assert e.error_code == 'PreconditionFailed' # verify the file _verify_atomic_key_data(key, file_size, 'B') @@ -684,9 +683,9 @@ def test_atomic_write_bucket_gone(): key = bucket.new_key('foo') fp_a = FakeWriteFile(1024*1024, 'A', remove_bucket) e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_file, fp_a) - eq(e.status, 404) - eq(e.reason, 'Not Found') - eq(e.error_code, 'NoSuchBucket') + assert e.status == 404 + 
assert e.reason == 'Not Found' + assert e.error_code == 'NoSuchBucket' def _multipart_upload_enc(bucket, s3_key_name, size, part_size=5*1024*1024, do_list=None, init_headers=None, part_headers=None, @@ -740,7 +739,7 @@ def test_encryption_sse_c_multipart_invalid_chunks_1(): _multipart_upload_enc, bucket, key, objlen, init_headers=init_headers, part_headers=part_headers, metadata={'foo': 'bar'}) - eq(e.status, 400) + assert e.status == 400 @attr(resource='object') @attr(method='put') @@ -770,7 +769,7 @@ def test_encryption_sse_c_multipart_invalid_chunks_2(): _multipart_upload_enc, bucket, key, objlen, init_headers=init_headers, part_headers=part_headers, metadata={'foo': 'bar'}) - eq(e.status, 400) + assert e.status == 400 @attr(resource='bucket') @attr(method='get') @@ -841,18 +840,18 @@ def test_bucket_policy_set_condition_operator_end_with_IfExists(): } ] }''' % bucket.name - eq(bucket.set_policy(policy), True) + assert bucket.set_policy(policy) == True res = _make_request('GET', bucket.name, bucket.get_key("foo"), request_headers={'referer': 'http://www.example.com/'}) - eq(res.status, 200) + assert res.status == 200 res = _make_request('GET', bucket.name, bucket.get_key("foo"), request_headers={'referer': 'http://www.example.com/index.html'}) - eq(res.status, 200) + assert res.status == 200 res = _make_request('GET', bucket.name, bucket.get_key("foo")) - eq(res.status, 200) + assert res.status == 200 res = _make_request('GET', bucket.name, bucket.get_key("foo"), request_headers={'referer': 'http://example.com'}) - eq(res.status, 403) + assert res.status == 403 def _make_arn_resource(path="*"): return "arn:aws:s3:::{}".format(path) diff --git a/s3tests/functional/test_s3_website.py b/s3tests/functional/test_s3_website.py index 4d3b110..76eb60c 100644 --- a/s3tests/functional/test_s3_website.py +++ b/s3tests/functional/test_s3_website.py @@ -12,7 +12,6 @@ import socket from urllib.parse import urlparse -from nose.tools import eq_ as eq, ok_ as ok from 
nose.plugins.attrib import attr from nose.tools import timed @@ -160,7 +159,7 @@ def _test_website_prep(bucket, xml_template, hardcoded_fields = {}, expect_fail= # Cleanup for our validation common.assert_xml_equal(config_xmlcmp, config_xmlnew) #print("config_xmlcmp\n", config_xmlcmp) - #eq (config_xmlnew, config_xmlcmp) + #assert config_xmlnew == config_xmlcmp f['WebsiteConfiguration'] = config_xmlcmp return f @@ -171,9 +170,9 @@ def __website_expected_reponse_status(res, status, reason): reason = set([reason]) if status is not IGNORE_FIELD: - ok(res.status in status, 'HTTP code was %s should be %s' % (res.status, status)) + assert res.status in status, 'HTTP code was %s should be %s' % (res.status, status) if reason is not IGNORE_FIELD: - ok(res.reason in reason, 'HTTP reason was was %s should be %s' % (res.reason, reason)) + assert res.reason in reason, 'HTTP reason was %s should be %s' % (res.reason, reason) def _website_expected_default_html(**kwargs): fields = [] @@ -203,22 +202,22 @@ def _website_expected_error_response(res, bucket_name, status, reason, code, con errorcode = res.getheader('x-amz-error-code', None) if errorcode is not None: if code is not IGNORE_FIELD: - eq(errorcode, code) + assert errorcode == code if not isinstance(content, collections.Container): content = set([content]) for f in content: if f is not IGNORE_FIELD and f is not None: f = bytes(f, 'utf-8') - ok(f in body, 'HTML should contain "%s"' % (f, )) + assert f in body, 'HTML should contain "%s"' % (f, ) def _website_expected_redirect_response(res, status, reason, new_url): body = res.read() print(body) __website_expected_reponse_status(res, status, reason) loc = res.getheader('Location', None) - eq(loc, new_url, 'Location header should be set "%s" != "%s"' % (loc,new_url,)) - ok(len(body) == 0, 'Body of a redirect should be empty') + assert loc == new_url, 'Location header should be set "%s" != "%s"' % (loc,new_url,) + assert len(body) == 0, 'Body of a redirect should be empty'
def _website_request(bucket_name, path, connect_hostname=None, method='GET', timeout=None): url = get_website_url(proto='http', bucket=bucket_name, path=path) @@ -293,7 +292,7 @@ def test_website_public_bucket_list_public_index(): body = res.read() print(body) indexstring = bytes(indexstring, 'utf-8') - eq(body, indexstring) # default content should match index.html set content + assert body == indexstring # default content should match index.html set content __website_expected_reponse_status(res, 200, 'OK') indexhtml.delete() bucket.delete() @@ -324,7 +323,7 @@ def test_website_private_bucket_list_public_index(): body = res.read() print(body) indexstring = bytes(indexstring, 'utf-8') - eq(body, indexstring, 'default content should match index.html set content') + assert body == indexstring, 'default content should match index.html set content' indexhtml.delete() bucket.delete() @@ -533,7 +532,7 @@ def test_website_private_bucket_list_empty_blockederrordoc(): print(body) _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied', content=_website_expected_default_html(Code='AccessDenied'), body=body) errorstring = bytes(errorstring, 'utf-8') - ok(errorstring not in body, 'error content should NOT match error.html set content') + assert errorstring not in body, 'error content should NOT match error.html set content' errorhtml.delete() bucket.delete() @@ -586,7 +585,7 @@ def test_website_public_bucket_list_pubilc_errordoc(): except socket.timeout: print('no invalid payload') - ok(resp_len == 0, 'invalid payload') + assert resp_len == 0, 'invalid payload' errorhtml.delete() bucket.delete() @@ -615,7 +614,7 @@ def test_website_public_bucket_list_empty_blockederrordoc(): print(body) _website_expected_error_response(res, bucket.name, 404, 'Not Found', 'NoSuchKey', content=_website_expected_default_html(Code='NoSuchKey'), body=body) errorstring = bytes(errorstring, 'utf-8') - ok(errorstring not in body, 'error content should match error.html set 
content') + assert errorstring not in body, 'error content should match error.html set content' errorhtml.delete() bucket.delete() @@ -649,7 +648,7 @@ def test_website_public_bucket_list_private_index_blockederrordoc(): print(body) _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied', content=_website_expected_default_html(Code='AccessDenied'), body=body) errorstring = bytes(errorstring, 'utf-8') - ok(errorstring not in body, 'error content should match error.html set content') + assert errorstring not in body, 'error content should match error.html set content' indexhtml.delete() errorhtml.delete() @@ -684,7 +683,7 @@ def test_website_private_bucket_list_private_index_blockederrordoc(): print(body) _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied', content=_website_expected_default_html(Code='AccessDenied'), body=body) errorstring = bytes(errorstring, 'utf-8') - ok(errorstring not in body, 'error content should match error.html set content') + assert errorstring not in body, 'error content should match error.html set content' indexhtml.delete() errorhtml.delete() @@ -889,7 +888,7 @@ def test_website_xredirect_nonwebsite(): headers = {'x-amz-website-redirect-location': redirect_dest} k.set_contents_from_string(content, headers=headers, policy='public-read') redirect = k.get_redirect() - eq(k.get_redirect(), redirect_dest) + assert k.get_redirect() == redirect_dest res = _website_request(bucket.name, '/page') body = res.read() @@ -924,7 +923,7 @@ def test_website_xredirect_public_relative(): headers = {'x-amz-website-redirect-location': redirect_dest} k.set_contents_from_string(content, headers=headers, policy='public-read') redirect = k.get_redirect() - eq(k.get_redirect(), redirect_dest) + assert k.get_redirect() == redirect_dest res = _website_request(bucket.name, '/page') #new_url = get_website_url(bucket_name=bucket.name, path=redirect_dest) @@ -954,7 +953,7 @@ def 
test_website_xredirect_public_abs(): headers = {'x-amz-website-redirect-location': redirect_dest} k.set_contents_from_string(content, headers=headers, policy='public-read') redirect = k.get_redirect() - eq(k.get_redirect(), redirect_dest) + assert k.get_redirect() == redirect_dest res = _website_request(bucket.name, '/page') new_url = get_website_url(proto='http', hostname='example.com', path='/foo') @@ -984,7 +983,7 @@ def test_website_xredirect_private_relative(): headers = {'x-amz-website-redirect-location': redirect_dest} k.set_contents_from_string(content, headers=headers, policy='private') redirect = k.get_redirect() - eq(k.get_redirect(), redirect_dest) + assert k.get_redirect() == redirect_dest res = _website_request(bucket.name, '/page') # We get a 403 because the page is private @@ -1014,7 +1013,7 @@ def test_website_xredirect_private_abs(): headers = {'x-amz-website-redirect-location': redirect_dest} k.set_contents_from_string(content, headers=headers, policy='private') redirect = k.get_redirect() - eq(k.get_redirect(), redirect_dest) + assert k.get_redirect() == redirect_dest res = _website_request(bucket.name, '/page') new_url = get_website_url(proto='http', hostname='example.com', path='/foo') @@ -1253,8 +1252,8 @@ def routing_check(*args, **kwargs): if args['code'] >= 200 and args['code'] < 300: #body = res.read() #print(body) - #eq(body, args['content'], 'default content should match index.html set content') - ok(int(res.getheader('Content-Length', -1)) > 0) + #assert body == args['content'], 'default content should match index.html set content' + assert int(res.getheader('Content-Length', -1)) > 0 elif args['code'] >= 300 and args['code'] < 400: _website_expected_redirect_response(res, args['code'], IGNORE_FIELD, new_url) elif args['code'] >= 400: diff --git a/s3tests/functional/test_utils.py b/s3tests/functional/test_utils.py index 59c3c74..c0dd398 100644 --- a/s3tests/functional/test_utils.py +++ b/s3tests/functional/test_utils.py @@ -1,11 +1,9 
@@ -from nose.tools import eq_ as eq - from . import utils def test_generate(): FIVE_MB = 5 * 1024 * 1024 - eq(len(''.join(utils.generate_random(0))), 0) - eq(len(''.join(utils.generate_random(1))), 1) - eq(len(''.join(utils.generate_random(FIVE_MB - 1))), FIVE_MB - 1) - eq(len(''.join(utils.generate_random(FIVE_MB))), FIVE_MB) - eq(len(''.join(utils.generate_random(FIVE_MB + 1))), FIVE_MB + 1) + assert len(''.join(utils.generate_random(0))) == 0 + assert len(''.join(utils.generate_random(1))) == 1 + assert len(''.join(utils.generate_random(FIVE_MB - 1))) == FIVE_MB - 1 + assert len(''.join(utils.generate_random(FIVE_MB))) == FIVE_MB + assert len(''.join(utils.generate_random(FIVE_MB + 1))) == FIVE_MB + 1 diff --git a/s3tests/functional/utils.py b/s3tests/functional/utils.py index 85bcaf7..3083415 100644 --- a/s3tests/functional/utils.py +++ b/s3tests/functional/utils.py @@ -3,8 +3,6 @@ import requests import string import time -from nose.tools import eq_ as eq - def assert_raises(excClass, callableObj, *args, **kwargs): """ Like unittest.TestCase.assertRaises, but returns the exception. 
@@ -48,7 +46,7 @@ def region_sync_meta(targets, region): conf = r.conf if conf.sync_agent_addr: ret = requests.post('http://{addr}:{port}/metadata/incremental'.format(addr = conf.sync_agent_addr, port = conf.sync_agent_port)) - eq(ret.status_code, 200) + assert ret.status_code == 200 if conf.sync_meta_wait: time.sleep(conf.sync_meta_wait) diff --git a/s3tests_boto3/functional/test_headers.py b/s3tests_boto3/functional/test_headers.py index c7fcec6..479b431 100644 --- a/s3tests_boto3/functional/test_headers.py +++ b/s3tests_boto3/functional/test_headers.py @@ -1,5 +1,4 @@ import boto3 -from nose.tools import eq_ as eq from nose.plugins.attrib import attr import nose import pytest @@ -171,8 +170,8 @@ def tag(*tags): def test_object_create_bad_md5_invalid_short(): e = _add_header_create_bad_object({'Content-MD5':'YWJyYWNhZGFicmE='}) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) - eq(error_code, 'InvalidDigest') + assert status == 400 + assert error_code == 'InvalidDigest' @tag('auth_common') @attr(resource='object') @@ -182,8 +181,8 @@ def test_object_create_bad_md5_invalid_short(): def test_object_create_bad_md5_bad(): e = _add_header_create_bad_object({'Content-MD5':'rL0Y20xC+Fzt72VPzMSk2A=='}) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) - eq(error_code, 'BadDigest') + assert status == 400 + assert error_code == 'BadDigest' @tag('auth_common') @attr(resource='object') @@ -193,8 +192,8 @@ def test_object_create_bad_md5_bad(): def test_object_create_bad_md5_empty(): e = _add_header_create_bad_object({'Content-MD5':''}) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) - eq(error_code, 'InvalidDigest') + assert status == 400 + assert error_code == 'InvalidDigest' @tag('auth_common') @attr(resource='object') @@ -247,7 +246,7 @@ def test_object_create_bad_expect_none(): def test_object_create_bad_contentlength_empty(): e = _add_header_create_bad_object({'Content-Length':''}) 
status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) + assert status == 400 @tag('auth_common') @attr(resource='object') @@ -262,7 +261,7 @@ def test_object_create_bad_contentlength_negative(): key_name = 'foo' e = assert_raises(ClientError, client.put_object, Bucket=bucket_name, Key=key_name, ContentLength=-1) status = _get_status(e.response) - eq(status, 400) + assert status == 400 @tag('auth_common') @attr(resource='object') @@ -276,8 +275,8 @@ def test_object_create_bad_contentlength_none(): remove = 'Content-Length' e = _remove_header_create_bad_object('Content-Length') status, error_code = _get_status_and_error_code(e.response) - eq(status, 411) - eq(error_code, 'MissingContentLength') + assert status == 411 + assert error_code == 'MissingContentLength' @tag('auth_common') @attr(resource='object') @@ -324,7 +323,7 @@ def test_object_create_bad_contenttype_none(): def test_object_create_bad_authorization_empty(): e = _add_header_create_bad_object({'Authorization': ''}) status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) + assert status == 403 @tag('auth_common') @attr(resource='object') @@ -366,7 +365,7 @@ def test_object_create_amz_date_and_no_date(): def test_object_create_bad_authorization_none(): e = _remove_header_create_bad_object('Authorization') status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) + assert status == 403 @tag('auth_common') @attr(resource='bucket') @@ -416,7 +415,7 @@ def test_bucket_put_bad_canned_acl(): e = assert_raises(ClientError, client.put_bucket_acl, Bucket=bucket_name, ACL='public-read') status = _get_status(e.response) - eq(status, 400) + assert status == 400 @tag('auth_common') @attr(resource='bucket') @@ -454,7 +453,7 @@ def test_bucket_create_bad_contentlength_empty(): headers = {'Content-Length': ''} e = _add_header_create_bad_bucket(headers) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) + assert status == 400 
@tag('auth_common') @attr(resource='bucket') @@ -467,7 +466,7 @@ def test_bucket_create_bad_contentlength_negative(): headers = {'Content-Length': '-1'} e = _add_header_create_bad_bucket(headers) status = _get_status(e.response) - eq(status, 400) + assert status == 400 @tag('auth_common') @attr(resource='bucket') @@ -493,8 +492,8 @@ def test_bucket_create_bad_authorization_empty(): headers = {'Authorization': ''} e = _add_header_create_bad_bucket(headers) status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) - eq(error_code, 'AccessDenied') + assert status == 403 + assert error_code == 'AccessDenied' @tag('auth_common') @attr(resource='bucket') @@ -507,8 +506,8 @@ def test_bucket_create_bad_authorization_empty(): def test_bucket_create_bad_authorization_none(): e = _remove_header_create_bad_bucket('Authorization') status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) - eq(error_code, 'AccessDenied') + assert status == 403 + assert error_code == 'AccessDenied' @tag('auth_aws2') @attr(resource='object') @@ -520,8 +519,8 @@ def test_object_create_bad_md5_invalid_garbage_aws2(): headers = {'Content-MD5': 'AWS HAHAHA'} e = _add_header_create_bad_object(headers, v2_client) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) - eq(error_code, 'InvalidDigest') + assert status == 400 + assert error_code == 'InvalidDigest' @tag('auth_aws2') @attr(resource='object') @@ -538,8 +537,8 @@ def test_object_create_bad_contentlength_mismatch_below_aws2(): headers = {'Content-Length': str(length)} e = _add_header_create_bad_object(headers, v2_client) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) - eq(error_code, 'BadDigest') + assert status == 400 + assert error_code == 'BadDigest' @tag('auth_aws2') @attr(resource='object') @@ -554,8 +553,8 @@ def test_object_create_bad_authorization_incorrect_aws2(): headers = {'Authorization': 'AWS 
AKIAIGR7ZNNBHC5BKSUB:FWeDfwojDSdS2Ztmpfeubhd9isU='} e = _add_header_create_bad_object(headers, v2_client) status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) - eq(error_code, 'InvalidDigest') + assert status == 403 + assert error_code == 'InvalidDigest' @tag('auth_aws2') @attr(resource='object') @@ -570,8 +569,8 @@ def test_object_create_bad_authorization_invalid_aws2(): headers = {'Authorization': 'AWS HAHAHA'} e = _add_header_create_bad_object(headers, v2_client) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) - eq(error_code, 'InvalidArgument') + assert status == 400 + assert error_code == 'InvalidArgument' @tag('auth_aws2') @attr(resource='object') @@ -605,8 +604,8 @@ def test_object_create_bad_date_invalid_aws2(): headers = {'x-amz-date': 'Bad Date'} e = _add_header_create_bad_object(headers, v2_client) status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) - eq(error_code, 'AccessDenied') + assert status == 403 + assert error_code == 'AccessDenied' @tag('auth_aws2') @attr(resource='object') @@ -618,8 +617,8 @@ def test_object_create_bad_date_empty_aws2(): headers = {'x-amz-date': ''} e = _add_header_create_bad_object(headers, v2_client) status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) - eq(error_code, 'AccessDenied') + assert status == 403 + assert error_code == 'AccessDenied' @tag('auth_aws2') @attr(resource='object') @@ -634,8 +633,8 @@ def test_object_create_bad_date_none_aws2(): remove = 'x-amz-date' e = _remove_header_create_bad_object(remove, v2_client) status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) - eq(error_code, 'AccessDenied') + assert status == 403 + assert error_code == 'AccessDenied' @tag('auth_aws2') @attr(resource='object') @@ -647,8 +646,8 @@ def test_object_create_bad_date_before_today_aws2(): headers = {'x-amz-date': 'Tue, 07 Jul 2010 21:53:04 GMT'} e = _add_header_create_bad_object(headers, v2_client) 
status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) - eq(error_code, 'RequestTimeTooSkewed') + assert status == 403 + assert error_code == 'RequestTimeTooSkewed' @tag('auth_aws2') @attr(resource='object') @@ -660,8 +659,8 @@ def test_object_create_bad_date_before_epoch_aws2(): headers = {'x-amz-date': 'Tue, 07 Jul 1950 21:53:04 GMT'} e = _add_header_create_bad_object(headers, v2_client) status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) - eq(error_code, 'AccessDenied') + assert status == 403 + assert error_code == 'AccessDenied' @tag('auth_aws2') @attr(resource='object') @@ -673,8 +672,8 @@ def test_object_create_bad_date_after_end_aws2(): headers = {'x-amz-date': 'Tue, 07 Jul 9999 21:53:04 GMT'} e = _add_header_create_bad_object(headers, v2_client) status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) - eq(error_code, 'RequestTimeTooSkewed') + assert status == 403 + assert error_code == 'RequestTimeTooSkewed' @tag('auth_aws2') @attr(resource='bucket') @@ -689,8 +688,8 @@ def test_bucket_create_bad_authorization_invalid_aws2(): headers = {'Authorization': 'AWS HAHAHA'} e = _add_header_create_bad_bucket(headers, v2_client) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) - eq(error_code, 'InvalidArgument') + assert status == 400 + assert error_code == 'InvalidArgument' @tag('auth_aws2') @attr(resource='bucket') @@ -722,8 +721,8 @@ def test_bucket_create_bad_date_invalid_aws2(): headers = {'x-amz-date': 'Bad Date'} e = _add_header_create_bad_bucket(headers, v2_client) status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) - eq(error_code, 'AccessDenied') + assert status == 403 + assert error_code == 'AccessDenied' @tag('auth_aws2') @attr(resource='bucket') @@ -735,8 +734,8 @@ def test_bucket_create_bad_date_empty_aws2(): headers = {'x-amz-date': ''} e = _add_header_create_bad_bucket(headers, v2_client) status, error_code = 
_get_status_and_error_code(e.response) - eq(status, 403) - eq(error_code, 'AccessDenied') + assert status == 403 + assert error_code == 'AccessDenied' @tag('auth_aws2') @attr(resource='bucket') @@ -751,8 +750,8 @@ def test_bucket_create_bad_date_none_aws2(): remove = 'x-amz-date' e = _remove_header_create_bad_bucket(remove, v2_client) status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) - eq(error_code, 'AccessDenied') + assert status == 403 + assert error_code == 'AccessDenied' @tag('auth_aws2') @attr(resource='bucket') @@ -764,8 +763,8 @@ def test_bucket_create_bad_date_before_today_aws2(): headers = {'x-amz-date': 'Tue, 07 Jul 2010 21:53:04 GMT'} e = _add_header_create_bad_bucket(headers, v2_client) status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) - eq(error_code, 'RequestTimeTooSkewed') + assert status == 403 + assert error_code == 'RequestTimeTooSkewed' @tag('auth_aws2') @attr(resource='bucket') @@ -777,8 +776,8 @@ def test_bucket_create_bad_date_after_today_aws2(): headers = {'x-amz-date': 'Tue, 07 Jul 2030 21:53:04 GMT'} e = _add_header_create_bad_bucket(headers, v2_client) status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) - eq(error_code, 'RequestTimeTooSkewed') + assert status == 403 + assert error_code == 'RequestTimeTooSkewed' @tag('auth_aws2') @attr(resource='bucket') @@ -790,5 +789,5 @@ def test_bucket_create_bad_date_before_epoch_aws2(): headers = {'x-amz-date': 'Tue, 07 Jul 1950 21:53:04 GMT'} e = _add_header_create_bad_bucket(headers, v2_client) status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) - eq(error_code, 'AccessDenied') + assert status == 403 + assert error_code == 'AccessDenied' diff --git a/s3tests_boto3/functional/test_iam.py b/s3tests_boto3/functional/test_iam.py index 0f6b124..9285ad7 100644 --- a/s3tests_boto3/functional/test_iam.py +++ b/s3tests_boto3/functional/test_iam.py @@ -2,7 +2,6 @@ import json from botocore.exceptions 
import ClientError from nose.plugins.attrib import attr -from nose.tools import eq_ as eq import pytest from s3tests_boto3.functional.utils import assert_raises @@ -40,10 +39,10 @@ def test_put_user_policy(): ) response = client.put_user_policy(PolicyDocument=policy_document, PolicyName='AllAccessPolicy', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 response = client.delete_user_policy(PolicyName='AllAccessPolicy', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 @attr(resource='user-policy') @@ -67,7 +66,7 @@ def test_put_user_policy_invalid_user(): e = assert_raises(ClientError, client.put_user_policy, PolicyDocument=policy_document, PolicyName='AllAccessPolicy', UserName="some-non-existing-user-id") status = _get_status(e.response) - eq(status, 404) + assert status == 404 @attr(resource='user-policy') @@ -92,7 +91,7 @@ def test_put_user_policy_parameter_limit(): e = assert_raises(ClientError, client.put_user_policy, PolicyDocument=policy_document, PolicyName='AllAccessPolicy' * 10, UserName=get_alt_user_id()) status = _get_status(e.response) - eq(status, 400) + assert status == 400 @attr(resource='user-policy') @@ -120,7 +119,7 @@ def test_put_user_policy_invalid_element(): e = assert_raises(ClientError, client.put_user_policy, PolicyDocument=policy_document, PolicyName='AllAccessPolicy', UserName=get_alt_user_id()) status = _get_status(e.response) - eq(status, 400) + assert status == 400 # With no Statement policy_document = json.dumps( @@ -131,7 +130,7 @@ def test_put_user_policy_invalid_element(): e = assert_raises(ClientError, client.put_user_policy, PolicyDocument=policy_document, PolicyName='AllAccessPolicy', UserName=get_alt_user_id()) status = _get_status(e.response) - eq(status, 400) + assert status == 400 # with same Sid for 2 statements 
policy_document = json.dumps( @@ -150,7 +149,7 @@ def test_put_user_policy_invalid_element(): e = assert_raises(ClientError, client.put_user_policy, PolicyDocument=policy_document, PolicyName='AllAccessPolicy', UserName=get_alt_user_id()) status = _get_status(e.response) - eq(status, 400) + assert status == 400 # with Principal policy_document = json.dumps( @@ -165,7 +164,7 @@ def test_put_user_policy_invalid_element(): e = assert_raises(ClientError, client.put_user_policy, PolicyDocument=policy_document, PolicyName='AllAccessPolicy', UserName=get_alt_user_id()) status = _get_status(e.response) - eq(status, 400) + assert status == 400 @attr(resource='user-policy') @@ -189,7 +188,7 @@ def test_put_existing_user_policy(): ) response = client.put_user_policy(PolicyDocument=policy_document, PolicyName='AllAccessPolicy', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 client.put_user_policy(PolicyDocument=policy_document, PolicyName='AllAccessPolicy', UserName=get_alt_user_id()) client.delete_user_policy(PolicyName='AllAccessPolicy', UserName=get_alt_user_id()) @@ -216,9 +215,9 @@ def test_list_user_policy(): ) response = client.put_user_policy(PolicyDocument=policy_document, PolicyName='AllAccessPolicy', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 response = client.list_user_policies(UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 client.delete_user_policy(PolicyName='AllAccessPolicy', UserName=get_alt_user_id()) @@ -234,7 +233,7 @@ def test_list_user_policy_invalid_user(): client = get_iam_client() e = assert_raises(ClientError, client.list_user_policies, UserName="some-non-existing-user-id") status = _get_status(e.response) - eq(status, 404) + assert status == 404 
@attr(resource='user-policy') @@ -257,13 +256,13 @@ def test_get_user_policy(): ) response = client.put_user_policy(PolicyDocument=policy_document, PolicyName='AllAccessPolicy', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 response = client.get_user_policy(PolicyName='AllAccessPolicy', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 response = client.delete_user_policy(PolicyName='AllAccessPolicy', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 @attr(resource='user-policy') @@ -286,11 +285,11 @@ def test_get_user_policy_invalid_user(): ) response = client.put_user_policy(PolicyDocument=policy_document, PolicyName='AllAccessPolicy', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 e = assert_raises(ClientError, client.get_user_policy, PolicyName='AllAccessPolicy', UserName="some-non-existing-user-id") status = _get_status(e.response) - eq(status, 404) + assert status == 404 client.delete_user_policy(PolicyName='AllAccessPolicy', UserName=get_alt_user_id()) @@ -319,7 +318,7 @@ def test_get_user_policy_invalid_policy_name(): e = assert_raises(ClientError, client.get_user_policy, PolicyName='non-existing-policy-name', UserName=get_alt_user_id()) status = _get_status(e.response) - eq(status, 404) + assert status == 404 client.delete_user_policy(PolicyName='AllAccessPolicy', UserName=get_alt_user_id()) @@ -349,7 +348,7 @@ def test_get_deleted_user_policy(): e = assert_raises(ClientError, client.get_user_policy, PolicyName='AllAccessPolicy', UserName=get_alt_user_id()) status = _get_status(e.response) - eq(status, 404) + assert status == 404 @attr(resource='user-policy') 
@@ -374,21 +373,21 @@ def test_get_user_policy_from_multiple_policies(): response = client.put_user_policy(PolicyDocument=policy_document_allow, PolicyName='AllowAccessPolicy1', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 response = client.put_user_policy(PolicyDocument=policy_document_allow, PolicyName='AllowAccessPolicy2', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 response = client.get_user_policy(PolicyName='AllowAccessPolicy2', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 response = client.delete_user_policy(PolicyName='AllowAccessPolicy1', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 response = client.delete_user_policy(PolicyName='AllowAccessPolicy2', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 @attr(resource='user-policy') @@ -413,10 +412,10 @@ def test_delete_user_policy(): response = client.put_user_policy(PolicyDocument=policy_document_allow, PolicyName='AllowAccessPolicy', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 response = client.delete_user_policy(PolicyName='AllowAccessPolicy', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 @attr(resource='user-policy') @@ -441,14 +440,14 @@ def test_delete_user_policy_invalid_user(): response = client.put_user_policy(PolicyDocument=policy_document_allow, PolicyName='AllowAccessPolicy', 
UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 e = assert_raises(ClientError, client.delete_user_policy, PolicyName='AllAccessPolicy', UserName="some-non-existing-user-id") status = _get_status(e.response) - eq(status, 404) + assert status == 404 response = client.delete_user_policy(PolicyName='AllowAccessPolicy', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 @attr(resource='user-policy') @@ -473,14 +472,14 @@ def test_delete_user_policy_invalid_policy_name(): response = client.put_user_policy(PolicyDocument=policy_document_allow, PolicyName='AllowAccessPolicy', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 e = assert_raises(ClientError, client.delete_user_policy, PolicyName='non-existing-policy-name', UserName=get_alt_user_id()) status = _get_status(e.response) - eq(status, 404) + assert status == 404 response = client.delete_user_policy(PolicyName='AllowAccessPolicy', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 @attr(resource='user-policy') @@ -505,28 +504,28 @@ def test_delete_user_policy_from_multiple_policies(): response = client.put_user_policy(PolicyDocument=policy_document_allow, PolicyName='AllowAccessPolicy1', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 response = client.put_user_policy(PolicyDocument=policy_document_allow, PolicyName='AllowAccessPolicy2', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 response = 
client.put_user_policy(PolicyDocument=policy_document_allow, PolicyName='AllowAccessPolicy3', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 response = client.delete_user_policy(PolicyName='AllowAccessPolicy1', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 response = client.delete_user_policy(PolicyName='AllowAccessPolicy2', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 response = client.get_user_policy(PolicyName='AllowAccessPolicy3', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 response = client.delete_user_policy(PolicyName='AllowAccessPolicy3', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 @attr(resource='user-policy') @@ -555,7 +554,7 @@ def test_allow_bucket_actions_in_user_policy(): response = client.put_user_policy(PolicyDocument=policy_document_allow, PolicyName='AllowAccessPolicy', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 response = s3_client_alt.list_objects(Bucket=bucket) object_found = False @@ -567,10 +566,10 @@ def test_allow_bucket_actions_in_user_policy(): raise AssertionError("Object is not listed") response = s3_client_iam.delete_object(Bucket=bucket, Key='foo') - eq(response['ResponseMetadata']['HTTPStatusCode'], 204) + assert response['ResponseMetadata']['HTTPStatusCode'] == 204 response = s3_client_alt.delete_bucket(Bucket=bucket) - eq(response['ResponseMetadata']['HTTPStatusCode'], 204) + assert 
response['ResponseMetadata']['HTTPStatusCode'] == 204 response = s3_client_iam.list_buckets() for bucket in response['Buckets']: @@ -579,7 +578,7 @@ def test_allow_bucket_actions_in_user_policy(): response = client.delete_user_policy(PolicyName='AllowAccessPolicy', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 @attr(resource='user-policy') @@ -607,21 +606,21 @@ def test_deny_bucket_actions_in_user_policy(): response = client.put_user_policy(PolicyDocument=policy_document_deny, PolicyName='DenyAccessPolicy', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 e = assert_raises(ClientError, s3_client.list_buckets, Bucket=bucket) status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) - eq(error_code, 'AccessDenied') + assert status == 403 + assert error_code == 'AccessDenied' e = assert_raises(ClientError, s3_client.delete_bucket, Bucket=bucket) status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) - eq(error_code, 'AccessDenied') + assert status == 403 + assert error_code == 'AccessDenied' response = client.delete_user_policy(PolicyName='DenyAccessPolicy', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 response = s3_client.delete_bucket(Bucket=bucket) - eq(response['ResponseMetadata']['HTTPStatusCode'], 204) + assert response['ResponseMetadata']['HTTPStatusCode'] == 204 @attr(resource='user-policy') @@ -647,26 +646,26 @@ def test_allow_object_actions_in_user_policy(): ) response = client.put_user_policy(PolicyDocument=policy_document_allow, PolicyName='AllowAccessPolicy', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 
s3_client_alt.put_object(Bucket=bucket, Key='foo', Body='bar') response = s3_client_alt.get_object(Bucket=bucket, Key='foo') body = response['Body'].read() if type(body) is bytes: body = body.decode() - eq(body, "bar") + assert body == "bar" response = s3_client_alt.delete_object(Bucket=bucket, Key='foo') - eq(response['ResponseMetadata']['HTTPStatusCode'], 204) + assert response['ResponseMetadata']['HTTPStatusCode'] == 204 e = assert_raises(ClientError, s3_client_iam.get_object, Bucket=bucket, Key='foo') status, error_code = _get_status_and_error_code(e.response) - eq(status, 404) - eq(error_code, 'NoSuchKey') + assert status == 404 + assert error_code == 'NoSuchKey' response = s3_client_iam.delete_bucket(Bucket=bucket) - eq(response['ResponseMetadata']['HTTPStatusCode'], 204) + assert response['ResponseMetadata']['HTTPStatusCode'] == 204 response = client.delete_user_policy(PolicyName='AllowAccessPolicy', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 @attr(resource='user-policy') @@ -699,20 +698,20 @@ def test_deny_object_actions_in_user_policy(): e = assert_raises(ClientError, s3_client_alt.put_object, Bucket=bucket, Key='foo') status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) - eq(error_code, 'AccessDenied') + assert status == 403 + assert error_code == 'AccessDenied' e = assert_raises(ClientError, s3_client_alt.get_object, Bucket=bucket, Key='foo') status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) - eq(error_code, 'AccessDenied') + assert status == 403 + assert error_code == 'AccessDenied' e = assert_raises(ClientError, s3_client_alt.delete_object, Bucket=bucket, Key='foo') status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) - eq(error_code, 'AccessDenied') + assert status == 403 + assert error_code == 'AccessDenied' response = 
client.delete_user_policy(PolicyName='DenyAccessPolicy', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 @attr(resource='user-policy') @@ -738,22 +737,22 @@ def test_allow_multipart_actions_in_user_policy(): ) response = client.put_user_policy(PolicyDocument=policy_document_allow, PolicyName='AllowAccessPolicy', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 key = "mymultipart" mb = 1024 * 1024 (upload_id, _, _) = _multipart_upload(client=s3_client_iam, bucket_name=bucket, key=key, size=5 * mb) response = s3_client_alt.list_multipart_uploads(Bucket=bucket) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 response = s3_client_alt.abort_multipart_upload(Bucket=bucket, Key=key, UploadId=upload_id) - eq(response['ResponseMetadata']['HTTPStatusCode'], 204) + assert response['ResponseMetadata']['HTTPStatusCode'] == 204 response = s3_client_iam.delete_bucket(Bucket=bucket) - eq(response['ResponseMetadata']['HTTPStatusCode'], 204) + assert response['ResponseMetadata']['HTTPStatusCode'] == 204 response = client.delete_user_policy(PolicyName='AllowAccessPolicy', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 @attr(resource='user-policy') @@ -780,7 +779,7 @@ def test_deny_multipart_actions_in_user_policy(): response = client.put_user_policy(PolicyDocument=policy_document_deny, PolicyName='DenyAccessPolicy', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 key = "mymultipart" mb = 1024 * 1024 @@ -789,20 +788,20 @@ def test_deny_multipart_actions_in_user_policy(): e = assert_raises(ClientError, 
s3_client.list_multipart_uploads, Bucket=bucket) status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) - eq(error_code, 'AccessDenied') + assert status == 403 + assert error_code == 'AccessDenied' e = assert_raises(ClientError, s3_client.abort_multipart_upload, Bucket=bucket, Key=key, UploadId=upload_id) status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) - eq(error_code, 'AccessDenied') + assert status == 403 + assert error_code == 'AccessDenied' response = s3_client.delete_bucket(Bucket=bucket) - eq(response['ResponseMetadata']['HTTPStatusCode'], 204) + assert response['ResponseMetadata']['HTTPStatusCode'] == 204 response = client.delete_user_policy(PolicyName='DenyAccessPolicy', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 @attr(resource='user-policy') @@ -833,28 +832,28 @@ def test_allow_tagging_actions_in_user_policy(): tags = {'TagSet': [{'Key': 'Hello', 'Value': 'World'}, ]} response = s3_client_alt.put_bucket_tagging(Bucket=bucket, Tagging=tags) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 response = s3_client_alt.get_bucket_tagging(Bucket=bucket) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) - eq(response['TagSet'][0]['Key'], 'Hello') - eq(response['TagSet'][0]['Value'], 'World') + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 + assert response['TagSet'][0]['Key'] == 'Hello' + assert response['TagSet'][0]['Value'] == 'World' obj_key = 'obj' response = s3_client_iam.put_object(Bucket=bucket, Key=obj_key, Body='obj_body') - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 response = s3_client_alt.put_object_tagging(Bucket=bucket, Key=obj_key, Tagging=tags) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert 
response['ResponseMetadata']['HTTPStatusCode'] == 200 response = s3_client_alt.get_object_tagging(Bucket=bucket, Key=obj_key) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) - eq(response['TagSet'], tags['TagSet']) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 + assert response['TagSet'] == tags['TagSet'] response = s3_client_iam.delete_object(Bucket=bucket, Key=obj_key) - eq(response['ResponseMetadata']['HTTPStatusCode'], 204) + assert response['ResponseMetadata']['HTTPStatusCode'] == 204 response = s3_client_iam.delete_bucket(Bucket=bucket) - eq(response['ResponseMetadata']['HTTPStatusCode'], 204) + assert response['ResponseMetadata']['HTTPStatusCode'] == 204 response = client.delete_user_policy(PolicyName='AllowAccessPolicy', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 @attr(resource='user-policy') @@ -885,34 +884,34 @@ def test_deny_tagging_actions_in_user_policy(): e = assert_raises(ClientError, s3_client.put_bucket_tagging, Bucket=bucket, Tagging=tags) status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) - eq(error_code, 'AccessDenied') + assert status == 403 + assert error_code == 'AccessDenied' e = assert_raises(ClientError, s3_client.get_bucket_tagging, Bucket=bucket) status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) - eq(error_code, 'AccessDenied') + assert status == 403 + assert error_code == 'AccessDenied' obj_key = 'obj' response = s3_client.put_object(Bucket=bucket, Key=obj_key, Body='obj_body') - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 e = assert_raises(ClientError, s3_client.put_object_tagging, Bucket=bucket, Key=obj_key, Tagging=tags) status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) - eq(error_code, 'AccessDenied') + assert status == 403 + assert error_code == 
'AccessDenied' e = assert_raises(ClientError, s3_client.delete_object_tagging, Bucket=bucket, Key=obj_key) status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) - eq(error_code, 'AccessDenied') + assert status == 403 + assert error_code == 'AccessDenied' response = s3_client.delete_object(Bucket=bucket, Key=obj_key) - eq(response['ResponseMetadata']['HTTPStatusCode'], 204) + assert response['ResponseMetadata']['HTTPStatusCode'] == 204 response = s3_client.delete_bucket(Bucket=bucket) - eq(response['ResponseMetadata']['HTTPStatusCode'], 204) + assert response['ResponseMetadata']['HTTPStatusCode'] == 204 response = client.delete_user_policy(PolicyName='DenyAccessPolicy', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 @attr(resource='user-policy') @@ -943,14 +942,14 @@ def test_verify_conflicting_user_policy_statements(): client = get_iam_client() response = client.put_user_policy(PolicyDocument=policy_document, PolicyName='DenyAccessPolicy', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 e = assert_raises(ClientError, s3client.list_objects, Bucket=bucket) status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) - eq(error_code, 'AccessDenied') + assert status == 403 + assert error_code == 'AccessDenied' response = client.delete_user_policy(PolicyName='DenyAccessPolicy', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 @attr(resource='user-policy') @@ -982,20 +981,20 @@ def test_verify_conflicting_user_policies(): client = get_iam_client() response = client.put_user_policy(PolicyDocument=policy_allow, PolicyName='AllowAccessPolicy', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert 
response['ResponseMetadata']['HTTPStatusCode'] == 200 response = client.put_user_policy(PolicyDocument=policy_deny, PolicyName='DenyAccessPolicy', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 e = assert_raises(ClientError, s3client.list_objects, Bucket=bucket) status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) - eq(error_code, 'AccessDenied') + assert status == 403 + assert error_code == 'AccessDenied' response = client.delete_user_policy(PolicyName='AllowAccessPolicy', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 response = client.delete_user_policy(PolicyName='DenyAccessPolicy', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 @attr(resource='user-policy') @@ -1019,12 +1018,12 @@ def test_verify_allow_iam_actions(): response = client1.put_user_policy(PolicyDocument=policy1, PolicyName='AllowAccessPolicy', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 response = iam_client_alt.get_user_policy(PolicyName='AllowAccessPolicy', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 response = iam_client_alt.list_user_policies(UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 response = iam_client_alt.delete_user_policy(PolicyName='AllowAccessPolicy', UserName=get_alt_user_id()) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 diff --git a/s3tests_boto3/functional/test_s3.py 
b/s3tests_boto3/functional/test_s3.py index c3d8148..52cef44 100644 --- a/s3tests_boto3/functional/test_s3.py +++ b/s3tests_boto3/functional/test_s3.py @@ -2,7 +2,6 @@ import boto3 import botocore.session from botocore.exceptions import ClientError from botocore.exceptions import ParamValidationError -from nose.tools import eq_ as eq from nose.plugins.attrib import attr import isodate import email.utils @@ -102,7 +101,7 @@ def _bucket_is_empty(bucket): def test_bucket_list_empty(): bucket = get_new_bucket_resource() is_empty = _bucket_is_empty(bucket) - eq(is_empty, True) + assert is_empty == True @attr(resource='bucket') @attr(method='get') @@ -115,7 +114,7 @@ def test_bucket_list_distinct(): bucket2 = get_new_bucket_resource() obj = bucket1.put_object(Body='str', Key='asdf') is_empty = _bucket_is_empty(bucket2) - eq(is_empty, True) + assert is_empty == True def _create_objects(bucket=None, bucket_name=None, keys=[]): """ @@ -164,15 +163,15 @@ def test_bucket_list_many(): response = client.list_objects(Bucket=bucket_name, MaxKeys=2) keys = _get_keys(response) - eq(len(keys), 2) - eq(keys, ['bar', 'baz']) - eq(response['IsTruncated'], True) + assert len(keys) == 2 + assert keys == ['bar', 'baz'] + assert response['IsTruncated'] == True response = client.list_objects(Bucket=bucket_name, Marker='baz',MaxKeys=2) keys = _get_keys(response) - eq(len(keys), 1) - eq(response['IsTruncated'], False) - eq(keys, ['foo']) + assert len(keys) == 1 + assert response['IsTruncated'] == False + assert keys == ['foo'] @attr(resource='bucket') @attr(method='get') @@ -188,15 +187,15 @@ def test_bucket_listv2_many(): response = client.list_objects_v2(Bucket=bucket_name, MaxKeys=2) keys = _get_keys(response) - eq(len(keys), 2) - eq(keys, ['bar', 'baz']) - eq(response['IsTruncated'], True) + assert len(keys) == 2 + assert keys == ['bar', 'baz'] + assert response['IsTruncated'] == True response = client.list_objects_v2(Bucket=bucket_name, StartAfter='baz',MaxKeys=2) keys = 
_get_keys(response) - eq(len(keys), 1) - eq(response['IsTruncated'], False) - eq(keys, ['foo']) + assert len(keys) == 1 + assert response['IsTruncated'] == False + assert keys == ['foo'] @attr(resource='bucket') @attr(method='get') @@ -212,7 +211,7 @@ def test_basic_key_count(): for j in range(5): client.put_object(Bucket=bucket_name, Key=str(j)) response1 = client.list_objects_v2(Bucket=bucket_name) - eq(response1['KeyCount'], 5) + assert response1['KeyCount'] == 5 @attr(resource='bucket') @attr(method='get') @@ -223,13 +222,13 @@ def test_bucket_list_delimiter_basic(): client = get_client() response = client.list_objects(Bucket=bucket_name, Delimiter='/') - eq(response['Delimiter'], '/') + assert response['Delimiter'] == '/' keys = _get_keys(response) - eq(keys, ['asdf']) + assert keys == ['asdf'] prefixes = _get_prefixes(response) - eq(len(prefixes), 2) - eq(prefixes, ['foo/', 'quux/']) + assert len(prefixes) == 2 + assert prefixes == ['foo/', 'quux/'] @attr(resource='bucket') @attr(method='get') @@ -242,14 +241,14 @@ def test_bucket_listv2_delimiter_basic(): client = get_client() response = client.list_objects_v2(Bucket=bucket_name, Delimiter='/') - eq(response['Delimiter'], '/') + assert response['Delimiter'] == '/' keys = _get_keys(response) - eq(keys, ['asdf']) + assert keys == ['asdf'] prefixes = _get_prefixes(response) - eq(len(prefixes), 2) - eq(prefixes, ['foo/', 'quux/']) - eq(response['KeyCount'], len(prefixes) + len(keys)) + assert len(prefixes) == 2 + assert prefixes == ['foo/', 'quux/'] + assert response['KeyCount'] == len(prefixes) + len(keys) @attr(resource='bucket') @@ -263,13 +262,13 @@ def test_bucket_listv2_encoding_basic(): client = get_client() response = client.list_objects_v2(Bucket=bucket_name, Delimiter='/', EncodingType='url') - eq(response['Delimiter'], '/') + assert response['Delimiter'] == '/' keys = _get_keys(response) - eq(keys, ['asdf%2Bb']) + assert keys == ['asdf%2Bb'] prefixes = _get_prefixes(response) - eq(len(prefixes), 3) - 
eq(prefixes, ['foo%2B1/', 'foo/', 'quux%20ab/']) + assert len(prefixes) == 3 + assert prefixes == ['foo%2B1/', 'foo/', 'quux%20ab/'] @attr(resource='bucket') @attr(method='get') @@ -281,13 +280,13 @@ def test_bucket_list_encoding_basic(): client = get_client() response = client.list_objects(Bucket=bucket_name, Delimiter='/', EncodingType='url') - eq(response['Delimiter'], '/') + assert response['Delimiter'] == '/' keys = _get_keys(response) - eq(keys, ['asdf%2Bb']) + assert keys == ['asdf%2Bb'] prefixes = _get_prefixes(response) - eq(len(prefixes), 3) - eq(prefixes, ['foo%2B1/', 'foo/', 'quux%20ab/']) + assert len(prefixes) == 3 + assert prefixes == ['foo%2B1/', 'foo/', 'quux%20ab/'] def validate_bucket_list(bucket_name, prefix, delimiter, marker, max_keys, @@ -295,18 +294,18 @@ def validate_bucket_list(bucket_name, prefix, delimiter, marker, max_keys, client = get_client() response = client.list_objects(Bucket=bucket_name, Delimiter=delimiter, Marker=marker, MaxKeys=max_keys, Prefix=prefix) - eq(response['IsTruncated'], is_truncated) + assert response['IsTruncated'] == is_truncated if 'NextMarker' not in response: response['NextMarker'] = None - eq(response['NextMarker'], next_marker) + assert response['NextMarker'] == next_marker keys = _get_keys(response) prefixes = _get_prefixes(response) - eq(len(keys), len(check_objs)) - eq(len(prefixes), len(check_prefixes)) - eq(keys, check_objs) - eq(prefixes, check_prefixes) + assert len(keys) == len(check_objs) + assert len(prefixes) == len(check_prefixes) + assert keys == check_objs + assert prefixes == check_prefixes return response['NextMarker'] @@ -320,20 +319,20 @@ def validate_bucket_listv2(bucket_name, prefix, delimiter, continuation_token, m else: params['StartAfter'] = '' response = client.list_objects_v2(**params) - eq(response['IsTruncated'], is_truncated) + assert response['IsTruncated'] == is_truncated if 'NextContinuationToken' not in response: response['NextContinuationToken'] = None if last: - 
eq(response['NextContinuationToken'], None) + assert response['NextContinuationToken'] == None keys = _get_keys(response) prefixes = _get_prefixes(response) - eq(len(keys), len(check_objs)) - eq(len(prefixes), len(check_prefixes)) - eq(keys, check_objs) - eq(prefixes, check_prefixes) + assert len(keys) == len(check_objs) + assert len(prefixes) == len(check_prefixes) + assert keys == check_objs + assert prefixes == check_prefixes return response['NextContinuationToken'] @@ -421,16 +420,16 @@ def test_bucket_list_delimiter_alt(): client = get_client() response = client.list_objects(Bucket=bucket_name, Delimiter='a') - eq(response['Delimiter'], 'a') + assert response['Delimiter'] == 'a' keys = _get_keys(response) # foo contains no 'a' and so is a complete key - eq(keys, ['foo']) + assert keys == ['foo'] # bar, baz, and cab should be broken up by the 'a' delimiters prefixes = _get_prefixes(response) - eq(len(prefixes), 2) - eq(prefixes, ['ba', 'ca']) + assert len(prefixes) == 2 + assert prefixes == ['ba', 'ca'] @attr(resource='bucket') @attr(method='get') @@ -442,16 +441,16 @@ def test_bucket_listv2_delimiter_alt(): client = get_client() response = client.list_objects_v2(Bucket=bucket_name, Delimiter='a') - eq(response['Delimiter'], 'a') + assert response['Delimiter'] == 'a' keys = _get_keys(response) # foo contains no 'a' and so is a complete key - eq(keys, ['foo']) + assert keys == ['foo'] # bar, baz, and cab should be broken up by the 'a' delimiters prefixes = _get_prefixes(response) - eq(len(prefixes), 2) - eq(prefixes, ['ba', 'ca']) + assert len(prefixes) == 2 + assert prefixes == ['ba', 'ca'] @attr(resource='bucket') @attr(method='get') @@ -517,15 +516,15 @@ def test_bucket_list_delimiter_percentage(): client = get_client() response = client.list_objects(Bucket=bucket_name, Delimiter='%') - eq(response['Delimiter'], '%') + assert response['Delimiter'] == '%' keys = _get_keys(response) # foo contains no 'a' and so is a complete key - eq(keys, ['foo']) + assert 
keys == ['foo'] prefixes = _get_prefixes(response) - eq(len(prefixes), 2) + assert len(prefixes) == 2 # bar, baz, and cab should be broken up by the 'a' delimiters - eq(prefixes, ['b%', 'c%']) + assert prefixes == ['b%', 'c%'] @attr(resource='bucket') @attr(method='get') @@ -537,15 +536,15 @@ def test_bucket_listv2_delimiter_percentage(): client = get_client() response = client.list_objects_v2(Bucket=bucket_name, Delimiter='%') - eq(response['Delimiter'], '%') + assert response['Delimiter'] == '%' keys = _get_keys(response) # foo contains no 'a' and so is a complete key - eq(keys, ['foo']) + assert keys == ['foo'] prefixes = _get_prefixes(response) - eq(len(prefixes), 2) + assert len(prefixes) == 2 # bar, baz, and cab should be broken up by the 'a' delimiters - eq(prefixes, ['b%', 'c%']) + assert prefixes == ['b%', 'c%'] @attr(resource='bucket') @attr(method='get') @@ -556,15 +555,15 @@ def test_bucket_list_delimiter_whitespace(): client = get_client() response = client.list_objects(Bucket=bucket_name, Delimiter=' ') - eq(response['Delimiter'], ' ') + assert response['Delimiter'] == ' ' keys = _get_keys(response) # foo contains no 'a' and so is a complete key - eq(keys, ['foo']) + assert keys == ['foo'] prefixes = _get_prefixes(response) - eq(len(prefixes), 2) + assert len(prefixes) == 2 # bar, baz, and cab should be broken up by the 'a' delimiters - eq(prefixes, ['b ', 'c ']) + assert prefixes == ['b ', 'c '] @attr(resource='bucket') @attr(method='get') @@ -576,15 +575,15 @@ def test_bucket_listv2_delimiter_whitespace(): client = get_client() response = client.list_objects_v2(Bucket=bucket_name, Delimiter=' ') - eq(response['Delimiter'], ' ') + assert response['Delimiter'] == ' ' keys = _get_keys(response) # foo contains no 'a' and so is a complete key - eq(keys, ['foo']) + assert keys == ['foo'] prefixes = _get_prefixes(response) - eq(len(prefixes), 2) + assert len(prefixes) == 2 # bar, baz, and cab should be broken up by the 'a' delimiters - eq(prefixes, ['b ', 
'c ']) + assert prefixes == ['b ', 'c '] @attr(resource='bucket') @attr(method='get') @@ -595,15 +594,15 @@ def test_bucket_list_delimiter_dot(): client = get_client() response = client.list_objects(Bucket=bucket_name, Delimiter='.') - eq(response['Delimiter'], '.') + assert response['Delimiter'] == '.' keys = _get_keys(response) # foo contains no 'a' and so is a complete key - eq(keys, ['foo']) + assert keys == ['foo'] prefixes = _get_prefixes(response) - eq(len(prefixes), 2) + assert len(prefixes) == 2 # bar, baz, and cab should be broken up by the 'a' delimiters - eq(prefixes, ['b.', 'c.']) + assert prefixes == ['b.', 'c.'] @attr(resource='bucket') @attr(method='get') @@ -615,15 +614,15 @@ def test_bucket_listv2_delimiter_dot(): client = get_client() response = client.list_objects_v2(Bucket=bucket_name, Delimiter='.') - eq(response['Delimiter'], '.') + assert response['Delimiter'] == '.' keys = _get_keys(response) # foo contains no 'a' and so is a complete key - eq(keys, ['foo']) + assert keys == ['foo'] prefixes = _get_prefixes(response) - eq(len(prefixes), 2) + assert len(prefixes) == 2 # bar, baz, and cab should be broken up by the 'a' delimiters - eq(prefixes, ['b.', 'c.']) + assert prefixes == ['b.', 'c.'] @attr(resource='bucket') @attr(method='get') @@ -635,12 +634,12 @@ def test_bucket_list_delimiter_unreadable(): client = get_client() response = client.list_objects(Bucket=bucket_name, Delimiter='\x0a') - eq(response['Delimiter'], '\x0a') + assert response['Delimiter'] == '\x0a' keys = _get_keys(response) prefixes = _get_prefixes(response) - eq(keys, key_names) - eq(prefixes, []) + assert keys == key_names + assert prefixes == [] @attr(resource='bucket') @attr(method='get') @@ -653,12 +652,12 @@ def test_bucket_listv2_delimiter_unreadable(): client = get_client() response = client.list_objects_v2(Bucket=bucket_name, Delimiter='\x0a') - eq(response['Delimiter'], '\x0a') + assert response['Delimiter'] == '\x0a' keys = _get_keys(response) prefixes = 
_get_prefixes(response) - eq(keys, key_names) - eq(prefixes, []) + assert keys == key_names + assert prefixes == [] @attr(resource='bucket') @attr(method='get') @@ -671,12 +670,12 @@ def test_bucket_list_delimiter_empty(): response = client.list_objects(Bucket=bucket_name, Delimiter='') # putting an empty value into Delimiter will not return a value in the response - eq('Delimiter' in response, False) + assert not 'Delimiter' in response keys = _get_keys(response) prefixes = _get_prefixes(response) - eq(keys, key_names) - eq(prefixes, []) + assert keys == key_names + assert prefixes == [] @attr(resource='bucket') @attr(method='get') @@ -690,12 +689,12 @@ def test_bucket_listv2_delimiter_empty(): response = client.list_objects_v2(Bucket=bucket_name, Delimiter='') # putting an empty value into Delimiter will not return a value in the response - eq('Delimiter' in response, False) + assert not 'Delimiter' in response keys = _get_keys(response) prefixes = _get_prefixes(response) - eq(keys, key_names) - eq(prefixes, []) + assert keys == key_names + assert prefixes == [] @attr(resource='bucket') @attr(method='get') @@ -708,12 +707,12 @@ def test_bucket_list_delimiter_none(): response = client.list_objects(Bucket=bucket_name) # putting an empty value into Delimiter will not return a value in the response - eq('Delimiter' in response, False) + assert not 'Delimiter' in response keys = _get_keys(response) prefixes = _get_prefixes(response) - eq(keys, key_names) - eq(prefixes, []) + assert keys == key_names + assert prefixes == [] @attr(resource='bucket') @attr(method='get') @@ -727,12 +726,12 @@ def test_bucket_listv2_delimiter_none(): response = client.list_objects_v2(Bucket=bucket_name) # putting an empty value into Delimiter will not return a value in the response - eq('Delimiter' in response, False) + assert not 'Delimiter' in response keys = _get_keys(response) prefixes = _get_prefixes(response) - eq(keys, key_names) - eq(prefixes, []) + assert keys == key_names + 
assert prefixes == [] @attr('list-objects-v2') @pytest.mark.list_objects_v2 @@ -743,7 +742,7 @@ def test_bucket_listv2_fetchowner_notempty(): response = client.list_objects_v2(Bucket=bucket_name, FetchOwner=True) objs_list = response['Contents'] - eq('Owner' in objs_list[0], True) + assert 'Owner' in objs_list[0] @attr('list-objects-v2') @pytest.mark.list_objects_v2 @@ -754,7 +753,7 @@ def test_bucket_listv2_fetchowner_defaultempty(): response = client.list_objects_v2(Bucket=bucket_name) objs_list = response['Contents'] - eq('Owner' in objs_list[0], False) + assert not 'Owner' in objs_list[0] @attr('list-objects-v2') @pytest.mark.list_objects_v2 @@ -765,10 +764,7 @@ def test_bucket_listv2_fetchowner_empty(): response = client.list_objects_v2(Bucket=bucket_name, FetchOwner= False) objs_list = response['Contents'] - eq('Owner' in objs_list[0], False) - - - + assert not 'Owner' in objs_list[0] @attr(resource='bucket') @attr(method='get') @@ -781,12 +777,12 @@ def test_bucket_list_delimiter_not_exist(): response = client.list_objects(Bucket=bucket_name, Delimiter='/') # putting an empty value into Delimiter will not return a value in the response - eq(response['Delimiter'], '/') + assert response['Delimiter'] == '/' keys = _get_keys(response) prefixes = _get_prefixes(response) - eq(keys, key_names) - eq(prefixes, []) + assert keys == key_names + assert prefixes == [] @attr(resource='bucket') @attr(method='get') @@ -800,12 +796,12 @@ def test_bucket_listv2_delimiter_not_exist(): response = client.list_objects_v2(Bucket=bucket_name, Delimiter='/') # putting an empty value into Delimiter will not return a value in the response - eq(response['Delimiter'], '/') + assert response['Delimiter'] == '/' keys = _get_keys(response) prefixes = _get_prefixes(response) - eq(keys, key_names) - eq(prefixes, []) + assert keys == key_names + assert prefixes == [] @attr(resource='bucket') @@ -822,12 +818,12 @@ def test_bucket_list_delimiter_not_skip_special(): client = get_client() 
response = client.list_objects(Bucket=bucket_name, Delimiter='/') - eq(response['Delimiter'], '/') + assert response['Delimiter'] == '/' keys = _get_keys(response) prefixes = _get_prefixes(response) - eq(keys, key_names2) - eq(prefixes, ['0/']) + assert keys == key_names2 + assert prefixes == ['0/'] @attr(resource='bucket') @attr(method='get') @@ -839,12 +835,12 @@ def test_bucket_list_prefix_basic(): client = get_client() response = client.list_objects(Bucket=bucket_name, Prefix='foo/') - eq(response['Prefix'], 'foo/') + assert response['Prefix'] == 'foo/' keys = _get_keys(response) prefixes = _get_prefixes(response) - eq(keys, ['foo/bar', 'foo/baz']) - eq(prefixes, []) + assert keys == ['foo/bar', 'foo/baz'] + assert prefixes == [] @attr(resource='bucket') @attr(method='get') @@ -858,12 +854,12 @@ def test_bucket_listv2_prefix_basic(): client = get_client() response = client.list_objects_v2(Bucket=bucket_name, Prefix='foo/') - eq(response['Prefix'], 'foo/') + assert response['Prefix'] == 'foo/' keys = _get_keys(response) prefixes = _get_prefixes(response) - eq(keys, ['foo/bar', 'foo/baz']) - eq(prefixes, []) + assert keys == ['foo/bar', 'foo/baz'] + assert prefixes == [] # just testing that we can do the delimeter and prefix logic on non-slashes @attr(resource='bucket') @@ -876,12 +872,12 @@ def test_bucket_list_prefix_alt(): client = get_client() response = client.list_objects(Bucket=bucket_name, Prefix='ba') - eq(response['Prefix'], 'ba') + assert response['Prefix'] == 'ba' keys = _get_keys(response) prefixes = _get_prefixes(response) - eq(keys, ['bar', 'baz']) - eq(prefixes, []) + assert keys == ['bar', 'baz'] + assert prefixes == [] @attr(resource='bucket') @attr(method='get') @@ -895,12 +891,12 @@ def test_bucket_listv2_prefix_alt(): client = get_client() response = client.list_objects_v2(Bucket=bucket_name, Prefix='ba') - eq(response['Prefix'], 'ba') + assert response['Prefix'] == 'ba' keys = _get_keys(response) prefixes = _get_prefixes(response) - eq(keys, 
['bar', 'baz']) - eq(prefixes, []) + assert keys == ['bar', 'baz'] + assert prefixes == [] @attr(resource='bucket') @attr(method='get') @@ -912,12 +908,12 @@ def test_bucket_list_prefix_empty(): client = get_client() response = client.list_objects(Bucket=bucket_name, Prefix='') - eq(response['Prefix'], '') + assert response['Prefix'] == '' keys = _get_keys(response) prefixes = _get_prefixes(response) - eq(keys, key_names) - eq(prefixes, []) + assert keys == key_names + assert prefixes == [] @attr(resource='bucket') @attr(method='get') @@ -931,12 +927,12 @@ def test_bucket_listv2_prefix_empty(): client = get_client() response = client.list_objects_v2(Bucket=bucket_name, Prefix='') - eq(response['Prefix'], '') + assert response['Prefix'] == '' keys = _get_keys(response) prefixes = _get_prefixes(response) - eq(keys, key_names) - eq(prefixes, []) + assert keys == key_names + assert prefixes == [] @attr(resource='bucket') @attr(method='get') @@ -948,12 +944,12 @@ def test_bucket_list_prefix_none(): client = get_client() response = client.list_objects(Bucket=bucket_name, Prefix='') - eq(response['Prefix'], '') + assert response['Prefix'] == '' keys = _get_keys(response) prefixes = _get_prefixes(response) - eq(keys, key_names) - eq(prefixes, []) + assert keys == key_names + assert prefixes == [] @attr(resource='bucket') @attr(method='get') @@ -967,12 +963,12 @@ def test_bucket_listv2_prefix_none(): client = get_client() response = client.list_objects_v2(Bucket=bucket_name, Prefix='') - eq(response['Prefix'], '') + assert response['Prefix'] == '' keys = _get_keys(response) prefixes = _get_prefixes(response) - eq(keys, key_names) - eq(prefixes, []) + assert keys == key_names + assert prefixes == [] @attr(resource='bucket') @attr(method='get') @@ -984,12 +980,12 @@ def test_bucket_list_prefix_not_exist(): client = get_client() response = client.list_objects(Bucket=bucket_name, Prefix='d') - eq(response['Prefix'], 'd') + assert response['Prefix'] == 'd' keys = 
_get_keys(response) prefixes = _get_prefixes(response) - eq(keys, []) - eq(prefixes, []) + assert keys == [] + assert prefixes == [] @attr(resource='bucket') @attr(method='get') @@ -1003,12 +999,12 @@ def test_bucket_listv2_prefix_not_exist(): client = get_client() response = client.list_objects_v2(Bucket=bucket_name, Prefix='d') - eq(response['Prefix'], 'd') + assert response['Prefix'] == 'd' keys = _get_keys(response) prefixes = _get_prefixes(response) - eq(keys, []) - eq(prefixes, []) + assert keys == [] + assert prefixes == [] @attr(resource='bucket') @attr(method='get') @@ -1020,12 +1016,12 @@ def test_bucket_list_prefix_unreadable(): client = get_client() response = client.list_objects(Bucket=bucket_name, Prefix='\x0a') - eq(response['Prefix'], '\x0a') + assert response['Prefix'] == '\x0a' keys = _get_keys(response) prefixes = _get_prefixes(response) - eq(keys, []) - eq(prefixes, []) + assert keys == [] + assert prefixes == [] @attr(resource='bucket') @attr(method='get') @@ -1039,12 +1035,12 @@ def test_bucket_listv2_prefix_unreadable(): client = get_client() response = client.list_objects_v2(Bucket=bucket_name, Prefix='\x0a') - eq(response['Prefix'], '\x0a') + assert response['Prefix'] == '\x0a' keys = _get_keys(response) prefixes = _get_prefixes(response) - eq(keys, []) - eq(prefixes, []) + assert keys == [] + assert prefixes == [] @attr(resource='bucket') @attr(method='get') @@ -1056,13 +1052,13 @@ def test_bucket_list_prefix_delimiter_basic(): client = get_client() response = client.list_objects(Bucket=bucket_name, Delimiter='/', Prefix='foo/') - eq(response['Prefix'], 'foo/') - eq(response['Delimiter'], '/') + assert response['Prefix'] == 'foo/' + assert response['Delimiter'] == '/' keys = _get_keys(response) prefixes = _get_prefixes(response) - eq(keys, ['foo/bar']) - eq(prefixes, ['foo/baz/']) + assert keys == ['foo/bar'] + assert prefixes == ['foo/baz/'] @attr(resource='bucket') @attr(method='get') @@ -1076,13 +1072,13 @@ def 
test_bucket_listv2_prefix_delimiter_basic(): client = get_client() response = client.list_objects_v2(Bucket=bucket_name, Delimiter='/', Prefix='foo/') - eq(response['Prefix'], 'foo/') - eq(response['Delimiter'], '/') + assert response['Prefix'] == 'foo/' + assert response['Delimiter'] == '/' keys = _get_keys(response) prefixes = _get_prefixes(response) - eq(keys, ['foo/bar']) - eq(prefixes, ['foo/baz/']) + assert keys == ['foo/bar'] + assert prefixes == ['foo/baz/'] @attr(resource='bucket') @attr(method='get') @@ -1094,13 +1090,13 @@ def test_bucket_list_prefix_delimiter_alt(): client = get_client() response = client.list_objects(Bucket=bucket_name, Delimiter='a', Prefix='ba') - eq(response['Prefix'], 'ba') - eq(response['Delimiter'], 'a') + assert response['Prefix'] == 'ba' + assert response['Delimiter'] == 'a' keys = _get_keys(response) prefixes = _get_prefixes(response) - eq(keys, ['bar']) - eq(prefixes, ['baza']) + assert keys == ['bar'] + assert prefixes == ['baza'] @attr('list-objects-v2') @pytest.mark.list_objects_v2 @@ -1110,13 +1106,13 @@ def test_bucket_listv2_prefix_delimiter_alt(): client = get_client() response = client.list_objects_v2(Bucket=bucket_name, Delimiter='a', Prefix='ba') - eq(response['Prefix'], 'ba') - eq(response['Delimiter'], 'a') + assert response['Prefix'] == 'ba' + assert response['Delimiter'] == 'a' keys = _get_keys(response) prefixes = _get_prefixes(response) - eq(keys, ['bar']) - eq(prefixes, ['baza']) + assert keys == ['bar'] + assert prefixes == ['baza'] @attr(resource='bucket') @attr(method='get') @@ -1131,8 +1127,8 @@ def test_bucket_list_prefix_delimiter_prefix_not_exist(): keys = _get_keys(response) prefixes = _get_prefixes(response) - eq(keys, []) - eq(prefixes, []) + assert keys == [] + assert prefixes == [] @attr(resource='bucket') @attr(method='get') @@ -1149,8 +1145,8 @@ def test_bucket_listv2_prefix_delimiter_prefix_not_exist(): keys = _get_keys(response) prefixes = _get_prefixes(response) - eq(keys, []) - eq(prefixes, 
[]) + assert keys == [] + assert prefixes == [] @attr(resource='bucket') @attr(method='get') @@ -1165,8 +1161,8 @@ def test_bucket_list_prefix_delimiter_delimiter_not_exist(): keys = _get_keys(response) prefixes = _get_prefixes(response) - eq(keys, ['b/a/c', 'b/a/g', 'b/a/r']) - eq(prefixes, []) + assert keys == ['b/a/c', 'b/a/g', 'b/a/r'] + assert prefixes == [] @attr(resource='bucket') @attr(method='get') @@ -1183,8 +1179,8 @@ def test_bucket_listv2_prefix_delimiter_delimiter_not_exist(): keys = _get_keys(response) prefixes = _get_prefixes(response) - eq(keys, ['b/a/c', 'b/a/g', 'b/a/r']) - eq(prefixes, []) + assert keys == ['b/a/c', 'b/a/g', 'b/a/r'] + assert prefixes == [] @attr(resource='bucket') @attr(method='get') @@ -1199,8 +1195,8 @@ def test_bucket_list_prefix_delimiter_prefix_delimiter_not_exist(): keys = _get_keys(response) prefixes = _get_prefixes(response) - eq(keys, []) - eq(prefixes, []) + assert keys == [] + assert prefixes == [] @attr(resource='bucket') @attr(method='get') @@ -1217,8 +1213,8 @@ def test_bucket_listv2_prefix_delimiter_prefix_delimiter_not_exist(): keys = _get_keys(response) prefixes = _get_prefixes(response) - eq(keys, []) - eq(prefixes, []) + assert keys == [] + assert prefixes == [] @attr(resource='bucket') @attr(method='get') @@ -1232,16 +1228,16 @@ def test_bucket_list_maxkeys_one(): client = get_client() response = client.list_objects(Bucket=bucket_name, MaxKeys=1) - eq(response['IsTruncated'], True) + assert response['IsTruncated'] == True keys = _get_keys(response) - eq(keys, key_names[0:1]) + assert keys == key_names[0:1] response = client.list_objects(Bucket=bucket_name, Marker=key_names[0]) - eq(response['IsTruncated'], False) + assert response['IsTruncated'] == False keys = _get_keys(response) - eq(keys, key_names[1:]) + assert keys == key_names[1:] @attr(resource='bucket') @attr(method='get') @@ -1257,16 +1253,16 @@ def test_bucket_listv2_maxkeys_one(): client = get_client() response = 
client.list_objects_v2(Bucket=bucket_name, MaxKeys=1) - eq(response['IsTruncated'], True) + assert response['IsTruncated'] == True keys = _get_keys(response) - eq(keys, key_names[0:1]) + assert keys == key_names[0:1] response = client.list_objects_v2(Bucket=bucket_name, StartAfter=key_names[0]) - eq(response['IsTruncated'], False) + assert response['IsTruncated'] == False keys = _get_keys(response) - eq(keys, key_names[1:]) + assert keys == key_names[1:] @attr(resource='bucket') @attr(method='get') @@ -1279,9 +1275,9 @@ def test_bucket_list_maxkeys_zero(): response = client.list_objects(Bucket=bucket_name, MaxKeys=0) - eq(response['IsTruncated'], False) + assert response['IsTruncated'] == False keys = _get_keys(response) - eq(keys, []) + assert keys == [] @attr(resource='bucket') @attr(method='get') @@ -1296,9 +1292,9 @@ def test_bucket_listv2_maxkeys_zero(): response = client.list_objects_v2(Bucket=bucket_name, MaxKeys=0) - eq(response['IsTruncated'], False) + assert response['IsTruncated'] == False keys = _get_keys(response) - eq(keys, []) + assert keys == [] @attr(resource='bucket') @attr(method='get') @@ -1310,10 +1306,10 @@ def test_bucket_list_maxkeys_none(): client = get_client() response = client.list_objects(Bucket=bucket_name) - eq(response['IsTruncated'], False) + assert response['IsTruncated'] == False keys = _get_keys(response) - eq(keys, key_names) - eq(response['MaxKeys'], 1000) + assert keys == key_names + assert response['MaxKeys'] == 1000 @attr(resource='bucket') @attr(method='get') @@ -1327,10 +1323,10 @@ def test_bucket_listv2_maxkeys_none(): client = get_client() response = client.list_objects_v2(Bucket=bucket_name) - eq(response['IsTruncated'], False) + assert response['IsTruncated'] == False keys = _get_keys(response) - eq(keys, key_names) - eq(response['MaxKeys'], 1000) + assert keys == key_names + assert response['MaxKeys'] == 1000 def get_http_response_body(**kwargs): global http_response_body @@ -1368,11 +1364,11 @@ def 
test_account_usage(): xml = ET.fromstring(http_response_body.decode('utf-8')) parsed = parseXmlToJson(xml) summary = parsed['Summary'] - eq(summary['QuotaMaxBytes'], '-1') - eq(summary['QuotaMaxBuckets'], '1000') - eq(summary['QuotaMaxObjCount'], '-1') - eq(summary['QuotaMaxBytesPerBucket'], '-1') - eq(summary['QuotaMaxObjCountPerBucket'], '-1') + assert summary['QuotaMaxBytes'] == '-1' + assert summary['QuotaMaxBuckets'] == '1000' + assert summary['QuotaMaxObjCount'] == '-1' + assert summary['QuotaMaxBytesPerBucket'] == '-1' + assert summary['QuotaMaxObjCountPerBucket'] == '-1' @attr(resource='bucket') @attr(method='head') @@ -1390,13 +1386,13 @@ def test_head_bucket_usage(): client.meta.events.register('after-call.s3.HeadBucket', get_http_response) client.head_bucket(Bucket=bucket_name) hdrs = http_response['headers'] - eq(hdrs['X-RGW-Object-Count'], '1') - eq(hdrs['X-RGW-Bytes-Used'], '3') - eq(hdrs['X-RGW-Quota-User-Size'], '-1') - eq(hdrs['X-RGW-Quota-User-Objects'], '-1') - eq(hdrs['X-RGW-Quota-Max-Buckets'], '1000') - eq(hdrs['X-RGW-Quota-Bucket-Size'], '-1') - eq(hdrs['X-RGW-Quota-Bucket-Objects'], '-1') + assert hdrs['X-RGW-Object-Count'] == '1' + assert hdrs['X-RGW-Bytes-Used'] == '3' + assert hdrs['X-RGW-Quota-User-Size'] == '-1' + assert hdrs['X-RGW-Quota-User-Objects'] == '-1' + assert hdrs['X-RGW-Quota-Max-Buckets'] == '1000' + assert hdrs['X-RGW-Quota-Bucket-Size'] == '-1' + assert hdrs['X-RGW-Quota-Bucket-Objects'] == '-1' @attr(resource='bucket') @attr(method='get') @@ -1424,38 +1420,38 @@ def test_bucket_list_unordered(): # test simple retrieval response = client.list_objects(Bucket=bucket_name, MaxKeys=1000) unordered_keys_out = _get_keys(response) - eq(len(keys_in), len(unordered_keys_out)) - eq(keys_in.sort(), unordered_keys_out.sort()) + assert len(keys_in) == len(unordered_keys_out) + assert keys_in.sort() == unordered_keys_out.sort() # test retrieval with prefix response = client.list_objects(Bucket=bucket_name, MaxKeys=1000, Prefix="abc/") 
unordered_keys_out = _get_keys(response) - eq(5, len(unordered_keys_out)) + assert 5 == len(unordered_keys_out) # test incremental retrieval with marker response = client.list_objects(Bucket=bucket_name, MaxKeys=6) unordered_keys_out = _get_keys(response) - eq(6, len(unordered_keys_out)) + assert 6 == len(unordered_keys_out) # now get the next bunch response = client.list_objects(Bucket=bucket_name, MaxKeys=6, Marker=unordered_keys_out[-1]) unordered_keys_out2 = _get_keys(response) - eq(6, len(unordered_keys_out2)) + assert 6 == len(unordered_keys_out2) # make sure there's no overlap between the incremental retrievals intersect = set(unordered_keys_out).intersection(unordered_keys_out2) - eq(0, len(intersect)) + assert 0 == len(intersect) # verify that unordered used with delimiter results in error e = assert_raises(ClientError, client.list_objects, Bucket=bucket_name, Delimiter="/") status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) - eq(error_code, 'InvalidArgument') + assert status == 400 + assert error_code == 'InvalidArgument' @attr(resource='bucket') @attr(method='get') @@ -1485,38 +1481,38 @@ def test_bucket_listv2_unordered(): # test simple retrieval response = client.list_objects_v2(Bucket=bucket_name, MaxKeys=1000) unordered_keys_out = _get_keys(response) - eq(len(keys_in), len(unordered_keys_out)) - eq(keys_in.sort(), unordered_keys_out.sort()) + assert len(keys_in) == len(unordered_keys_out) + assert keys_in.sort() == unordered_keys_out.sort() # test retrieval with prefix response = client.list_objects_v2(Bucket=bucket_name, MaxKeys=1000, Prefix="abc/") unordered_keys_out = _get_keys(response) - eq(5, len(unordered_keys_out)) + assert 5 == len(unordered_keys_out) # test incremental retrieval with marker response = client.list_objects_v2(Bucket=bucket_name, MaxKeys=6) unordered_keys_out = _get_keys(response) - eq(6, len(unordered_keys_out)) + assert 6 == len(unordered_keys_out) # now get the next bunch response = 
client.list_objects_v2(Bucket=bucket_name, MaxKeys=6, StartAfter=unordered_keys_out[-1]) unordered_keys_out2 = _get_keys(response) - eq(6, len(unordered_keys_out2)) + assert 6 == len(unordered_keys_out2) # make sure there's no overlap between the incremental retrievals intersect = set(unordered_keys_out).intersection(unordered_keys_out2) - eq(0, len(intersect)) + assert 0 == len(intersect) # verify that unordered used with delimiter results in error e = assert_raises(ClientError, client.list_objects, Bucket=bucket_name, Delimiter="/") status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) - eq(error_code, 'InvalidArgument') + assert status == 400 + assert error_code == 'InvalidArgument' @attr(resource='bucket') @@ -1536,8 +1532,8 @@ def test_bucket_list_maxkeys_invalid(): e = assert_raises(ClientError, client.list_objects, Bucket=bucket_name) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) - eq(error_code, 'InvalidArgument') + assert status == 400 + assert error_code == 'InvalidArgument' @@ -1551,7 +1547,7 @@ def test_bucket_list_marker_none(): client = get_client() response = client.list_objects(Bucket=bucket_name) - eq(response['Marker'], '') + assert response['Marker'] == '' @attr(resource='bucket') @@ -1564,10 +1560,10 @@ def test_bucket_list_marker_empty(): client = get_client() response = client.list_objects(Bucket=bucket_name, Marker='') - eq(response['Marker'], '') - eq(response['IsTruncated'], False) + assert response['Marker'] == '' + assert response['IsTruncated'] == False keys = _get_keys(response) - eq(keys, key_names) + assert keys == key_names @attr(resource='bucket') @attr(method='get') @@ -1581,10 +1577,10 @@ def test_bucket_listv2_continuationtoken_empty(): client = get_client() response = client.list_objects_v2(Bucket=bucket_name, ContinuationToken='') - eq(response['ContinuationToken'], '') - eq(response['IsTruncated'], False) + assert response['ContinuationToken'] == '' + assert 
response['IsTruncated'] == False keys = _get_keys(response) - eq(keys, key_names) + assert keys == key_names @attr(resource='bucket') @attr(method='get') @@ -1601,11 +1597,11 @@ def test_bucket_listv2_continuationtoken(): next_continuation_token = response1['NextContinuationToken'] response2 = client.list_objects_v2(Bucket=bucket_name, ContinuationToken=next_continuation_token) - eq(response2['ContinuationToken'], next_continuation_token) - eq(response2['IsTruncated'], False) + assert response2['ContinuationToken'] == next_continuation_token + assert response2['IsTruncated'] == False key_names2 = ['baz', 'foo', 'quxx'] keys = _get_keys(response2) - eq(keys, key_names2) + assert keys == key_names2 @attr(resource='bucket') @attr(method='get') @@ -1624,12 +1620,12 @@ def test_bucket_listv2_both_continuationtoken_startafter(): next_continuation_token = response1['NextContinuationToken'] response2 = client.list_objects_v2(Bucket=bucket_name, StartAfter='bar', ContinuationToken=next_continuation_token) - eq(response2['ContinuationToken'], next_continuation_token) - eq(response2['StartAfter'], 'bar') - eq(response2['IsTruncated'], False) + assert response2['ContinuationToken'] == next_continuation_token + assert response2['StartAfter'] == 'bar' + assert response2['IsTruncated'] == False key_names2 = ['foo', 'quxx'] keys = _get_keys(response2) - eq(keys, key_names2) + assert keys == key_names2 @attr(resource='bucket') @attr(method='get') @@ -1641,10 +1637,10 @@ def test_bucket_list_marker_unreadable(): client = get_client() response = client.list_objects(Bucket=bucket_name, Marker='\x0a') - eq(response['Marker'], '\x0a') - eq(response['IsTruncated'], False) + assert response['Marker'] == '\x0a' + assert response['IsTruncated'] == False keys = _get_keys(response) - eq(keys, key_names) + assert keys == key_names @attr(resource='bucket') @attr(method='get') @@ -1658,10 +1654,10 @@ def test_bucket_listv2_startafter_unreadable(): client = get_client() response = 
client.list_objects_v2(Bucket=bucket_name, StartAfter='\x0a') - eq(response['StartAfter'], '\x0a') - eq(response['IsTruncated'], False) + assert response['StartAfter'] == '\x0a' + assert response['IsTruncated'] == False keys = _get_keys(response) - eq(keys, key_names) + assert keys == key_names @attr(resource='bucket') @attr(method='get') @@ -1673,9 +1669,9 @@ def test_bucket_list_marker_not_in_list(): client = get_client() response = client.list_objects(Bucket=bucket_name, Marker='blah') - eq(response['Marker'], 'blah') + assert response['Marker'] == 'blah' keys = _get_keys(response) - eq(keys, [ 'foo','quxx']) + assert keys == [ 'foo','quxx'] @attr(resource='bucket') @attr(method='get') @@ -1689,9 +1685,9 @@ def test_bucket_listv2_startafter_not_in_list(): client = get_client() response = client.list_objects_v2(Bucket=bucket_name, StartAfter='blah') - eq(response['StartAfter'], 'blah') + assert response['StartAfter'] == 'blah' keys = _get_keys(response) - eq(keys, ['foo', 'quxx']) + assert keys == ['foo', 'quxx'] @attr(resource='bucket') @attr(method='get') @@ -1703,10 +1699,10 @@ def test_bucket_list_marker_after_list(): client = get_client() response = client.list_objects(Bucket=bucket_name, Marker='zzz') - eq(response['Marker'], 'zzz') + assert response['Marker'] == 'zzz' keys = _get_keys(response) - eq(response['IsTruncated'], False) - eq(keys, []) + assert response['IsTruncated'] == False + assert keys == [] @attr(resource='bucket') @attr(method='get') @@ -1720,10 +1716,10 @@ def test_bucket_listv2_startafter_after_list(): client = get_client() response = client.list_objects_v2(Bucket=bucket_name, StartAfter='zzz') - eq(response['StartAfter'], 'zzz') + assert response['StartAfter'] == 'zzz' keys = _get_keys(response) - eq(response['IsTruncated'], False) - eq(keys, []) + assert response['IsTruncated'] == False + assert keys == [] def _compare_dates(datetime1, datetime2): """ @@ -1732,7 +1728,7 @@ def _compare_dates(datetime1, datetime2): # both times are in 
datetime format but datetime1 has # microseconds and datetime2 does not datetime1 = datetime1.replace(microsecond=0) - eq(datetime1, datetime2) + assert datetime1 == datetime2 @attr(resource='object') @attr(method='head') @@ -1764,10 +1760,10 @@ def test_bucket_list_return_data(): for obj in objs_list: key_name = obj['Key'] key_data = data[key_name] - eq(obj['ETag'],key_data['ETag']) - eq(obj['Size'],key_data['ContentLength']) - eq(obj['Owner']['DisplayName'],key_data['DisplayName']) - eq(obj['Owner']['ID'],key_data['ID']) + assert obj['ETag'] == key_data['ETag'] + assert obj['Size'] == key_data['ContentLength'] + assert obj['Owner']['DisplayName'] == key_data['DisplayName'] + assert obj['Owner']['ID'] == key_data['ID'] _compare_dates(obj['LastModified'],key_data['LastModified']) @@ -1807,11 +1803,11 @@ def test_bucket_list_return_data_versioning(): for obj in objs_list: key_name = obj['Key'] key_data = data[key_name] - eq(obj['Owner']['DisplayName'],key_data['DisplayName']) - eq(obj['ETag'],key_data['ETag']) - eq(obj['Size'],key_data['ContentLength']) - eq(obj['Owner']['ID'],key_data['ID']) - eq(obj['VersionId'], key_data['VersionId']) + assert obj['Owner']['DisplayName'] == key_data['DisplayName'] + assert obj['ETag'] == key_data['ETag'] + assert obj['Size'] == key_data['ContentLength'] + assert obj['Owner']['ID'] == key_data['ID'] + assert obj['VersionId'] == key_data['VersionId'] _compare_dates(obj['LastModified'],key_data['LastModified']) @attr(resource='bucket') @@ -1851,8 +1847,8 @@ def test_bucket_list_objects_anonymous_fail(): e = assert_raises(ClientError, unauthenticated_client.list_objects, Bucket=bucket_name) status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) - eq(error_code, 'AccessDenied') + assert status == 403 + assert error_code == 'AccessDenied' @attr(resource='bucket') @attr(method='get') @@ -1867,8 +1863,8 @@ def test_bucket_listv2_objects_anonymous_fail(): e = assert_raises(ClientError, 
unauthenticated_client.list_objects_v2, Bucket=bucket_name) status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) - eq(error_code, 'AccessDenied') + assert status == 403 + assert error_code == 'AccessDenied' @attr(resource='bucket') @attr(method='get') @@ -1881,8 +1877,8 @@ def test_bucket_notexist(): e = assert_raises(ClientError, client.list_objects, Bucket=bucket_name) status, error_code = _get_status_and_error_code(e.response) - eq(status, 404) - eq(error_code, 'NoSuchBucket') + assert status == 404 + assert error_code == 'NoSuchBucket' @attr(resource='bucket') @attr(method='get') @@ -1897,8 +1893,8 @@ def test_bucketv2_notexist(): e = assert_raises(ClientError, client.list_objects_v2, Bucket=bucket_name) status, error_code = _get_status_and_error_code(e.response) - eq(status, 404) - eq(error_code, 'NoSuchBucket') + assert status == 404 + assert error_code == 'NoSuchBucket' @attr(resource='bucket') @attr(method='delete') @@ -1911,8 +1907,8 @@ def test_bucket_delete_notexist(): e = assert_raises(ClientError, client.delete_bucket, Bucket=bucket_name) status, error_code = _get_status_and_error_code(e.response) - eq(status, 404) - eq(error_code, 'NoSuchBucket') + assert status == 404 + assert error_code == 'NoSuchBucket' @attr(resource='bucket') @attr(method='delete') @@ -1926,8 +1922,8 @@ def test_bucket_delete_nonempty(): e = assert_raises(ClientError, client.delete_bucket, Bucket=bucket_name) status, error_code = _get_status_and_error_code(e.response) - eq(status, 409) - eq(error_code, 'BucketNotEmpty') + assert status == 409 + assert error_code == 'BucketNotEmpty' def _do_set_bucket_canned_acl(client, bucket_name, canned_acl, i, results): try: @@ -1965,7 +1961,7 @@ def test_bucket_concurrent_set_canned_acl(): _do_wait_completion(t) for r in results: - eq(r, True) + assert r == True @attr(resource='object') @attr(method='put') @@ -1979,8 +1975,8 @@ def test_object_write_to_nonexist_bucket(): e = assert_raises(ClientError, 
client.put_object, Bucket=bucket_name, Key='foo', Body='foo') status, error_code = _get_status_and_error_code(e.response) - eq(status, 404) - eq(error_code, 'NoSuchBucket') + assert status == 404 + assert error_code == 'NoSuchBucket' @attr(resource='bucket') @@ -1995,8 +1991,8 @@ def test_bucket_create_delete(): e = assert_raises(ClientError, client.delete_bucket, Bucket=bucket_name) status, error_code = _get_status_and_error_code(e.response) - eq(status, 404) - eq(error_code, 'NoSuchBucket') + assert status == 404 + assert error_code == 'NoSuchBucket' @attr(resource='object') @attr(method='get') @@ -2009,8 +2005,8 @@ def test_object_read_not_exist(): e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key='bar') status, error_code = _get_status_and_error_code(e.response) - eq(status, 404) - eq(error_code, 'NoSuchKey') + assert status == 404 + assert error_code == 'NoSuchKey' http_response = None @@ -2037,7 +2033,7 @@ def test_object_requestid_matches_header_on_error(): request_id = resp_body_xml.find('.//RequestId').text assert request_id is not None - eq(request_id, e.response['ResponseMetadata']['RequestId']) + assert request_id == e.response['ResponseMetadata']['RequestId'] def _make_objs_dict(key_names): objs_list = [] @@ -2056,18 +2052,18 @@ def test_multi_object_delete(): bucket_name = _create_objects(keys=key_names) client = get_client() response = client.list_objects(Bucket=bucket_name) - eq(len(response['Contents']), 3) + assert len(response['Contents']) == 3 objs_dict = _make_objs_dict(key_names=key_names) response = client.delete_objects(Bucket=bucket_name, Delete=objs_dict) - eq(len(response['Deleted']), 3) + assert len(response['Deleted']) == 3 assert 'Errors' not in response response = client.list_objects(Bucket=bucket_name) assert 'Contents' not in response response = client.delete_objects(Bucket=bucket_name, Delete=objs_dict) - eq(len(response['Deleted']), 3) + assert len(response['Deleted']) == 3 assert 'Errors' not in response 
response = client.list_objects(Bucket=bucket_name) assert 'Contents' not in response @@ -2083,18 +2079,18 @@ def test_multi_objectv2_delete(): bucket_name = _create_objects(keys=key_names) client = get_client() response = client.list_objects_v2(Bucket=bucket_name) - eq(len(response['Contents']), 3) + assert len(response['Contents']) == 3 objs_dict = _make_objs_dict(key_names=key_names) response = client.delete_objects(Bucket=bucket_name, Delete=objs_dict) - eq(len(response['Deleted']), 3) + assert len(response['Deleted']) == 3 assert 'Errors' not in response response = client.list_objects_v2(Bucket=bucket_name) assert 'Contents' not in response response = client.delete_objects(Bucket=bucket_name, Delete=objs_dict) - eq(len(response['Deleted']), 3) + assert len(response['Deleted']) == 3 assert 'Errors' not in response response = client.list_objects_v2(Bucket=bucket_name) assert 'Contents' not in response @@ -2113,12 +2109,12 @@ def test_multi_object_delete_key_limit(): numKeys = 0 for page in pages: numKeys += len(page['Contents']) - eq(numKeys, 1001) + assert numKeys == 1001 objs_dict = _make_objs_dict(key_names=key_names) e = assert_raises(ClientError,client.delete_objects,Bucket=bucket_name,Delete=objs_dict) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) + assert status == 400 @attr(resource='object') @attr(method='post') @@ -2134,12 +2130,12 @@ def test_multi_objectv2_delete_key_limit(): numKeys = 0 for page in pages: numKeys += len(page['Contents']) - eq(numKeys, 1001) + assert numKeys == 1001 objs_dict = _make_objs_dict(key_names=key_names) e = assert_raises(ClientError,client.delete_objects,Bucket=bucket_name,Delete=objs_dict) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) + assert status == 400 @attr(resource='object') @attr(method='put') @@ -2151,7 +2147,7 @@ def test_object_head_zero_bytes(): client.put_object(Bucket=bucket_name, Key='foo', Body='') response = 
client.head_object(Bucket=bucket_name, Key='foo') - eq(response['ContentLength'], 0) + assert response['ContentLength'] == 0 @attr(resource='object') @attr(method='put') @@ -2161,8 +2157,8 @@ def test_object_write_check_etag(): bucket_name = get_new_bucket() client = get_client() response = client.put_object(Bucket=bucket_name, Key='foo', Body='bar') - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) - eq(response['ETag'], '"37b51d194a7513e45b56f6524f2d51f2"') + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 + assert response['ETag'] == '"37b51d194a7513e45b56f6524f2d51f2"' @attr(resource='object') @attr(method='put') @@ -2175,7 +2171,7 @@ def test_object_write_cache_control(): client.put_object(Bucket=bucket_name, Key='foo', Body='bar', CacheControl=cache_control) response = client.head_object(Bucket=bucket_name, Key='foo') - eq(response['ResponseMetadata']['HTTPHeaders']['cache-control'], cache_control) + assert response['ResponseMetadata']['HTTPHeaders']['cache-control'] == cache_control @attr(resource='object') @attr(method='put') @@ -2212,13 +2208,13 @@ def test_object_write_read_update_read_delete(): # Read response = client.get_object(Bucket=bucket_name, Key='foo') body = _get_body(response) - eq(body, 'bar') + assert body == 'bar' # Update client.put_object(Bucket=bucket_name, Key='foo', Body='soup') # Read response = client.get_object(Bucket=bucket_name, Key='foo') body = _get_body(response) - eq(body, 'soup') + assert body == 'soup' # Delete client.delete_object(Bucket=bucket_name, Key='foo') @@ -2245,7 +2241,7 @@ def _set_get_metadata(metadata, bucket_name=None): @attr(assertion='reread what we wrote') def test_object_set_get_metadata_none_to_good(): got = _set_get_metadata('mymeta') - eq(got, 'mymeta') + assert got == 'mymeta' @attr(resource='object.metadata') @attr(method='put') @@ -2253,7 +2249,7 @@ def test_object_set_get_metadata_none_to_good(): @attr(assertion='write empty value, returns empty value') def 
test_object_set_get_metadata_none_to_empty(): got = _set_get_metadata('') - eq(got, '') + assert got == '' @attr(resource='object.metadata') @attr(method='put') @@ -2262,9 +2258,9 @@ def test_object_set_get_metadata_none_to_empty(): def test_object_set_get_metadata_overwrite_to_empty(): bucket_name = get_new_bucket() got = _set_get_metadata('oldmeta', bucket_name) - eq(got, 'oldmeta') + assert got == 'oldmeta' got = _set_get_metadata('', bucket_name) - eq(got, '') + assert got == '' @attr(resource='object.metadata') @attr(method='put') @@ -2288,7 +2284,7 @@ def test_object_set_get_unicode_metadata(): got = response['Metadata']['meta1'] print(got) print(u"Hello World\xe9") - eq(got, u"Hello World\xe9") + assert got == u"Hello World\xe9" def _set_get_metadata_unreadable(metadata, bucket_name=None): """ @@ -2320,7 +2316,7 @@ def test_object_metadata_replaced_on_put(): response = client.get_object(Bucket=bucket_name, Key='foo') got = response['Metadata'] - eq(got, {}) + assert got == {} @attr(resource='object') @attr(method='put') @@ -2334,7 +2330,7 @@ def test_object_write_file(): client.put_object(Bucket=bucket_name, Key='foo', Body=data) response = client.get_object(Bucket=bucket_name, Key='foo') body = _get_body(response) - eq(body, 'bar') + assert body == 'bar' def _get_post_url(bucket_name): endpoint = get_config_endpoint() @@ -2353,10 +2349,10 @@ def test_post_object_anonymous_request(): client.create_bucket(ACL='public-read-write', Bucket=bucket_name) r = requests.post(url, files=payload, verify=get_config_ssl_verify()) - eq(r.status_code, 204) + assert r.status_code == 204 response = client.get_object(Bucket=bucket_name, Key='foo.txt') body = _get_body(response) - eq(body, 'bar') + assert body == 'bar' @attr(resource='object') @attr(method='post') @@ -2395,10 +2391,10 @@ def test_post_object_authenticated_request(): ("Content-Type" , "text/plain"),('file', ('bar'))]) r = requests.post(url, files=payload, verify=get_config_ssl_verify()) - eq(r.status_code, 204) 
+ assert r.status_code == 204 response = client.get_object(Bucket=bucket_name, Key='foo.txt') body = _get_body(response) - eq(body, 'bar') + assert body == 'bar' @attr(resource='object') @attr(method='post') @@ -2436,10 +2432,10 @@ def test_post_object_authenticated_no_content_type(): ('file', ('bar'))]) r = requests.post(url, files=payload, verify=get_config_ssl_verify()) - eq(r.status_code, 204) + assert r.status_code == 204 response = client.get_object(Bucket=bucket_name, Key="foo.txt") body = _get_body(response) - eq(body, 'bar') + assert body == 'bar' @attr(resource='object') @attr(method='post') @@ -2478,7 +2474,7 @@ def test_post_object_authenticated_request_bad_access_key(): ("Content-Type" , "text/plain"),('file', ('bar'))]) r = requests.post(url, files=payload, verify=get_config_ssl_verify()) - eq(r.status_code, 403) + assert r.status_code == 403 @attr(resource='object') @attr(method='post') @@ -2495,9 +2491,9 @@ def test_post_object_set_success_code(): ("Content-Type" , "text/plain"),('file', ('bar'))]) r = requests.post(url, files=payload, verify=get_config_ssl_verify()) - eq(r.status_code, 201) + assert r.status_code == 201 message = ET.fromstring(r.content).find('Key') - eq(message.text,'foo.txt') + assert message.text == 'foo.txt' @attr(resource='object') @attr(method='post') @@ -2514,9 +2510,9 @@ def test_post_object_set_invalid_success_code(): ("Content-Type" , "text/plain"),('file', ('bar'))]) r = requests.post(url, files=payload, verify=get_config_ssl_verify()) - eq(r.status_code, 204) + assert r.status_code == 204 content = r.content.decode() - eq(content,'') + assert content == '' @attr(resource='object') @attr(method='post') @@ -2556,10 +2552,10 @@ def test_post_object_upload_larger_than_chunk(): ("Content-Type" , "text/plain"),('file', foo_string)]) r = requests.post(url, files=payload, verify=get_config_ssl_verify()) - eq(r.status_code, 204) + assert r.status_code == 204 response = client.get_object(Bucket=bucket_name, Key='foo.txt') body = 
_get_body(response) - eq(body, foo_string) + assert body == foo_string @attr(resource='object') @attr(method='post') @@ -2596,10 +2592,10 @@ def test_post_object_set_key_from_filename(): ("Content-Type" , "text/plain"),('file', ('foo.txt', 'bar'))]) r = requests.post(url, files=payload, verify=get_config_ssl_verify()) - eq(r.status_code, 204) + assert r.status_code == 204 response = client.get_object(Bucket=bucket_name, Key='foo.txt') body = _get_body(response) - eq(body, 'bar') + assert body == 'bar' @attr(resource='object') @attr(method='post') @@ -2637,7 +2633,7 @@ def test_post_object_ignored_header(): ("Content-Type" , "text/plain"),("x-ignore-foo" , "bar"),('file', ('bar'))]) r = requests.post(url, files=payload, verify=get_config_ssl_verify()) - eq(r.status_code, 204) + assert r.status_code == 204 @attr(resource='object') @attr(method='post') @@ -2676,7 +2672,7 @@ def test_post_object_case_insensitive_condition_fields(): ("Content-Type" , "text/plain"),('file', ('bar'))]) r = requests.post(url, files=payload, verify=get_config_ssl_verify()) - eq(r.status_code, 204) + assert r.status_code == 204 @attr(resource='object') @attr(method='post') @@ -2713,10 +2709,10 @@ def test_post_object_escaped_field_values(): ("Content-Type" , "text/plain"),('file', ('bar'))]) r = requests.post(url, files=payload, verify=get_config_ssl_verify()) - eq(r.status_code, 204) + assert r.status_code == 204 response = client.get_object(Bucket=bucket_name, Key='\$foo.txt') body = _get_body(response) - eq(body, 'bar') + assert body == 'bar' @attr(resource='object') @attr(method='post') @@ -2758,12 +2754,11 @@ def test_post_object_success_redirect_action(): ('file', ('bar'))]) r = requests.post(url, files=payload, verify=get_config_ssl_verify()) - eq(r.status_code, 200) + assert r.status_code == 200 url = r.url response = client.get_object(Bucket=bucket_name, Key='foo.txt') - eq(url, - '{rurl}?bucket={bucket}&key={key}&etag=%22{etag}%22'.format(rurl = redirect_url,\ - bucket = 
bucket_name, key = 'foo.txt', etag = response['ETag'].strip('"'))) + assert url == '{rurl}?bucket={bucket}&key={key}&etag=%22{etag}%22'.format(\ + rurl = redirect_url, bucket = bucket_name, key = 'foo.txt', etag = response['ETag'].strip('"')) @attr(resource='object') @attr(method='post') @@ -2800,7 +2795,7 @@ def test_post_object_invalid_signature(): ("Content-Type" , "text/plain"),('file', ('bar'))]) r = requests.post(url, files=payload, verify=get_config_ssl_verify()) - eq(r.status_code, 403) + assert r.status_code == 403 @attr(resource='object') @attr(method='post') @@ -2837,7 +2832,7 @@ def test_post_object_invalid_access_key(): ("Content-Type" , "text/plain"),('file', ('bar'))]) r = requests.post(url, files=payload, verify=get_config_ssl_verify()) - eq(r.status_code, 403) + assert r.status_code == 403 @attr(resource='object') @attr(method='post') @@ -2874,7 +2869,7 @@ def test_post_object_invalid_date_format(): ("Content-Type" , "text/plain"),('file', ('bar'))]) r = requests.post(url, files=payload, verify=get_config_ssl_verify()) - eq(r.status_code, 400) + assert r.status_code == 400 @attr(resource='object') @attr(method='post') @@ -2910,7 +2905,7 @@ def test_post_object_no_key_specified(): ("Content-Type" , "text/plain"),('file', ('bar'))]) r = requests.post(url, files=payload, verify=get_config_ssl_verify()) - eq(r.status_code, 400) + assert r.status_code == 400 @attr(resource='object') @attr(method='post') @@ -2947,7 +2942,7 @@ def test_post_object_missing_signature(): ("Content-Type" , "text/plain"),('file', ('bar'))]) r = requests.post(url, files=payload, verify=get_config_ssl_verify()) - eq(r.status_code, 400) + assert r.status_code == 400 @attr(resource='object') @attr(method='post') @@ -2983,7 +2978,7 @@ def test_post_object_missing_policy_condition(): ("Content-Type" , "text/plain"),('file', ('bar'))]) r = requests.post(url, files=payload, verify=get_config_ssl_verify()) - eq(r.status_code, 403) + assert r.status_code == 403 @attr(resource='object') 
@attr(method='post') @@ -3021,9 +3016,9 @@ def test_post_object_user_specified_header(): ("Content-Type" , "text/plain"),('x-amz-meta-foo' , 'barclamp'),('file', ('bar'))]) r = requests.post(url, files=payload, verify=get_config_ssl_verify()) - eq(r.status_code, 204) + assert r.status_code == 204 response = client.get_object(Bucket=bucket_name, Key='foo.txt') - eq(response['Metadata']['foo'], 'barclamp') + assert response['Metadata']['foo'] == 'barclamp' @attr(resource='object') @attr(method='post') @@ -3061,7 +3056,7 @@ def test_post_object_request_missing_policy_specified_field(): ("Content-Type" , "text/plain"),('file', ('bar'))]) r = requests.post(url, files=payload, verify=get_config_ssl_verify()) - eq(r.status_code, 403) + assert r.status_code == 403 @attr(resource='object') @attr(method='post') @@ -3098,7 +3093,7 @@ def test_post_object_condition_is_case_sensitive(): ("Content-Type" , "text/plain"),('file', ('bar'))]) r = requests.post(url, files=payload, verify=get_config_ssl_verify()) - eq(r.status_code, 400) + assert r.status_code == 400 @attr(resource='object') @attr(method='post') @@ -3135,7 +3130,7 @@ def test_post_object_expires_is_case_sensitive(): ("Content-Type" , "text/plain"),('file', ('bar'))]) r = requests.post(url, files=payload, verify=get_config_ssl_verify()) - eq(r.status_code, 400) + assert r.status_code == 400 @attr(resource='object') @attr(method='post') @@ -3172,7 +3167,7 @@ def test_post_object_expired_policy(): ("Content-Type" , "text/plain"),('file', ('bar'))]) r = requests.post(url, files=payload, verify=get_config_ssl_verify()) - eq(r.status_code, 403) + assert r.status_code == 403 @attr(resource='object') @attr(method='post') @@ -3209,7 +3204,7 @@ def test_post_object_invalid_request_field_value(): ("Content-Type" , "text/plain"),('x-amz-meta-foo' , 'barclamp'),('file', ('bar'))]) r = requests.post(url, files=payload, verify=get_config_ssl_verify()) - eq(r.status_code, 403) + assert r.status_code == 403 @attr(resource='object') 
@attr(method='post') @@ -3246,7 +3241,7 @@ def test_post_object_missing_expires_condition(): ("Content-Type" , "text/plain"),('file', ('bar'))]) r = requests.post(url, files=payload, verify=get_config_ssl_verify()) - eq(r.status_code, 400) + assert r.status_code == 400 @attr(resource='object') @attr(method='post') @@ -3275,7 +3270,7 @@ def test_post_object_missing_conditions_list(): ("Content-Type" , "text/plain"),('file', ('bar'))]) r = requests.post(url, files=payload, verify=get_config_ssl_verify()) - eq(r.status_code, 400) + assert r.status_code == 400 @attr(resource='object') @attr(method='post') @@ -3312,7 +3307,7 @@ def test_post_object_upload_size_limit_exceeded(): ("Content-Type" , "text/plain"),('file', ('bar'))]) r = requests.post(url, files=payload, verify=get_config_ssl_verify()) - eq(r.status_code, 400) + assert r.status_code == 400 @attr(resource='object') @attr(method='post') @@ -3349,7 +3344,7 @@ def test_post_object_missing_content_length_argument(): ("Content-Type" , "text/plain"),('file', ('bar'))]) r = requests.post(url, files=payload, verify=get_config_ssl_verify()) - eq(r.status_code, 400) + assert r.status_code == 400 @attr(resource='object') @attr(method='post') @@ -3386,7 +3381,7 @@ def test_post_object_invalid_content_length_argument(): ("Content-Type" , "text/plain"),('file', ('bar'))]) r = requests.post(url, files=payload, verify=get_config_ssl_verify()) - eq(r.status_code, 400) + assert r.status_code == 400 @attr(resource='object') @attr(method='post') @@ -3423,7 +3418,7 @@ def test_post_object_upload_size_below_minimum(): ("Content-Type" , "text/plain"),('file', ('bar'))]) r = requests.post(url, files=payload, verify=get_config_ssl_verify()) - eq(r.status_code, 400) + assert r.status_code == 400 @attr(resource='object') @attr(method='post') @@ -3456,7 +3451,7 @@ def test_post_object_empty_conditions(): ("Content-Type" , "text/plain"),('file', ('bar'))]) r = requests.post(url, files=payload, verify=get_config_ssl_verify()) - 
eq(r.status_code, 400) + assert r.status_code == 400 @attr(resource='object') @attr(method='get') @@ -3470,7 +3465,7 @@ def test_get_object_ifmatch_good(): response = client.get_object(Bucket=bucket_name, Key='foo', IfMatch=etag) body = _get_body(response) - eq(body, 'bar') + assert body == 'bar' @attr(resource='object') @attr(method='get') @@ -3483,8 +3478,8 @@ def test_get_object_ifmatch_failed(): e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key='foo', IfMatch='"ABCORZ"') status, error_code = _get_status_and_error_code(e.response) - eq(status, 412) - eq(error_code, 'PreconditionFailed') + assert status == 412 + assert error_code == 'PreconditionFailed' @attr(resource='object') @attr(method='get') @@ -3498,8 +3493,8 @@ def test_get_object_ifnonematch_good(): e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key='foo', IfNoneMatch=etag) status, error_code = _get_status_and_error_code(e.response) - eq(status, 304) - eq(e.response['Error']['Message'], 'Not Modified') + assert status == 304 + assert e.response['Error']['Message'] == 'Not Modified' @attr(resource='object') @attr(method='get') @@ -3512,7 +3507,7 @@ def test_get_object_ifnonematch_failed(): response = client.get_object(Bucket=bucket_name, Key='foo', IfNoneMatch='ABCORZ') body = _get_body(response) - eq(body, 'bar') + assert body == 'bar' @attr(resource='object') @attr(method='get') @@ -3525,7 +3520,7 @@ def test_get_object_ifmodifiedsince_good(): response = client.get_object(Bucket=bucket_name, Key='foo', IfModifiedSince='Sat, 29 Oct 1994 19:43:31 GMT') body = _get_body(response) - eq(body, 'bar') + assert body == 'bar' @attr(resource='object') @attr(method='get') @@ -3550,8 +3545,8 @@ def test_get_object_ifmodifiedsince_failed(): e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key='foo', IfModifiedSince=after_str) status, error_code = _get_status_and_error_code(e.response) - eq(status, 304) - eq(e.response['Error']['Message'], 'Not 
Modified') + assert status == 304 + assert e.response['Error']['Message'] == 'Not Modified' @attr(resource='object') @attr(method='get') @@ -3566,8 +3561,8 @@ def test_get_object_ifunmodifiedsince_good(): e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key='foo', IfUnmodifiedSince='Sat, 29 Oct 1994 19:43:31 GMT') status, error_code = _get_status_and_error_code(e.response) - eq(status, 412) - eq(error_code, 'PreconditionFailed') + assert status == 412 + assert error_code == 'PreconditionFailed' @attr(resource='object') @attr(method='get') @@ -3580,7 +3575,7 @@ def test_get_object_ifunmodifiedsince_failed(): response = client.get_object(Bucket=bucket_name, Key='foo', IfUnmodifiedSince='Sat, 29 Oct 2100 19:43:31 GMT') body = _get_body(response) - eq(body, 'bar') + assert body == 'bar' @attr(resource='object') @@ -3596,7 +3591,7 @@ def test_put_object_ifmatch_good(): response = client.get_object(Bucket=bucket_name, Key='foo') body = _get_body(response) - eq(body, 'bar') + assert body == 'bar' etag = response['ETag'].replace('"', '') @@ -3607,7 +3602,7 @@ def test_put_object_ifmatch_good(): response = client.get_object(Bucket=bucket_name, Key='foo') body = _get_body(response) - eq(body, 'zar') + assert body == 'zar' @attr(resource='object') @attr(method='get') @@ -3621,7 +3616,7 @@ def test_put_object_ifmatch_failed(): client.put_object(Bucket=bucket_name, Key='foo', Body='bar') response = client.get_object(Bucket=bucket_name, Key='foo') body = _get_body(response) - eq(body, 'bar') + assert body == 'bar' # pass in custom header 'If-Match' before PutObject call lf = (lambda **kwargs: kwargs['params']['headers'].update({'If-Match': '"ABCORZ"'})) @@ -3629,12 +3624,12 @@ def test_put_object_ifmatch_failed(): e = assert_raises(ClientError, client.put_object, Bucket=bucket_name, Key='foo', Body='zar') status, error_code = _get_status_and_error_code(e.response) - eq(status, 412) - eq(error_code, 'PreconditionFailed') + assert status == 412 + assert 
error_code == 'PreconditionFailed' response = client.get_object(Bucket=bucket_name, Key='foo') body = _get_body(response) - eq(body, 'bar') + assert body == 'bar' @attr(resource='object') @attr(method='put') @@ -3648,7 +3643,7 @@ def test_put_object_ifmatch_overwrite_existed_good(): client.put_object(Bucket=bucket_name, Key='foo', Body='bar') response = client.get_object(Bucket=bucket_name, Key='foo') body = _get_body(response) - eq(body, 'bar') + assert body == 'bar' lf = (lambda **kwargs: kwargs['params']['headers'].update({'If-Match': '*'})) client.meta.events.register('before-call.s3.PutObject', lf) @@ -3656,7 +3651,7 @@ def test_put_object_ifmatch_overwrite_existed_good(): response = client.get_object(Bucket=bucket_name, Key='foo') body = _get_body(response) - eq(body, 'zar') + assert body == 'zar' @attr(resource='object') @attr(method='put') @@ -3674,13 +3669,13 @@ def test_put_object_ifmatch_nonexisted_failed(): client.meta.events.register('before-call.s3.PutObject', lf) e = assert_raises(ClientError, client.put_object, Bucket=bucket_name, Key='foo', Body='bar') status, error_code = _get_status_and_error_code(e.response) - eq(status, 412) - eq(error_code, 'PreconditionFailed') + assert status == 412 + assert error_code == 'PreconditionFailed' e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key='foo') status, error_code = _get_status_and_error_code(e.response) - eq(status, 404) - eq(error_code, 'NoSuchKey') + assert status == 404 + assert error_code == 'NoSuchKey' @attr(resource='object') @attr(method='put') @@ -3694,7 +3689,7 @@ def test_put_object_ifnonmatch_good(): client.put_object(Bucket=bucket_name, Key='foo', Body='bar') response = client.get_object(Bucket=bucket_name, Key='foo') body = _get_body(response) - eq(body, 'bar') + assert body == 'bar' lf = (lambda **kwargs: kwargs['params']['headers'].update({'If-None-Match': 'ABCORZ'})) client.meta.events.register('before-call.s3.PutObject', lf) @@ -3702,7 +3697,7 @@ def 
test_put_object_ifnonmatch_good(): response = client.get_object(Bucket=bucket_name, Key='foo') body = _get_body(response) - eq(body, 'zar') + assert body == 'zar' @attr(resource='object') @attr(method='put') @@ -3719,7 +3714,7 @@ def test_put_object_ifnonmatch_failed(): response = client.get_object(Bucket=bucket_name, Key='foo') body = _get_body(response) - eq(body, 'bar') + assert body == 'bar' etag = response['ETag'].replace('"', '') @@ -3728,12 +3723,12 @@ def test_put_object_ifnonmatch_failed(): e = assert_raises(ClientError, client.put_object, Bucket=bucket_name, Key='foo', Body='zar') status, error_code = _get_status_and_error_code(e.response) - eq(status, 412) - eq(error_code, 'PreconditionFailed') + assert status == 412 + assert error_code == 'PreconditionFailed' response = client.get_object(Bucket=bucket_name, Key='foo') body = _get_body(response) - eq(body, 'bar') + assert body == 'bar' @attr(resource='object') @attr(method='put') @@ -3751,7 +3746,7 @@ def test_put_object_ifnonmatch_nonexisted_good(): response = client.get_object(Bucket=bucket_name, Key='foo') body = _get_body(response) - eq(body, 'bar') + assert body == 'bar' @attr(resource='object') @attr(method='put') @@ -3768,19 +3763,19 @@ def test_put_object_ifnonmatch_overwrite_existed_failed(): response = client.get_object(Bucket=bucket_name, Key='foo') body = _get_body(response) - eq(body, 'bar') + assert body == 'bar' lf = (lambda **kwargs: kwargs['params']['headers'].update({'If-None-Match': '*'})) client.meta.events.register('before-call.s3.PutObject', lf) e = assert_raises(ClientError, client.put_object, Bucket=bucket_name, Key='foo', Body='zar') status, error_code = _get_status_and_error_code(e.response) - eq(status, 412) - eq(error_code, 'PreconditionFailed') + assert status == 412 + assert error_code == 'PreconditionFailed' response = client.get_object(Bucket=bucket_name, Key='foo') body = _get_body(response) - eq(body, 'bar') + assert body == 'bar' def _setup_bucket_object_acl(bucket_acl, 
object_acl): """ @@ -3813,7 +3808,7 @@ def test_object_raw_get(): unauthenticated_client = get_unauthenticated_client() response = unauthenticated_client.get_object(Bucket=bucket_name, Key='foo') - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 @attr(resource='object') @attr(method='get') @@ -3830,8 +3825,8 @@ def test_object_raw_get_bucket_gone(): e = assert_raises(ClientError, unauthenticated_client.get_object, Bucket=bucket_name, Key='foo') status, error_code = _get_status_and_error_code(e.response) - eq(status, 404) - eq(error_code, 'NoSuchBucket') + assert status == 404 + assert error_code == 'NoSuchBucket' @attr(resource='object') @attr(method='get') @@ -3848,8 +3843,8 @@ def test_object_delete_key_bucket_gone(): e = assert_raises(ClientError, unauthenticated_client.delete_object, Bucket=bucket_name, Key='foo') status, error_code = _get_status_and_error_code(e.response) - eq(status, 404) - eq(error_code, 'NoSuchBucket') + assert status == 404 + assert error_code == 'NoSuchBucket' @attr(resource='object') @attr(method='get') @@ -3865,8 +3860,8 @@ def test_object_raw_get_object_gone(): e = assert_raises(ClientError, unauthenticated_client.get_object, Bucket=bucket_name, Key='foo') status, error_code = _get_status_and_error_code(e.response) - eq(status, 404) - eq(error_code, 'NoSuchKey') + assert status == 404 + assert error_code == 'NoSuchKey' @attr(resource='bucket') @attr(method='head') @@ -3877,7 +3872,7 @@ def test_bucket_head(): client = get_client() response = client.head_bucket(Bucket=bucket_name) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 @attr(resource='bucket') @attr(method='head') @@ -3890,11 +3885,11 @@ def test_bucket_head_notexist(): e = assert_raises(ClientError, client.head_bucket, Bucket=bucket_name) status, error_code = _get_status_and_error_code(e.response) - eq(status, 404) + assert status == 
404 # n.b., RGW does not send a response document for this operation, # which seems consistent with # https://docs.aws.amazon.com/AmazonS3/latest/API/API_HeadBucket.html - #eq(error_code, 'NoSuchKey') + #assert error_code == 'NoSuchKey' @attr('fails_on_aws') @pytest.mark.fails_on_aws @@ -3909,14 +3904,14 @@ def test_bucket_head_extended(): client = get_client() response = client.head_bucket(Bucket=bucket_name) - eq(int(response['ResponseMetadata']['HTTPHeaders']['x-rgw-object-count']), 0) - eq(int(response['ResponseMetadata']['HTTPHeaders']['x-rgw-bytes-used']), 0) + assert int(response['ResponseMetadata']['HTTPHeaders']['x-rgw-object-count']) == 0 + assert int(response['ResponseMetadata']['HTTPHeaders']['x-rgw-bytes-used']) == 0 _create_objects(bucket_name=bucket_name, keys=['foo','bar','baz']) response = client.head_bucket(Bucket=bucket_name) - eq(int(response['ResponseMetadata']['HTTPHeaders']['x-rgw-object-count']), 3) - eq(int(response['ResponseMetadata']['HTTPHeaders']['x-rgw-bytes-used']), 9) + assert int(response['ResponseMetadata']['HTTPHeaders']['x-rgw-object-count']) == 3 + assert int(response['ResponseMetadata']['HTTPHeaders']['x-rgw-bytes-used']) == 9 @attr(resource='bucket.acl') @attr(method='get') @@ -3927,7 +3922,7 @@ def test_object_raw_get_bucket_acl(): unauthenticated_client = get_unauthenticated_client() response = unauthenticated_client.get_object(Bucket=bucket_name, Key='foo') - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 @attr(resource='object.acl') @attr(method='get') @@ -3939,8 +3934,8 @@ def test_object_raw_get_object_acl(): unauthenticated_client = get_unauthenticated_client() e = assert_raises(ClientError, unauthenticated_client.get_object, Bucket=bucket_name, Key='foo') status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) - eq(error_code, 'AccessDenied') + assert status == 403 + assert error_code == 'AccessDenied' @attr(resource='object') 
@attr(method='ACLs') @@ -3951,7 +3946,7 @@ def test_object_raw_authenticated(): client = get_client() response = client.get_object(Bucket=bucket_name, Key='foo') - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 @attr(resource='object') @attr(method='get') @@ -3963,12 +3958,12 @@ def test_object_raw_response_headers(): client = get_client() response = client.get_object(Bucket=bucket_name, Key='foo', ResponseCacheControl='no-cache', ResponseContentDisposition='bla', ResponseContentEncoding='aaa', ResponseContentLanguage='esperanto', ResponseContentType='foo/bar', ResponseExpires='123') - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) - eq(response['ResponseMetadata']['HTTPHeaders']['content-type'], 'foo/bar') - eq(response['ResponseMetadata']['HTTPHeaders']['content-disposition'], 'bla') - eq(response['ResponseMetadata']['HTTPHeaders']['content-language'], 'esperanto') - eq(response['ResponseMetadata']['HTTPHeaders']['content-encoding'], 'aaa') - eq(response['ResponseMetadata']['HTTPHeaders']['cache-control'], 'no-cache') + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 + assert response['ResponseMetadata']['HTTPHeaders']['content-type'] == 'foo/bar' + assert response['ResponseMetadata']['HTTPHeaders']['content-disposition'] == 'bla' + assert response['ResponseMetadata']['HTTPHeaders']['content-language'] == 'esperanto' + assert response['ResponseMetadata']['HTTPHeaders']['content-encoding'] == 'aaa' + assert response['ResponseMetadata']['HTTPHeaders']['cache-control'] == 'no-cache' @attr(resource='object') @attr(method='ACLs') @@ -3979,7 +3974,7 @@ def test_object_raw_authenticated_bucket_acl(): client = get_client() response = client.get_object(Bucket=bucket_name, Key='foo') - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 @attr(resource='object') @attr(method='ACLs') @@ -3990,7 +3985,7 @@ def 
test_object_raw_authenticated_object_acl(): client = get_client() response = client.get_object(Bucket=bucket_name, Key='foo') - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 @attr(resource='object') @attr(method='get') @@ -4005,8 +4000,8 @@ def test_object_raw_authenticated_bucket_gone(): e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key='foo') status, error_code = _get_status_and_error_code(e.response) - eq(status, 404) - eq(error_code, 'NoSuchBucket') + assert status == 404 + assert error_code == 'NoSuchBucket' @attr(resource='object') @attr(method='get') @@ -4020,8 +4015,8 @@ def test_object_raw_authenticated_object_gone(): e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key='foo') status, error_code = _get_status_and_error_code(e.response) - eq(status, 404) - eq(error_code, 'NoSuchKey') + assert status == 404 + assert error_code == 'NoSuchKey' @attr(resource='object') @attr(method='get') @@ -4035,7 +4030,7 @@ def test_object_raw_get_x_amz_expires_not_expired(): url = client.generate_presigned_url(ClientMethod='get_object', Params=params, ExpiresIn=100000, HttpMethod='GET') res = requests.get(url, verify=get_config_ssl_verify()).__dict__ - eq(res['status_code'], 200) + assert res['status_code'] == 200 @attr(resource='object') @attr(method='get') @@ -4049,7 +4044,7 @@ def test_object_raw_get_x_amz_expires_out_range_zero(): url = client.generate_presigned_url(ClientMethod='get_object', Params=params, ExpiresIn=0, HttpMethod='GET') res = requests.get(url, verify=get_config_ssl_verify()).__dict__ - eq(res['status_code'], 403) + assert res['status_code'] == 403 @attr(resource='object') @attr(method='get') @@ -4063,7 +4058,7 @@ def test_object_raw_get_x_amz_expires_out_max_range(): url = client.generate_presigned_url(ClientMethod='get_object', Params=params, ExpiresIn=609901, HttpMethod='GET') res = requests.get(url, 
verify=get_config_ssl_verify()).__dict__ - eq(res['status_code'], 403) + assert res['status_code'] == 403 @attr(resource='object') @attr(method='get') @@ -4077,7 +4072,7 @@ def test_object_raw_get_x_amz_expires_out_positive_range(): url = client.generate_presigned_url(ClientMethod='get_object', Params=params, ExpiresIn=-7, HttpMethod='GET') res = requests.get(url, verify=get_config_ssl_verify()).__dict__ - eq(res['status_code'], 403) + assert res['status_code'] == 403 @attr(resource='object') @@ -4094,8 +4089,8 @@ def test_object_anon_put(): e = assert_raises(ClientError, unauthenticated_client.put_object, Bucket=bucket_name, Key='foo', Body='foo') status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) - eq(error_code, 'AccessDenied') + assert status == 403 + assert error_code == 'AccessDenied' @attr(resource='object') @attr(method='put') @@ -4109,7 +4104,7 @@ def test_object_anon_put_write_access(): unauthenticated_client = get_unauthenticated_client() response = unauthenticated_client.put_object(Bucket=bucket_name, Key='foo', Body='foo') - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 @attr(resource='object') @attr(method='put') @@ -4120,7 +4115,7 @@ def test_object_put_authenticated(): client = get_client() response = client.put_object(Bucket=bucket_name, Key='foo', Body='foo') - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 @attr(resource='object') @attr(method='put') @@ -4136,7 +4131,7 @@ def test_object_raw_put_authenticated_expired(): # params wouldn't take a 'Body' parameter so we're passing it in here res = requests.put(url, data="foo", verify=get_config_ssl_verify()).__dict__ - eq(res['status_code'], 403) + assert res['status_code'] == 403 def check_bad_bucket_name(bucket_name): """ @@ -4146,8 +4141,8 @@ def check_bad_bucket_name(bucket_name): client = get_client() e = 
assert_raises(ClientError, client.create_bucket, Bucket=bucket_name) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) - eq(error_code, 'InvalidBucketName') + assert status == 400 + assert error_code == 'InvalidBucketName' # AWS does not enforce all documented bucket restrictions. @@ -4216,7 +4211,7 @@ def check_good_bucket_name(name, _prefix=None): ) client = get_client() response = client.create_bucket(Bucket=bucket_name) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 def _test_bucket_create_naming_good_long(length): """ @@ -4240,7 +4235,7 @@ def _test_bucket_create_naming_good_long(length): ) client = get_client() response = client.create_bucket(Bucket=bucket_name) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 # Breaks DNS with SubdomainCallingFormat @attr('fails_with_subdomain') @@ -4310,7 +4305,7 @@ def test_bucket_list_long_name(): ) bucket = get_new_bucket_resource(name=bucket_name) is_empty = _bucket_is_empty(bucket) - eq(is_empty, True) + assert is_empty == True # AWS does not enforce all documented bucket restrictions. 
# http://docs.amazonwebservices.com/AmazonS3/2006-03-01/dev/index.html?BucketRestrictions.html @@ -4334,8 +4329,8 @@ def test_bucket_create_naming_bad_ip(): def test_bucket_create_naming_dns_underscore(): invalid_bucketname = 'foo_bar' status, error_code = check_invalid_bucketname(invalid_bucketname) - eq(status, 400) - eq(error_code, 'InvalidBucketName') + assert status == 400 + assert error_code == 'InvalidBucketName' # Breaks DNS with SubdomainCallingFormat @attr('fails_with_subdomain') @@ -4363,8 +4358,8 @@ def test_bucket_create_naming_dns_long(): def test_bucket_create_naming_dns_dash_at_end(): invalid_bucketname = 'foo-' status, error_code = check_invalid_bucketname(invalid_bucketname) - eq(status, 400) - eq(error_code, 'InvalidBucketName') + assert status == 400 + assert error_code == 'InvalidBucketName' # Breaks DNS with SubdomainCallingFormat @@ -4379,8 +4374,8 @@ def test_bucket_create_naming_dns_dash_at_end(): def test_bucket_create_naming_dns_dot_dot(): invalid_bucketname = 'foo..bar' status, error_code = check_invalid_bucketname(invalid_bucketname) - eq(status, 400) - eq(error_code, 'InvalidBucketName') + assert status == 400 + assert error_code == 'InvalidBucketName' # Breaks DNS with SubdomainCallingFormat @@ -4395,8 +4390,8 @@ def test_bucket_create_naming_dns_dot_dot(): def test_bucket_create_naming_dns_dot_dash(): invalid_bucketname = 'foo.-bar' status, error_code = check_invalid_bucketname(invalid_bucketname) - eq(status, 400) - eq(error_code, 'InvalidBucketName') + assert status == 400 + assert error_code == 'InvalidBucketName' # Breaks DNS with SubdomainCallingFormat @@ -4411,8 +4406,8 @@ def test_bucket_create_naming_dns_dot_dash(): def test_bucket_create_naming_dns_dash_dot(): invalid_bucketname = 'foo-.bar' status, error_code = check_invalid_bucketname(invalid_bucketname) - eq(status, 400) - eq(error_code, 'InvalidBucketName') + assert status == 400 + assert error_code == 'InvalidBucketName' @attr(resource='bucket') @attr(method='put') @@ 
-4428,8 +4423,8 @@ def test_bucket_create_exists(): response = client.create_bucket(Bucket=bucket_name) except ClientError as e: status, error_code = _get_status_and_error_code(e.response) - eq(e.status, 409) - eq(e.error_code, 'BucketAlreadyOwnedByYou') + assert status == 409 + assert error_code == 'BucketAlreadyOwnedByYou' @attr(resource='bucket') @attr(method='get') @@ -4448,7 +4443,7 @@ def test_bucket_get_location(): response = client.get_bucket_location(Bucket=bucket_name) if location_constraint == "": location_constraint = None - eq(response['LocationConstraint'], location_constraint) + assert response['LocationConstraint'] == location_constraint @attr(resource='bucket') @attr(method='put') @@ -4467,8 +4462,8 @@ def test_bucket_create_exists_nonowner(): client.create_bucket(Bucket=bucket_name) e = assert_raises(ClientError, alt_client.create_bucket, Bucket=bucket_name) status, error_code = _get_status_and_error_code(e.response) - eq(status, 409) - eq(error_code, 'BucketAlreadyExists') + assert status == 409 + assert error_code == 'BucketAlreadyExists' @attr(resource='bucket') @attr(method='put') @@ -4483,8 +4478,8 @@ def test_bucket_recreate_overwrite_acl(): client.create_bucket(Bucket=bucket_name, ACL='public-read') e = assert_raises(ClientError, client.create_bucket, Bucket=bucket_name) status, error_code = _get_status_and_error_code(e.response) - eq(status, 409) - eq(error_code, 'BucketAlreadyExists') + assert status == 409 + assert error_code == 'BucketAlreadyExists' @attr(resource='bucket') @attr(method='put') @@ -4499,13 +4494,13 @@ def test_bucket_recreate_new_acl(): client.create_bucket(Bucket=bucket_name) e = assert_raises(ClientError, client.create_bucket, Bucket=bucket_name, ACL='public-read') status, error_code = _get_status_and_error_code(e.response) - eq(status, 409) - eq(error_code, 'BucketAlreadyExists') + assert status == 409 + assert error_code == 'BucketAlreadyExists' def check_access_denied(fn, *args, **kwargs): e = 
assert_raises(ClientError, fn, *args, **kwargs) status = _get_status(e.response) - eq(status, 403) + assert status == 403 def check_grants(got, want): @@ -4513,7 +4508,7 @@ def check_grants(got, want): Check that grants list in got matches the dictionaries in want, in any order. """ - eq(len(got), len(want)) + assert len(got) == len(want) # There are instances when got does not match due the order of item. if got[0]["Grantee"].get("DisplayName"): @@ -4523,13 +4518,13 @@ def check_grants(got, want): for g, w in zip(got, want): w = dict(w) g = dict(g) - eq(g.pop('Permission', None), w['Permission']) - eq(g['Grantee'].pop('DisplayName', None), w['DisplayName']) - eq(g['Grantee'].pop('ID', None), w['ID']) - eq(g['Grantee'].pop('Type', None), w['Type']) - eq(g['Grantee'].pop('URI', None), w['URI']) - eq(g['Grantee'].pop('EmailAddress', None), w['EmailAddress']) - eq(g, {'Grantee': {}}) + assert g.pop('Permission', None) == w['Permission'] + assert g['Grantee'].pop('DisplayName', None) == w['DisplayName'] + assert g['Grantee'].pop('ID', None) == w['ID'] + assert g['Grantee'].pop('Type', None) == w['Type'] + assert g['Grantee'].pop('URI', None) == w['URI'] + assert g['Grantee'].pop('EmailAddress', None) == w['EmailAddress'] + assert g == {'Grantee': {}} @attr(resource='bucket') @@ -4545,8 +4540,8 @@ def test_bucket_acl_default(): display_name = get_main_display_name() user_id = get_main_user_id() - eq(response['Owner']['DisplayName'], display_name) - eq(response['Owner']['ID'], user_id) + assert response['Owner']['DisplayName'] == display_name + assert response['Owner']['ID'] == user_id grants = response['Grants'] check_grants( @@ -5063,7 +5058,7 @@ def test_object_acl_full_control_verify_owner(): alt_client.put_object_acl(Bucket=bucket_name, Key='foo', AccessControlPolicy=grant) response = alt_client.get_object_acl(Bucket=bucket_name, Key='foo') - eq(response['Owner']['ID'], main_user_id) + assert response['Owner']['ID'] == main_user_id def 
add_obj_user_grant(bucket_name, key, grant): """ @@ -5118,8 +5113,8 @@ def test_object_acl_full_control_verify_attributes(): main_client.put_object_acl(Bucket=bucket_name, Key='foo', AccessControlPolicy=grants) response = main_client.get_object(Bucket=bucket_name, Key='foo') - eq(content_type, response['ContentType']) - eq(etag, response['ETag']) + assert content_type == response['ContentType'] + assert etag == response['ETag'] @attr(resource='bucket') @attr(method='ACLs') @@ -5130,7 +5125,7 @@ def test_bucket_acl_canned_private_to_private(): client = get_client() response = client.put_bucket_acl(Bucket=bucket_name, ACL='private') - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 def add_bucket_user_grant(bucket_name, grant): """ @@ -5372,8 +5367,8 @@ def test_bucket_acl_grant_userid_fullcontrol(): main_display_name = get_main_display_name() main_user_id = get_main_user_id() - eq(owner_id, main_user_id) - eq(owner_display_name, main_display_name) + assert owner_id == main_user_id + assert owner_display_name == main_display_name @attr(resource='bucket') @attr(method='ACLs') @@ -5465,8 +5460,8 @@ def test_bucket_acl_grant_nonexist_user(): e = assert_raises(ClientError, client.put_bucket_acl, Bucket=bucket_name, AccessControlPolicy=grant) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) - eq(error_code, 'InvalidArgument') + assert status == 400 + assert error_code == 'InvalidArgument' @attr(resource='bucket') @attr(method='ACLs') @@ -5753,8 +5748,8 @@ def test_bucket_acl_grant_email_not_exist(): e = assert_raises(ClientError, client.put_bucket_acl, Bucket=bucket_name, AccessControlPolicy = grant) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) - eq(error_code, 'UnresolvableGrantByEmailAddress') + assert status == 400 + assert error_code == 'UnresolvableGrantByEmailAddress' @attr(resource='bucket') @attr(method='ACLs') @@ -5778,7 +5773,7 @@ 
def test_bucket_acl_revoke_all(): response = client.get_bucket_acl(Bucket=bucket_name) - eq(len(response['Grants']), 0) + assert len(response['Grants']) == 0 # set policy back to original so that bucket can be cleaned up policy['Grants'] = old_grants @@ -5926,7 +5921,7 @@ def test_access_bucket_private_object_publicread(): body = _get_body(response) # a should be public-read, b gets default (private) - eq(body, 'foocontent') + assert body == 'foocontent' check_access_denied(alt_client.put_object, Bucket=bucket_name, Key=key1, Body='foooverwrite') alt_client2 = get_alt_client() @@ -5952,7 +5947,7 @@ def test_access_bucket_private_objectv2_publicread(): body = _get_body(response) # a should be public-read, b gets default (private) - eq(body, 'foocontent') + assert body == 'foocontent' check_access_denied(alt_client.put_object, Bucket=bucket_name, Key=key1, Body='foooverwrite') alt_client2 = get_alt_client() @@ -5976,7 +5971,7 @@ def test_access_bucket_private_object_publicreadwrite(): # a should be public-read-only ... because it is in a private bucket # b gets default (private) - eq(body, 'foocontent') + assert body == 'foocontent' check_access_denied(alt_client.put_object, Bucket=bucket_name, Key=key1, Body='foooverwrite') alt_client2 = get_alt_client() @@ -6002,7 +5997,7 @@ def test_access_bucket_private_objectv2_publicreadwrite(): # a should be public-read-only ... 
because it is in a private bucket # b gets default (private) - eq(body, 'foocontent') + assert body == 'foocontent' check_access_denied(alt_client.put_object, Bucket=bucket_name, Key=key1, Body='foooverwrite') alt_client2 = get_alt_client() @@ -6033,7 +6028,7 @@ def test_access_bucket_publicread_object_private(): objs = get_objects_list(bucket=bucket_name, client=alt_client3) - eq(objs, ['bar', 'foo']) + assert objs == ['bar', 'foo'] check_access_denied(alt_client3.put_object, Bucket=bucket_name, Key=newkey, Body='newcontent') @attr(resource='object') @@ -6048,7 +6043,7 @@ def test_access_bucket_publicread_object_publicread(): # a should be public-read, b gets default (private) body = _get_body(response) - eq(body, 'foocontent') + assert body == 'foocontent' check_access_denied(alt_client.put_object, Bucket=bucket_name, Key=key1, Body='foooverwrite') @@ -6060,7 +6055,7 @@ def test_access_bucket_publicread_object_publicread(): objs = get_objects_list(bucket=bucket_name, client=alt_client3) - eq(objs, ['bar', 'foo']) + assert objs == ['bar', 'foo'] check_access_denied(alt_client3.put_object, Bucket=bucket_name, Key=newkey, Body='newcontent') @@ -6078,7 +6073,7 @@ def test_access_bucket_publicread_object_publicreadwrite(): # a should be public-read-only ... 
because it is in a r/o bucket # b gets default (private) - eq(body, 'foocontent') + assert body == 'foocontent' check_access_denied(alt_client.put_object, Bucket=bucket_name, Key=key1, Body='foooverwrite') @@ -6090,7 +6085,7 @@ def test_access_bucket_publicread_object_publicreadwrite(): objs = get_objects_list(bucket=bucket_name, client=alt_client3) - eq(objs, ['bar', 'foo']) + assert objs == ['bar', 'foo'] check_access_denied(alt_client3.put_object, Bucket=bucket_name, Key=newkey, Body='newcontent') @@ -6110,7 +6105,7 @@ def test_access_bucket_publicreadwrite_object_private(): alt_client.put_object(Bucket=bucket_name, Key=key2, Body='baroverwrite') objs = get_objects_list(bucket=bucket_name, client=alt_client) - eq(objs, ['bar', 'foo']) + assert objs == ['bar', 'foo'] alt_client.put_object(Bucket=bucket_name, Key=newkey, Body='newcontent') @attr(resource='object') @@ -6125,14 +6120,14 @@ def test_access_bucket_publicreadwrite_object_publicread(): response = alt_client.get_object(Bucket=bucket_name, Key=key1) body = _get_body(response) - eq(body, 'foocontent') + assert body == 'foocontent' alt_client.put_object(Bucket=bucket_name, Key=key1, Body='barcontent') check_access_denied(alt_client.get_object, Bucket=bucket_name, Key=key2) alt_client.put_object(Bucket=bucket_name, Key=key2, Body='baroverwrite') objs = get_objects_list(bucket=bucket_name, client=alt_client) - eq(objs, ['bar', 'foo']) + assert objs == ['bar', 'foo'] alt_client.put_object(Bucket=bucket_name, Key=newkey, Body='newcontent') @attr(resource='object') @@ -6146,12 +6141,12 @@ def test_access_bucket_publicreadwrite_object_publicreadwrite(): body = _get_body(response) # a should be public-read-write, b gets default (private) - eq(body, 'foocontent') + assert body == 'foocontent' alt_client.put_object(Bucket=bucket_name, Key=key1, Body='foooverwrite') check_access_denied(alt_client.get_object, Bucket=bucket_name, Key=key2) alt_client.put_object(Bucket=bucket_name, Key=key2, Body='baroverwrite') objs = 
get_objects_list(bucket=bucket_name, client=alt_client) - eq(objs, ['bar', 'foo']) + assert objs == ['bar', 'foo'] alt_client.put_object(Bucket=bucket_name, Key=newkey, Body='newcontent') @attr(resource='bucket') @@ -6209,7 +6204,7 @@ def test_list_buckets_anonymous(): # allowing us to vary the calling format in testing. unauthenticated_client = get_unauthenticated_client() response = unauthenticated_client.list_buckets() - eq(len(response['Buckets']), 0) + assert len(response['Buckets']) == 0 @attr(resource='bucket') @attr(method='get') @@ -6219,8 +6214,8 @@ def test_list_buckets_invalid_auth(): bad_auth_client = get_bad_auth_client() e = assert_raises(ClientError, bad_auth_client.list_buckets) status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) - eq(error_code, 'InvalidAccessKeyId') + assert status == 403 + assert error_code == 'InvalidAccessKeyId' @attr(resource='bucket') @attr(method='get') @@ -6231,8 +6226,8 @@ def test_list_buckets_bad_auth(): bad_auth_client = get_bad_auth_client(aws_access_key_id=main_access_key) e = assert_raises(ClientError, bad_auth_client.list_buckets) status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) - eq(error_code, 'SignatureDoesNotMatch') + assert status == 403 + assert error_code == 'SignatureDoesNotMatch' @pytest.fixture def override_prefix_a(): @@ -6287,13 +6282,13 @@ def test_bucket_recreate_not_overriding(): bucket_name = _create_objects(keys=key_names) objs_list = get_objects_list(bucket_name) - eq(key_names, objs_list) + assert key_names == objs_list client = get_client() client.create_bucket(Bucket=bucket_name) objs_list = get_objects_list(bucket_name) - eq(key_names, objs_list) + assert key_names == objs_list @attr(resource='object') @attr(method='put') @@ -6320,15 +6315,15 @@ def test_bucket_create_special_key_names(): bucket_name = _create_objects(keys=key_names) objs_list = get_objects_list(bucket_name) - eq(key_names, objs_list) + assert key_names == objs_list client 
= get_client() for name in key_names: - eq((name in objs_list), True) + assert name in objs_list response = client.get_object(Bucket=bucket_name, Key=name) body = _get_body(response) - eq(name, body) + assert name == body client.put_object_acl(Bucket=bucket_name, Key=name, ACL='private') @attr(resource='bucket') @@ -6341,10 +6336,10 @@ def test_bucket_list_special_prefix(): objs_list = get_objects_list(bucket_name) - eq(len(objs_list), 5) + assert len(objs_list) == 5 objs_list = get_objects_list(bucket_name, prefix='_bla/') - eq(len(objs_list), 4) + assert len(objs_list) == 4 @attr(resource='object') @attr(method='put') @@ -6363,7 +6358,7 @@ def test_object_copy_zero_size(): client.copy(copy_source, bucket_name, 'bar321foo') response = client.get_object(Bucket=bucket_name, Key='bar321foo') - eq(response['ContentLength'], 0) + assert response['ContentLength'] == 0 @attr(resource='object') @attr(method='put') @@ -6381,7 +6376,7 @@ def test_object_copy_16m(): key2 = 'obj2' client.copy_object(Bucket=bucket_name, Key=key2, CopySource=copy_source) response = client.get_object(Bucket=bucket_name, Key=key2) - eq(response['ContentLength'], 16*1024*1024) + assert response['ContentLength'] == 16*1024*1024 @attr(resource='object') @attr(method='put') @@ -6400,7 +6395,7 @@ def test_object_copy_same_bucket(): response = client.get_object(Bucket=bucket_name, Key='bar321foo') body = _get_body(response) - eq('foo', body) + assert 'foo' == body @attr(resource='object') @attr(method='put') @@ -6421,9 +6416,9 @@ def test_object_copy_verify_contenttype(): response = client.get_object(Bucket=bucket_name, Key='bar321foo') body = _get_body(response) - eq('foo', body) + assert 'foo' == body response_content_type = response['ContentType'] - eq(response_content_type, content_type) + assert response_content_type == content_type @attr(resource='object') @attr(method='put') @@ -6438,8 +6433,8 @@ def test_object_copy_to_itself(): e = assert_raises(ClientError, client.copy, copy_source, 
bucket_name, 'foo123bar') status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) - eq(error_code, 'InvalidRequest') + assert status == 400 + assert error_code == 'InvalidRequest' @attr(resource='object') @attr(method='put') @@ -6456,7 +6451,7 @@ def test_object_copy_to_itself_with_metadata(): client.copy_object(Bucket=bucket_name, CopySource=copy_source, Key='foo123bar', Metadata=metadata, MetadataDirective='REPLACE') response = client.get_object(Bucket=bucket_name, Key='foo123bar') - eq(response['Metadata'], metadata) + assert response['Metadata'] == metadata @attr(resource='object') @attr(method='put') @@ -6477,7 +6472,7 @@ def test_object_copy_diff_bucket(): response = client.get_object(Bucket=bucket_name2, Key='bar321foo') body = _get_body(response) - eq('foo', body) + assert 'foo' == body @attr(resource='object') @attr(method='put') @@ -6497,7 +6492,7 @@ def test_object_copy_not_owned_bucket(): e = assert_raises(ClientError, alt_client.copy, copy_source, bucket_name2, 'bar321foo') status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) + assert status == 403 @attr(resource='object') @attr(method='put') @@ -6567,10 +6562,10 @@ def test_object_copy_retaining_metadata(): client.copy_object(Bucket=bucket_name, CopySource=copy_source, Key='bar321foo') response = client.get_object(Bucket=bucket_name, Key='bar321foo') - eq(content_type, response['ContentType']) - eq(metadata, response['Metadata']) + assert content_type == response['ContentType'] + assert metadata == response['Metadata'] body = _get_body(response) - eq(size, response['ContentLength']) + assert size == response['ContentLength'] @attr(resource='object') @attr(method='put') @@ -6593,9 +6588,9 @@ def test_object_copy_replacing_metadata(): client.copy_object(Bucket=bucket_name, CopySource=copy_source, Key='bar321foo', Metadata=metadata, MetadataDirective='REPLACE', ContentType=content_type) response = client.get_object(Bucket=bucket_name, Key='bar321foo') - 
eq(content_type, response['ContentType']) - eq(metadata, response['Metadata']) - eq(size, response['ContentLength']) + assert content_type == response['ContentType'] + assert metadata == response['Metadata'] + assert size == response['ContentLength'] @attr(resource='object') @attr(method='put') @@ -6607,7 +6602,7 @@ def test_object_copy_bucket_not_found(): copy_source = {'Bucket': bucket_name + "-fake", 'Key': 'foo123bar'} e = assert_raises(ClientError, client.copy, copy_source, bucket_name, 'bar321foo') status = _get_status(e.response) - eq(status, 404) + assert status == 404 @attr(resource='object') @attr(method='put') @@ -6619,7 +6614,7 @@ def test_object_copy_key_not_found(): copy_source = {'Bucket': bucket_name, 'Key': 'foo123bar'} e = assert_raises(ClientError, client.copy, copy_source, bucket_name, 'bar321foo') status = _get_status(e.response) - eq(status, 404) + assert status == 404 @attr(resource='object') @attr(method='put') @@ -6647,8 +6642,8 @@ def test_object_copy_versioned_bucket(): client.copy_object(Bucket=bucket_name, CopySource=copy_source, Key=key2) response = client.get_object(Bucket=bucket_name, Key=key2) body = _get_body(response) - eq(data_str, body) - eq(size, response['ContentLength']) + assert data_str == body + assert size == response['ContentLength'] # second copy @@ -6658,8 +6653,8 @@ def test_object_copy_versioned_bucket(): client.copy_object(Bucket=bucket_name, CopySource=copy_source, Key=key3) response = client.get_object(Bucket=bucket_name, Key=key3) body = _get_body(response) - eq(data_str, body) - eq(size, response['ContentLength']) + assert data_str == body + assert size == response['ContentLength'] # copy to another versioned bucket bucket_name2 = get_new_bucket() @@ -6669,8 +6664,8 @@ def test_object_copy_versioned_bucket(): client.copy_object(Bucket=bucket_name2, CopySource=copy_source, Key=key4) response = client.get_object(Bucket=bucket_name2, Key=key4) body = _get_body(response) - eq(data_str, body) - eq(size, 
response['ContentLength']) + assert data_str == body + assert size == response['ContentLength'] # copy to another non versioned bucket bucket_name3 = get_new_bucket() @@ -6679,8 +6674,8 @@ def test_object_copy_versioned_bucket(): client.copy_object(Bucket=bucket_name3, CopySource=copy_source, Key=key5) response = client.get_object(Bucket=bucket_name3, Key=key5) body = _get_body(response) - eq(data_str, body) - eq(size, response['ContentLength']) + assert data_str == body + assert size == response['ContentLength'] # copy from a non versioned bucket copy_source = {'Bucket': bucket_name3, 'Key': key5} @@ -6688,8 +6683,8 @@ def test_object_copy_versioned_bucket(): client.copy_object(Bucket=bucket_name, CopySource=copy_source, Key=key6) response = client.get_object(Bucket=bucket_name, Key=key6) body = _get_body(response) - eq(data_str, body) - eq(size, response['ContentLength']) + assert data_str == body + assert size == response['ContentLength'] @attr(resource='object') @attr(method='put') @@ -6790,10 +6785,10 @@ def test_object_copy_versioning_multipart_upload(): response = client.get_object(Bucket=bucket_name, Key=key2) version_id2 = response['VersionId'] body = _get_body(response) - eq(data, body) - eq(key1_size, response['ContentLength']) - eq(key1_metadata, response['Metadata']) - eq(content_type, response['ContentType']) + assert data == body + assert key1_size == response['ContentLength'] + assert key1_metadata == response['Metadata'] + assert content_type == response['ContentType'] # second copy copy_source = {'Bucket': bucket_name, 'Key': key2, 'VersionId': version_id2} @@ -6801,10 +6796,10 @@ def test_object_copy_versioning_multipart_upload(): client.copy_object(Bucket=bucket_name, CopySource=copy_source, Key=key3) response = client.get_object(Bucket=bucket_name, Key=key3) body = _get_body(response) - eq(data, body) - eq(key1_size, response['ContentLength']) - eq(key1_metadata, response['Metadata']) - eq(content_type, response['ContentType']) + assert data == 
body + assert key1_size == response['ContentLength'] + assert key1_metadata == response['Metadata'] + assert content_type == response['ContentType'] # copy to another versioned bucket bucket_name2 = get_new_bucket() @@ -6815,10 +6810,10 @@ def test_object_copy_versioning_multipart_upload(): client.copy_object(Bucket=bucket_name2, CopySource=copy_source, Key=key4) response = client.get_object(Bucket=bucket_name2, Key=key4) body = _get_body(response) - eq(data, body) - eq(key1_size, response['ContentLength']) - eq(key1_metadata, response['Metadata']) - eq(content_type, response['ContentType']) + assert data == body + assert key1_size == response['ContentLength'] + assert key1_metadata == response['Metadata'] + assert content_type == response['ContentType'] # copy to another non versioned bucket bucket_name3 = get_new_bucket() @@ -6827,10 +6822,10 @@ def test_object_copy_versioning_multipart_upload(): client.copy_object(Bucket=bucket_name3, CopySource=copy_source, Key=key5) response = client.get_object(Bucket=bucket_name3, Key=key5) body = _get_body(response) - eq(data, body) - eq(key1_size, response['ContentLength']) - eq(key1_metadata, response['Metadata']) - eq(content_type, response['ContentType']) + assert data == body + assert key1_size == response['ContentLength'] + assert key1_metadata == response['Metadata'] + assert content_type == response['ContentType'] # copy from a non versioned bucket copy_source = {'Bucket': bucket_name3, 'Key': key5} @@ -6838,10 +6833,10 @@ def test_object_copy_versioning_multipart_upload(): client.copy_object(Bucket=bucket_name3, CopySource=copy_source, Key=key6) response = client.get_object(Bucket=bucket_name3, Key=key6) body = _get_body(response) - eq(data, body) - eq(key1_size, response['ContentLength']) - eq(key1_metadata, response['Metadata']) - eq(content_type, response['ContentType']) + assert data == body + assert key1_size == response['ContentLength'] + assert key1_metadata == response['Metadata'] + assert content_type == 
response['ContentType'] @attr(resource='object') @attr(method='put') @@ -6855,8 +6850,8 @@ def test_multipart_upload_empty(): (upload_id, data, parts) = _multipart_upload(bucket_name=bucket_name, key=key1, size=objlen) e = assert_raises(ClientError, client.complete_multipart_upload,Bucket=bucket_name, Key=key1, UploadId=upload_id) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) - eq(error_code, 'MalformedXML') + assert status == 400 + assert error_code == 'MalformedXML' @attr(resource='object') @attr(method='put') @@ -6872,7 +6867,7 @@ def test_multipart_upload_small(): (upload_id, data, parts) = _multipart_upload(bucket_name=bucket_name, key=key1, size=objlen) response = client.complete_multipart_upload(Bucket=bucket_name, Key=key1, UploadId=upload_id, MultipartUpload={'Parts': parts}) response = client.get_object(Bucket=bucket_name, Key=key1) - eq(response['ContentLength'], objlen) + assert response['ContentLength'] == objlen # check extra client.complete_multipart_upload response = client.complete_multipart_upload(Bucket=bucket_name, Key=key1, UploadId=upload_id, MultipartUpload={'Parts': parts}) @@ -6935,7 +6930,7 @@ def _check_key_content(src_key, src_bucket_name, dest_key, dest_bucket_name, ver else: response = client.get_object(Bucket=src_bucket_name, Key=src_key, Range=r, VersionId=version_id) src_data = _get_body(response) - eq(src_data, dest_data) + assert src_data == dest_data @attr(resource='object') @attr(method='put') @@ -6955,7 +6950,7 @@ def test_multipart_copy_small(): client.complete_multipart_upload(Bucket=dest_bucket_name, Key=dest_key, UploadId=upload_id, MultipartUpload={'Parts': parts}) response = client.get_object(Bucket=dest_bucket_name, Key=dest_key) - eq(size, response['ContentLength']) + assert size == response['ContentLength'] _check_key_content(src_key, src_bucket_name, dest_key, dest_bucket_name) @attr(resource='object') @@ -6977,7 +6972,7 @@ def test_multipart_copy_invalid_range(): valid_status = [400, 
416] if not status in valid_status: raise AssertionError("Invalid response " + str(status)) - eq(error_code, 'InvalidRange') + assert error_code == 'InvalidRange' @attr(resource='object') @@ -7011,8 +7006,8 @@ def test_multipart_copy_improper_range(): CopySourceRange=test_range, PartNumber=1) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) - eq(error_code, 'InvalidArgument') + assert status == 400 + assert error_code == 'InvalidArgument' @attr(resource='object') @@ -7040,7 +7035,7 @@ def test_multipart_copy_without_range(): client.complete_multipart_upload(Bucket=dest_bucket_name, Key=dest_key, UploadId=upload_id, MultipartUpload={'Parts': parts}) response = client.get_object(Bucket=dest_bucket_name, Key=dest_key) - eq(response['ContentLength'], 10) + assert response['ContentLength'] == 10 _check_key_content(src_key, src_bucket_name, dest_key, dest_bucket_name) @attr(resource='object') @@ -7062,7 +7057,7 @@ def test_multipart_copy_special_names(): (upload_id, parts) = _multipart_copy(src_bucket_name, src_key, dest_bucket_name, dest_key, size) response = client.complete_multipart_upload(Bucket=dest_bucket_name, Key=dest_key, UploadId=upload_id, MultipartUpload={'Parts': parts}) response = client.get_object(Bucket=dest_bucket_name, Key=dest_key) - eq(size, response['ContentLength']) + assert size == response['ContentLength'] _check_key_content(src_key, src_bucket_name, dest_key, dest_bucket_name) def _check_content_using_range(key, bucket_name, data, step): @@ -7077,9 +7072,9 @@ def _check_content_using_range(key, bucket_name, data, step): end = ofs + toread - 1 r = 'bytes={s}-{e}'.format(s=ofs, e=end) response = client.get_object(Bucket=bucket_name, Key=key, Range=r) - eq(response['ContentLength'], toread) + assert response['ContentLength'] == toread body = _get_body(response) - eq(body, data[ofs:end+1]) + assert body == data[ofs:end+1] @attr(resource='object') @attr(method='put') @@ -7104,17 +7099,17 @@ def test_multipart_upload(): 
response = client.head_bucket(Bucket=bucket_name) rgw_bytes_used = int(response['ResponseMetadata']['HTTPHeaders'].get('x-rgw-bytes-used', objlen)) - eq(rgw_bytes_used, objlen) + assert rgw_bytes_used == objlen rgw_object_count = int(response['ResponseMetadata']['HTTPHeaders'].get('x-rgw-object-count', 1)) - eq(rgw_object_count, 1) + assert rgw_object_count == 1 response = client.get_object(Bucket=bucket_name, Key=key) - eq(response['ContentType'], content_type) - eq(response['Metadata'], metadata) + assert response['ContentType'] == content_type + assert response['Metadata'] == metadata body = _get_body(response) - eq(len(body), response['ContentLength']) - eq(body, data) + assert len(body) == response['ContentLength'] + assert body == data _check_content_using_range(key, bucket_name, data, 1000000) _check_content_using_range(key, bucket_name, data, 10000000) @@ -7124,9 +7119,9 @@ def check_versioning(bucket_name, status): try: response = client.get_bucket_versioning(Bucket=bucket_name) - eq(response['Status'], status) + assert response['Status'] == status except KeyError: - eq(status, None) + assert status == None # amazon is eventual consistent, retry a bit if failed def check_configure_versioning_retry(bucket_name, status, expected_string): @@ -7147,7 +7142,7 @@ def check_configure_versioning_retry(bucket_name, status, expected_string): time.sleep(1) - eq(expected_string, read_status) + assert expected_string == read_status @attr(resource='object') @attr(method='put') @@ -7180,7 +7175,7 @@ def test_multipart_copy_versioned(): (upload_id, parts) = _multipart_copy(src_bucket_name, src_key, dest_bucket_name, dest_key, size, version_id=vid) response = client.complete_multipart_upload(Bucket=dest_bucket_name, Key=dest_key, UploadId=upload_id, MultipartUpload={'Parts': parts}) response = client.get_object(Bucket=dest_bucket_name, Key=dest_key) - eq(size, response['ContentLength']) + assert size == response['ContentLength'] _check_key_content(src_key, src_bucket_name, 
dest_key, dest_bucket_name, version_id=vid) def _check_upload_multipart_resend(bucket_name, key, objlen, resend_parts): @@ -7191,11 +7186,11 @@ def _check_upload_multipart_resend(bucket_name, key, objlen, resend_parts): client.complete_multipart_upload(Bucket=bucket_name, Key=key, UploadId=upload_id, MultipartUpload={'Parts': parts}) response = client.get_object(Bucket=bucket_name, Key=key) - eq(response['ContentType'], content_type) - eq(response['Metadata'], metadata) + assert response['ContentType'] == content_type + assert response['Metadata'] == metadata body = _get_body(response) - eq(len(body), response['ContentLength']) - eq(body, data) + assert len(body) == response['ContentLength'] + assert body == data _check_content_using_range(key, bucket_name, data, 1000000) _check_content_using_range(key, bucket_name, data, 10000000) @@ -7304,8 +7299,8 @@ def test_multipart_upload_size_too_small(): (upload_id, data, parts) = _multipart_upload(bucket_name=bucket_name, key=key, size=size, part_size=10*1024) e = assert_raises(ClientError, client.complete_multipart_upload, Bucket=bucket_name, Key=key, UploadId=upload_id, MultipartUpload={'Parts': parts}) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) - eq(error_code, 'EntityTooSmall') + assert status == 400 + assert error_code == 'EntityTooSmall' def gen_rand_string(size, chars=string.ascii_uppercase + string.digits): return ''.join(random.choice(chars) for _ in range(size)) @@ -7394,10 +7389,10 @@ def test_abort_multipart_upload(): response = client.head_bucket(Bucket=bucket_name) rgw_bytes_used = int(response['ResponseMetadata']['HTTPHeaders'].get('x-rgw-bytes-used', 0)) - eq(rgw_bytes_used, 0) + assert rgw_bytes_used == 0 rgw_object_count = int(response['ResponseMetadata']['HTTPHeaders'].get('x-rgw-object-count', 0)) - eq(rgw_object_count, 0) + assert rgw_object_count == 0 @attr(resource='object') @attr(method='put') @@ -7411,8 +7406,8 @@ def test_abort_multipart_upload_not_found(): e = 
assert_raises(ClientError, client.abort_multipart_upload, Bucket=bucket_name, Key=key, UploadId='56788') status, error_code = _get_status_and_error_code(e.response) - eq(status, 404) - eq(error_code, 'NoSuchUpload') + assert status == 404 + assert error_code == 'NoSuchUpload' @attr(resource='object') @attr(method='put') @@ -7444,7 +7439,7 @@ def test_list_multipart_upload(): resp_uploadids.append(uploads[i]['UploadId']) for i in range(0, len(upload_ids)): - eq(True, (upload_ids[i] in resp_uploadids)) + assert True == (upload_ids[i] in resp_uploadids) client.abort_multipart_upload(Bucket=bucket_name, Key=key, UploadId=upload_id1) client.abort_multipart_upload(Bucket=bucket_name, Key=key, UploadId=upload_id2) @@ -7478,22 +7473,22 @@ def test_list_multipart_upload_owner(): try: # match fields of an Upload from ListMultipartUploadsResult def match(upload, key, uploadid, userid, username): - eq(upload['Key'], key) - eq(upload['UploadId'], uploadid) - eq(upload['Initiator']['ID'], userid) - eq(upload['Initiator']['DisplayName'], username) - eq(upload['Owner']['ID'], userid) - eq(upload['Owner']['DisplayName'], username) + assert upload['Key'] == key + assert upload['UploadId'] == uploadid + assert upload['Initiator']['ID'] == userid + assert upload['Initiator']['DisplayName'] == username + assert upload['Owner']['ID'] == userid + assert upload['Owner']['DisplayName'] == username # list uploads with client1 uploads1 = client1.list_multipart_uploads(Bucket=bucket_name)['Uploads'] - eq(len(uploads1), 2) + assert len(uploads1) == 2 match(uploads1[0], key1, upload1, user1, name1) match(uploads1[1], key2, upload2, user2, name2) # list uploads with client2 uploads2 = client2.list_multipart_uploads(Bucket=bucket_name)['Uploads'] - eq(len(uploads2), 2) + assert len(uploads2) == 2 match(uploads2[0], key1, upload1, user1, name1) match(uploads2[1], key2, upload2, user2, name2) finally: @@ -7520,8 +7515,8 @@ def test_multipart_upload_missing_part(): e = assert_raises(ClientError, 
client.complete_multipart_upload, Bucket=bucket_name, Key=key, UploadId=upload_id, MultipartUpload={'Parts': parts}) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) - eq(error_code, 'InvalidPart') + assert status == 400 + assert error_code == 'InvalidPart' @attr(resource='object') @attr(method='put') @@ -7542,8 +7537,8 @@ def test_multipart_upload_incorrect_etag(): e = assert_raises(ClientError, client.complete_multipart_upload, Bucket=bucket_name, Key=key, UploadId=upload_id, MultipartUpload={'Parts': parts}) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) - eq(error_code, 'InvalidPart') + assert status == 400 + assert error_code == 'InvalidPart' def _simple_http_req_100_cont(host, port, is_secure, method, resource): """ @@ -7598,12 +7593,12 @@ def test_100_continue(): #NOTES: this test needs to be tested when is_secure is True status = _simple_http_req_100_cont(host, port, is_secure, 'PUT', resource) - eq(status, '403') + assert status == '403' client.put_bucket_acl(Bucket=bucket_name, ACL='public-read-write') status = _simple_http_req_100_cont(host, port, is_secure, 'PUT', resource) - eq(status, '100') + assert status == '100' @attr(resource='bucket') @attr(method='put') @@ -7626,21 +7621,21 @@ def test_set_cors(): e = assert_raises(ClientError, client.get_bucket_cors, Bucket=bucket_name) status = _get_status(e.response) - eq(status, 404) + assert status == 404 client.put_bucket_cors(Bucket=bucket_name, CORSConfiguration=cors_config) response = client.get_bucket_cors(Bucket=bucket_name) - eq(response['CORSRules'][0]['AllowedMethods'], allowed_methods) - eq(response['CORSRules'][0]['AllowedOrigins'], allowed_origins) + assert response['CORSRules'][0]['AllowedMethods'] == allowed_methods + assert response['CORSRules'][0]['AllowedOrigins'] == allowed_origins client.delete_bucket_cors(Bucket=bucket_name) e = assert_raises(ClientError, client.get_bucket_cors, Bucket=bucket_name) status = 
_get_status(e.response) - eq(status, 404) + assert status == 404 def _cors_request_and_check(func, url, headers, expect_status, expect_allow_origin, expect_allow_methods): r = func(url, headers=headers, verify=get_config_ssl_verify()) - eq(r.status_code, expect_status) + assert r.status_code == expect_status assert r.headers.get('access-control-allow-origin', None) == expect_allow_origin assert r.headers.get('access-control-allow-methods', None) == expect_allow_methods @@ -7673,7 +7668,7 @@ def test_cors_origin_response(): e = assert_raises(ClientError, client.get_bucket_cors, Bucket=bucket_name) status = _get_status(e.response) - eq(status, 404) + assert status == 404 client.put_bucket_cors(Bucket=bucket_name, CORSConfiguration=cors_config) @@ -7744,7 +7739,7 @@ def test_cors_origin_wildcard(): e = assert_raises(ClientError, client.get_bucket_cors, Bucket=bucket_name) status = _get_status(e.response) - eq(status, 404) + assert status == 404 client.put_bucket_cors(Bucket=bucket_name, CORSConfiguration=cors_config) @@ -7775,7 +7770,7 @@ def test_cors_header_option(): e = assert_raises(ClientError, client.get_bucket_cors, Bucket=bucket_name) status = _get_status(e.response) - eq(status, 404) + assert status == 404 client.put_bucket_cors(Bucket=bucket_name, CORSConfiguration=cors_config) @@ -7807,23 +7802,23 @@ def test_set_bucket_tagging(): e = assert_raises(ClientError, client.get_bucket_tagging, Bucket=bucket_name) status, error_code = _get_status_and_error_code(e.response) - eq(status, 404) - eq(error_code, 'NoSuchTagSet') + assert status == 404 + assert error_code == 'NoSuchTagSet' client.put_bucket_tagging(Bucket=bucket_name, Tagging=tags) response = client.get_bucket_tagging(Bucket=bucket_name) - eq(len(response['TagSet']), 1) - eq(response['TagSet'][0]['Key'], 'Hello') - eq(response['TagSet'][0]['Value'], 'World') + assert len(response['TagSet']) == 1 + assert response['TagSet'][0]['Key'] == 'Hello' + assert response['TagSet'][0]['Value'] == 'World' response = 
client.delete_bucket_tagging(Bucket=bucket_name) - eq(response['ResponseMetadata']['HTTPStatusCode'], 204) + assert response['ResponseMetadata']['HTTPStatusCode'] == 204 e = assert_raises(ClientError, client.get_bucket_tagging, Bucket=bucket_name) status, error_code = _get_status_and_error_code(e.response) - eq(status, 404) - eq(error_code, 'NoSuchTagSet') + assert status == 404 + assert error_code == 'NoSuchTagSet' class FakeFile(object): @@ -7877,7 +7872,7 @@ class FakeReadFile(FakeFile): self.expected_size = size def write(self, chars): - eq(chars, self.char*len(chars)) + assert chars == self.char*len(chars) self.offset += len(chars) self.size += len(chars) @@ -7888,7 +7883,7 @@ class FakeReadFile(FakeFile): self.interrupted = True def close(self): - eq(self.size, self.expected_size) + assert self.size == self.expected_size class FakeFileVerifier(object): """ @@ -7903,7 +7898,7 @@ class FakeFileVerifier(object): if self.char == None: self.char = data[0] self.size += size - eq(data.decode(), self.char*size) + assert data.decode() == self.char*size def _verify_atomic_key_data(bucket_name, key, size=-1, char=None): """ @@ -7913,7 +7908,7 @@ def _verify_atomic_key_data(bucket_name, key, size=-1, char=None): client = get_client() client.download_fileobj(bucket_name, key, fp_verify) if size >= 0: - eq(fp_verify.size, size) + assert fp_verify.size == size def _test_atomic_read(file_size): """ @@ -8124,8 +8119,8 @@ def _test_atomic_dual_conditional_write(file_size): e = assert_raises(ClientError, client.put_object, Bucket=bucket_name, Key=objname, Body=fp_c) status, error_code = _get_status_and_error_code(e.response) - eq(status, 412) - eq(error_code, 'PreconditionFailed') + assert status == 412 + assert error_code == 'PreconditionFailed' # verify the file _verify_atomic_key_data(bucket_name, objname, file_size, 'B') @@ -8163,8 +8158,8 @@ def test_atomic_write_bucket_gone(): e = assert_raises(ClientError, client.put_object, Bucket=bucket_name, Key=objname, Body=fp_a) 
status, error_code = _get_status_and_error_code(e.response) - eq(status, 404) - eq(error_code, 'NoSuchBucket') + assert status == 404 + assert error_code == 'NoSuchBucket' @attr(resource='object') @attr(method='put') @@ -8180,13 +8175,13 @@ def test_atomic_multipart_upload_write(): response = client.get_object(Bucket=bucket_name, Key='foo') body = _get_body(response) - eq(body, 'bar') + assert body == 'bar' client.abort_multipart_upload(Bucket=bucket_name, Key='foo', UploadId=upload_id) response = client.get_object(Bucket=bucket_name, Key='foo') body = _get_body(response) - eq(body, 'bar') + assert body == 'bar' class Counter: def __init__(self, default_val): @@ -8284,9 +8279,9 @@ def test_ranged_request_response_code(): response = client.get_object(Bucket=bucket_name, Key='testobj', Range='bytes=4-7') fetched_content = _get_body(response) - eq(fetched_content, content[4:8]) - eq(response['ResponseMetadata']['HTTPHeaders']['content-range'], 'bytes 4-7/11') - eq(response['ResponseMetadata']['HTTPStatusCode'], 206) + assert fetched_content == content[4:8] + assert response['ResponseMetadata']['HTTPHeaders']['content-range'] == 'bytes 4-7/11' + assert response['ResponseMetadata']['HTTPStatusCode'] == 206 def _generate_random_string(size): return ''.join(random.choice(string.ascii_letters + string.digits) for _ in range(size)) @@ -8307,9 +8302,9 @@ def test_ranged_big_request_response_code(): response = client.get_object(Bucket=bucket_name, Key='testobj', Range='bytes=3145728-5242880') fetched_content = _get_body(response) - eq(fetched_content, content[3145728:5242881]) - eq(response['ResponseMetadata']['HTTPHeaders']['content-range'], 'bytes 3145728-5242880/8388608') - eq(response['ResponseMetadata']['HTTPStatusCode'], 206) + assert fetched_content == content[3145728:5242881] + assert response['ResponseMetadata']['HTTPHeaders']['content-range'] == 'bytes 3145728-5242880/8388608' + assert response['ResponseMetadata']['HTTPStatusCode'] == 206 @attr(resource='object') 
@attr(method='get') @@ -8327,9 +8322,9 @@ def test_ranged_request_skip_leading_bytes_response_code(): response = client.get_object(Bucket=bucket_name, Key='testobj', Range='bytes=4-') fetched_content = _get_body(response) - eq(fetched_content, content[4:]) - eq(response['ResponseMetadata']['HTTPHeaders']['content-range'], 'bytes 4-10/11') - eq(response['ResponseMetadata']['HTTPStatusCode'], 206) + assert fetched_content == content[4:] + assert response['ResponseMetadata']['HTTPHeaders']['content-range'] == 'bytes 4-10/11' + assert response['ResponseMetadata']['HTTPStatusCode'] == 206 @attr(resource='object') @attr(method='get') @@ -8347,9 +8342,9 @@ def test_ranged_request_return_trailing_bytes_response_code(): response = client.get_object(Bucket=bucket_name, Key='testobj', Range='bytes=-7') fetched_content = _get_body(response) - eq(fetched_content, content[-7:]) - eq(response['ResponseMetadata']['HTTPHeaders']['content-range'], 'bytes 4-10/11') - eq(response['ResponseMetadata']['HTTPStatusCode'], 206) + assert fetched_content == content[-7:] + assert response['ResponseMetadata']['HTTPHeaders']['content-range'] == 'bytes 4-10/11' + assert response['ResponseMetadata']['HTTPStatusCode'] == 206 @attr(resource='object') @attr(method='get') @@ -8366,8 +8361,8 @@ def test_ranged_request_invalid_range(): # test invalid range e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key='testobj', Range='bytes=40-50') status, error_code = _get_status_and_error_code(e.response) - eq(status, 416) - eq(error_code, 'InvalidRange') + assert status == 416 + assert error_code == 'InvalidRange' @attr(resource='object') @attr(method='get') @@ -8384,8 +8379,8 @@ def test_ranged_request_empty_object(): # test invalid range e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key='testobj', Range='bytes=40-50') status, error_code = _get_status_and_error_code(e.response) - eq(status, 416) - eq(error_code, 'InvalidRange') + assert status == 416 + assert 
error_code == 'InvalidRange' @attr(resource='bucket') @attr(method='create') @@ -8404,9 +8399,9 @@ def check_obj_content(client, bucket_name, key, version_id, content): response = client.get_object(Bucket=bucket_name, Key=key, VersionId=version_id) if content is not None: body = _get_body(response) - eq(body, content) + assert body == content else: - eq(response['DeleteMarker'], True) + assert response['DeleteMarker'] == True def check_obj_versions(client, bucket_name, key, version_ids, contents): # check to see if objects is pointing at correct version @@ -8419,8 +8414,8 @@ def check_obj_versions(client, bucket_name, key, version_ids, contents): i = 0 for version in versions: - eq(version['VersionId'], version_ids[i]) - eq(version['Key'], key) + assert version['VersionId'] == version_ids[i] + assert version['Key'] == key check_obj_content(client, bucket_name, key, version['VersionId'], contents[i]) i += 1 @@ -8442,7 +8437,7 @@ def create_multiple_versions(client, bucket_name, key, num_versions, version_ids return (version_ids, contents) def remove_obj_version(client, bucket_name, key, version_ids, contents, index): - eq(len(version_ids), len(contents)) + assert len(version_ids) == len(contents) index = index % len(version_ids) rm_version_id = version_ids.pop(index) rm_content = contents.pop(index) @@ -8516,19 +8511,19 @@ def test_versioning_obj_create_read_remove_head(): response = client.delete_object(Bucket=bucket_name, Key=key, VersionId=removed_version_id) response = client.get_object(Bucket=bucket_name, Key=key) body = _get_body(response) - eq(body, contents[-1]) + assert body == contents[-1] # add a delete marker response = client.delete_object(Bucket=bucket_name, Key=key) - eq(response['DeleteMarker'], True) + assert response['DeleteMarker'] == True delete_marker_version_id = response['VersionId'] version_ids.append(delete_marker_version_id) response = client.list_object_versions(Bucket=bucket_name) - eq(len(response['Versions']), num_versions) - 
eq(len(response['DeleteMarkers']), 1) - eq(response['DeleteMarkers'][0]['VersionId'], delete_marker_version_id) + assert len(response['Versions']) == num_versions + assert len(response['DeleteMarkers']) == 1 + assert response['DeleteMarkers'][0]['VersionId'] == delete_marker_version_id clean_up_bucket(client, bucket_name, key, version_ids) @@ -8551,11 +8546,11 @@ def test_versioning_obj_plain_null_version_removal(): e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key=key) status, error_code = _get_status_and_error_code(e.response) - eq(status, 404) - eq(error_code, 'NoSuchKey') + assert status == 404 + assert error_code == 'NoSuchKey' response = client.list_object_versions(Bucket=bucket_name) - eq(('Versions' in response), False) + assert not 'Versions' in response @attr(resource='object') @attr(method='create') @@ -8577,23 +8572,23 @@ def test_versioning_obj_plain_null_version_overwrite(): response = client.put_object(Bucket=bucket_name, Key=key, Body=content2) response = client.get_object(Bucket=bucket_name, Key=key) body = _get_body(response) - eq(body, content2) + assert body == content2 version_id = response['VersionId'] client.delete_object(Bucket=bucket_name, Key=key, VersionId=version_id) response = client.get_object(Bucket=bucket_name, Key=key) body = _get_body(response) - eq(body, content) + assert body == content client.delete_object(Bucket=bucket_name, Key=key, VersionId='null') e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key=key) status, error_code = _get_status_and_error_code(e.response) - eq(status, 404) - eq(error_code, 'NoSuchKey') + assert status == 404 + assert error_code == 'NoSuchKey' response = client.list_object_versions(Bucket=bucket_name) - eq(('Versions' in response), False) + assert not 'Versions' in response @attr(resource='object') @attr(method='create') @@ -8616,27 +8611,27 @@ def test_versioning_obj_plain_null_version_overwrite_suspended(): response = 
client.put_object(Bucket=bucket_name, Key=key, Body=content2) response = client.get_object(Bucket=bucket_name, Key=key) body = _get_body(response) - eq(body, content2) + assert body == content2 response = client.list_object_versions(Bucket=bucket_name) # original object with 'null' version id still counts as a version - eq(len(response['Versions']), 1) + assert len(response['Versions']) == 1 client.delete_object(Bucket=bucket_name, Key=key, VersionId='null') e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key=key) status, error_code = _get_status_and_error_code(e.response) - eq(status, 404) - eq(error_code, 'NoSuchKey') + assert status == 404 + assert error_code == 'NoSuchKey' response = client.list_object_versions(Bucket=bucket_name) - eq(('Versions' in response), False) + assert not 'Versions' in response def delete_suspended_versioning_obj(client, bucket_name, key, version_ids, contents): client.delete_object(Bucket=bucket_name, Key=key) # clear out old null objects in lists since they will get overwritten - eq(len(version_ids), len(contents)) + assert len(version_ids) == len(contents) i = 0 for version_id in version_ids: if version_id == 'null': @@ -8650,7 +8645,7 @@ def overwrite_suspended_versioning_obj(client, bucket_name, key, version_ids, co client.put_object(Bucket=bucket_name, Key=key, Body=content) # clear out old null objects in lists since they will get overwritten - eq(len(version_ids), len(contents)) + assert len(version_ids) == len(contents) i = 0 for version_id in version_ids: if version_id == 'null': @@ -8699,8 +8694,8 @@ def test_versioning_obj_suspend_versions(): for idx in range(num_versions): remove_obj_version(client, bucket_name, key, version_ids, contents, idx) - eq(len(version_ids), 0) - eq(len(version_ids), len(contents)) + assert len(version_ids) == 0 + assert len(version_ids) == len(contents) @attr(resource='object') @attr(method='remove') @@ -8720,8 +8715,8 @@ def test_versioning_obj_create_versions_remove_all(): 
for idx in range(num_versions): remove_obj_version(client, bucket_name, key, version_ids, contents, idx) - eq(len(version_ids), 0) - eq(len(version_ids), len(contents)) + assert len(version_ids) == 0 + assert len(version_ids) == len(contents) @attr(resource='object') @attr(method='remove') @@ -8742,8 +8737,8 @@ def test_versioning_obj_create_versions_remove_special_names(): for idx in range(num_versions): remove_obj_version(client, bucket_name, key, version_ids, contents, idx) - eq(len(version_ids), 0) - eq(len(version_ids), len(contents)) + assert len(version_ids) == 0 + assert len(version_ids) == len(contents) @attr(resource='object') @attr(method='multipart') @@ -8777,8 +8772,8 @@ def test_versioning_obj_create_overwrite_multipart(): for idx in range(num_versions): remove_obj_version(client, bucket_name, key, version_ids, contents, idx) - eq(len(version_ids), 0) - eq(len(version_ids), len(contents)) + assert len(version_ids) == 0 + assert len(version_ids) == len(contents) @attr(resource='object') @attr(method='multipart') @@ -8827,16 +8822,16 @@ def test_versioning_obj_list_marker(): # test the last 5 created objects first for i in range(5): version = versions[i] - eq(version['VersionId'], version_ids2[i]) - eq(version['Key'], key2) + assert version['VersionId'] == version_ids2[i] + assert version['Key'] == key2 check_obj_content(client, bucket_name, key2, version['VersionId'], contents2[i]) i += 1 # then the first 5 for j in range(5): version = versions[i] - eq(version['VersionId'], version_ids[j]) - eq(version['Key'], key) + assert version['VersionId'] == version_ids[j] + assert version['Key'] == key check_obj_content(client, bucket_name, key, version['VersionId'], contents[j]) i += 1 @@ -8864,7 +8859,7 @@ def test_versioning_copy_obj_version(): client.copy_object(Bucket=bucket_name, CopySource=copy_source, Key=new_key_name) response = client.get_object(Bucket=bucket_name, Key=new_key_name) body = _get_body(response) - eq(body, contents[i]) + assert body == 
contents[i] another_bucket_name = get_new_bucket() @@ -8874,7 +8869,7 @@ def test_versioning_copy_obj_version(): client.copy_object(Bucket=another_bucket_name, CopySource=copy_source, Key=new_key_name) response = client.get_object(Bucket=another_bucket_name, Key=new_key_name) body = _get_body(response) - eq(body, contents[i]) + assert body == contents[i] new_key_name = 'new_key' copy_source = {'Bucket': bucket_name, 'Key': key} @@ -8882,7 +8877,7 @@ def test_versioning_copy_obj_version(): response = client.get_object(Bucket=another_bucket_name, Key=new_key_name) body = _get_body(response) - eq(body, contents[-1]) + assert body == contents[-1] @attr(resource='object') @attr(method='delete') @@ -8908,14 +8903,14 @@ def test_versioning_multi_object_delete(): client.delete_object(Bucket=bucket_name, Key=key, VersionId=version['VersionId']) response = client.list_object_versions(Bucket=bucket_name) - eq(('Versions' in response), False) + assert not 'Versions' in response # now remove again, should all succeed due to idempotency for version in versions: client.delete_object(Bucket=bucket_name, Key=key, VersionId=version['VersionId']) response = client.list_object_versions(Bucket=bucket_name) - eq(('Versions' in response), False) + assert not 'Versions' in response @attr(resource='object') @attr(method='delete') @@ -8939,8 +8934,8 @@ def test_versioning_multi_object_delete_with_marker(): delete_markers = response['DeleteMarkers'] version_ids.append(delete_markers[0]['VersionId']) - eq(len(version_ids), 3) - eq(len(delete_markers), 1) + assert len(version_ids) == 3 + assert len(delete_markers) == 1 for version in versions: client.delete_object(Bucket=bucket_name, Key=key, VersionId=version['VersionId']) @@ -8949,8 +8944,8 @@ def test_versioning_multi_object_delete_with_marker(): client.delete_object(Bucket=bucket_name, Key=key, VersionId=delete_marker['VersionId']) response = client.list_object_versions(Bucket=bucket_name) - eq(('Versions' in response), False) - 
eq(('DeleteMarkers' in response), False) + assert not 'Versions' in response + assert not 'DeleteMarkers' in response for version in versions: client.delete_object(Bucket=bucket_name, Key=key, VersionId=version['VersionId']) @@ -8960,8 +8955,8 @@ def test_versioning_multi_object_delete_with_marker(): # now remove again, should all succeed due to idempotency response = client.list_object_versions(Bucket=bucket_name) - eq(('Versions' in response), False) - eq(('DeleteMarkers' in response), False) + assert not 'Versions' in response + assert not 'DeleteMarkers' in response @attr(resource='object') @attr(method='delete') @@ -8984,9 +8979,9 @@ def test_versioning_multi_object_delete_with_marker_create(): response = client.list_object_versions(Bucket=bucket_name) delete_markers = response['DeleteMarkers'] - eq(len(delete_markers), 1) - eq(delete_marker_version_id, delete_markers[0]['VersionId']) - eq(key, delete_markers[0]['Key']) + assert len(delete_markers) == 1 + assert delete_marker_version_id == delete_markers[0]['VersionId'] + assert key == delete_markers[0]['Key'] @attr(resource='object') @attr(method='put') @@ -9011,8 +9006,8 @@ def test_versioned_object_acl(): display_name = get_main_display_name() user_id = get_main_user_id() - eq(response['Owner']['DisplayName'], display_name) - eq(response['Owner']['ID'], user_id) + assert response['Owner']['DisplayName'] == display_name + assert response['Owner']['ID'] == user_id grants = response['Grants'] default_policy = [ @@ -9086,8 +9081,8 @@ def test_versioned_object_acl_no_version_specified(): display_name = get_main_display_name() user_id = get_main_user_id() - eq(response['Owner']['DisplayName'], display_name) - eq(response['Owner']['ID'], user_id) + assert response['Owner']['DisplayName'] == display_name + assert response['Owner']['ID'] == user_id grants = response['Grants'] default_policy = [ @@ -9177,13 +9172,13 @@ def test_versioned_concurrent_object_create_concurrent_remove(): response = 
client.list_object_versions(Bucket=bucket_name) versions = response['Versions'] - eq(len(versions), num_versions) + assert len(versions) == num_versions t = _do_clear_versioned_bucket_concurrent(client, bucket_name) _do_wait_completion(t) response = client.list_object_versions(Bucket=bucket_name) - eq(('Versions' in response), False) + assert not 'Versions' in response @attr(resource='object') @attr(method='put') @@ -9216,7 +9211,7 @@ def test_versioned_concurrent_object_create_and_remove(): _do_wait_completion(t) response = client.list_object_versions(Bucket=bucket_name) - eq(('Versions' in response), False) + assert not 'Versions' in response @attr(resource='bucket') @attr(method='put') @@ -9230,7 +9225,7 @@ def test_lifecycle_set(): {'ID': 'rule2', 'Expiration': {'Days': 2}, 'Prefix': 'test2/', 'Status':'Disabled'}] lifecycle = {'Rules': rules} response = client.put_bucket_lifecycle_configuration(Bucket=bucket_name, LifecycleConfiguration=lifecycle) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 @attr(resource='bucket') @attr(method='get') @@ -9245,7 +9240,7 @@ def test_lifecycle_get(): lifecycle = {'Rules': rules} client.put_bucket_lifecycle_configuration(Bucket=bucket_name, LifecycleConfiguration=lifecycle) response = client.get_bucket_lifecycle_configuration(Bucket=bucket_name) - eq(response['Rules'], rules) + assert response['Rules'] == rules @attr(resource='bucket') @attr(method='get') @@ -9269,12 +9264,12 @@ def test_lifecycle_get_no_id(): for lc_rule in current_lc: if lc_rule['Prefix'] == rules['rule1'].prefix: - eq(lc_rule['Expiration']['Days'], rules['rule1'].days) - eq(lc_rule['Status'], rules['rule1'].status) + assert lc_rule['Expiration']['Days'] == rules['rule1'].days + assert lc_rule['Status'] == rules['rule1'].status assert 'ID' in lc_rule elif lc_rule['Prefix'] == rules['rule2'].prefix: - eq(lc_rule['Expiration']['Days'], rules['rule2'].days) - eq(lc_rule['Status'], 
rules['rule2'].status) + assert lc_rule['Expiration']['Days'] == rules['rule2'].days + assert lc_rule['Status'] == rules['rule2'].status assert 'ID' in lc_rule else: # neither of the rules we supplied was returned, something wrong @@ -9318,10 +9313,10 @@ def test_lifecycle_expiration(): response = client.list_objects(Bucket=bucket_name) expire3_objects = response['Contents'] - eq(len(init_objects), 6) - eq(len(expire1_objects), 4) - eq(len(keep2_objects), 4) - eq(len(expire3_objects), 2) + assert len(init_objects) == 6 + assert len(expire1_objects) == 4 + assert len(keep2_objects) == 4 + assert len(expire3_objects) == 2 @attr(resource='bucket') @attr(method='put') @@ -9361,10 +9356,10 @@ def test_lifecyclev2_expiration(): response = client.list_objects_v2(Bucket=bucket_name) expire3_objects = response['Contents'] - eq(len(init_objects), 6) - eq(len(expire1_objects), 4) - eq(len(keep2_objects), 4) - eq(len(expire3_objects), 2) + assert len(init_objects) == 6 + assert len(expire1_objects) == 4 + assert len(keep2_objects) == 4 + assert len(expire3_objects) == 2 @attr(resource='bucket') @attr(method='put') @@ -9393,8 +9388,8 @@ def test_lifecycle_expiration_versioning_enabled(): response = client.list_object_versions(Bucket=bucket_name) versions = response['Versions'] delete_markers = response['DeleteMarkers'] - eq(len(versions), 1) - eq(len(delete_markers), 1) + assert len(versions) == 1 + assert len(delete_markers) == 1 @attr(resource='bucket') @attr(method='put') @@ -9417,7 +9412,7 @@ def test_lifecycle_expiration_tags1(): response = client.put_object_tagging(Bucket=bucket_name, Key=tom_key, Tagging=tom_tagset) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 lifecycle_config = { 'Rules': [ @@ -9440,7 +9435,7 @@ def test_lifecycle_expiration_tags1(): response = client.put_bucket_lifecycle_configuration( Bucket=bucket_name, LifecycleConfiguration=lifecycle_config) - 
eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 lc_interval = get_lc_debug_interval() @@ -9451,7 +9446,7 @@ def test_lifecycle_expiration_tags1(): except KeyError: expire_objects = [] - eq(len(expire_objects), 0) + assert len(expire_objects) == 0 # factor out common setup code def setup_lifecycle_tags2(client, bucket_name): @@ -9463,7 +9458,7 @@ def setup_lifecycle_tags2(client, bucket_name): response = client.put_object_tagging(Bucket=bucket_name, Key=tom_key, Tagging=tom_tagset) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 huck_key = 'days1/huck' huck_tagset = { @@ -9475,7 +9470,7 @@ def setup_lifecycle_tags2(client, bucket_name): response = client.put_object_tagging(Bucket=bucket_name, Key=huck_key, Tagging=huck_tagset) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 lifecycle_config = { 'Rules': [ @@ -9507,7 +9502,7 @@ def setup_lifecycle_tags2(client, bucket_name): response = client.put_bucket_lifecycle_configuration( Bucket=bucket_name, LifecycleConfiguration=lifecycle_config) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 return response @attr(resource='bucket') @@ -9533,7 +9528,7 @@ def test_lifecycle_expiration_tags2(): response = client.list_objects(Bucket=bucket_name) expire1_objects = response['Contents'] - eq(len(expire1_objects), 1) + assert len(expire1_objects) == 1 @attr(resource='bucket') @attr(method='put') @@ -9561,7 +9556,7 @@ def test_lifecycle_expiration_versioned_tags2(): response = client.list_objects(Bucket=bucket_name) expire1_objects = response['Contents'] - eq(len(expire1_objects), 1) + assert len(expire1_objects) == 1 # setup for scenario based on vidushi mishra's in rhbz#1877737 def setup_lifecycle_noncur_tags(client, bucket_name, days): @@ 
-9574,10 +9569,10 @@ def setup_lifecycle_noncur_tags(client, bucket_name, days): for ix in range(10): body = "%s v%d" % (key, ix) response = client.put_object(Bucket=bucket_name, Key=key, Body=body) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 response = client.put_object_tagging(Bucket=bucket_name, Key=key, Tagging=tagset) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 lifecycle_config = { 'Rules': [ @@ -9600,7 +9595,7 @@ def setup_lifecycle_noncur_tags(client, bucket_name, days): response = client.put_bucket_lifecycle_configuration( Bucket=bucket_name, LifecycleConfiguration=lifecycle_config) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 return response def verify_lifecycle_expiration_noncur_tags(client, bucket_name, secs): @@ -9639,13 +9634,13 @@ def test_lifecycle_expiration_noncur_tags1(): client, bucket_name, 2*lc_interval) # at T+20, 10 objects should exist - eq(num_objs, 10) + assert num_objs == 10 num_objs = verify_lifecycle_expiration_noncur_tags( client, bucket_name, 5*lc_interval) # at T+60, only the current object version should exist - eq(num_objs, 1) + assert num_objs == 1 @attr(resource='bucket') @attr(method='put') @@ -9661,8 +9656,8 @@ def test_lifecycle_id_too_long(): e = assert_raises(ClientError, client.put_bucket_lifecycle_configuration, Bucket=bucket_name, LifecycleConfiguration=lifecycle) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) - eq(error_code, 'InvalidArgument') + assert status == 400 + assert error_code == 'InvalidArgument' @attr(resource='bucket') @attr(method='put') @@ -9679,8 +9674,8 @@ def test_lifecycle_same_id(): e = assert_raises(ClientError, client.put_bucket_lifecycle_configuration, Bucket=bucket_name, LifecycleConfiguration=lifecycle) status, error_code = 
_get_status_and_error_code(e.response) - eq(status, 400) - eq(error_code, 'InvalidArgument') + assert status == 400 + assert error_code == 'InvalidArgument' @attr(resource='bucket') @attr(method='put') @@ -9696,24 +9691,24 @@ def test_lifecycle_invalid_status(): e = assert_raises(ClientError, client.put_bucket_lifecycle_configuration, Bucket=bucket_name, LifecycleConfiguration=lifecycle) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) - eq(error_code, 'MalformedXML') + assert status == 400 + assert error_code == 'MalformedXML' rules=[{'ID': 'rule1', 'Expiration': {'Days': 2}, 'Prefix': 'test1/', 'Status':'disabled'}] lifecycle = {'Rules': rules} e = assert_raises(ClientError, client.put_bucket_lifecycle, Bucket=bucket_name, LifecycleConfiguration=lifecycle) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) - eq(error_code, 'MalformedXML') + assert status == 400 + assert error_code == 'MalformedXML' rules=[{'ID': 'rule1', 'Expiration': {'Days': 2}, 'Prefix': 'test1/', 'Status':'invalid'}] lifecycle = {'Rules': rules} e = assert_raises(ClientError, client.put_bucket_lifecycle_configuration, Bucket=bucket_name, LifecycleConfiguration=lifecycle) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) - eq(error_code, 'MalformedXML') + assert status == 400 + assert error_code == 'MalformedXML' @attr(resource='bucket') @attr(method='put') @@ -9727,7 +9722,7 @@ def test_lifecycle_set_date(): lifecycle = {'Rules': rules} response = client.put_bucket_lifecycle_configuration(Bucket=bucket_name, LifecycleConfiguration=lifecycle) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 @attr(resource='bucket') @attr(method='put') @@ -9743,7 +9738,7 @@ def test_lifecycle_set_invalid_date(): e = assert_raises(ClientError, client.put_bucket_lifecycle_configuration, Bucket=bucket_name, LifecycleConfiguration=lifecycle) status, error_code = 
_get_status_and_error_code(e.response) - eq(status, 400) + assert status == 400 @attr(resource='bucket') @attr(method='put') @@ -9773,8 +9768,8 @@ def test_lifecycle_expiration_date(): response = client.list_objects(Bucket=bucket_name) expire_objects = response['Contents'] - eq(len(init_objects), 2) - eq(len(expire_objects), 1) + assert len(init_objects) == 2 + assert len(expire_objects) == 1 @attr(resource='bucket') @attr(method='put') @@ -9798,7 +9793,7 @@ def test_lifecycle_expiration_days0(): except botocore.exceptions.ClientError as e: response_code = e.response['Error']['Code'] - eq(response_code, 'InvalidArgument') + assert response_code == 'InvalidArgument' def setup_lifecycle_expiration(client, bucket_name, rule_id, delta_days, @@ -9809,12 +9804,12 @@ def setup_lifecycle_expiration(client, bucket_name, rule_id, delta_days, lifecycle = {'Rules': rules} response = client.put_bucket_lifecycle_configuration( Bucket=bucket_name, LifecycleConfiguration=lifecycle) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 key = rule_prefix + 'foo' body = 'bar' response = client.put_object(Bucket=bucket_name, Key=key, Body=body) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 return response def check_lifecycle_expiration_header(response, start_time, rule_id, @@ -9846,7 +9841,7 @@ def test_lifecycle_expiration_header_put(): now = datetime.datetime.now(None) response = setup_lifecycle_expiration( client, bucket_name, 'rule1', 1, 'days1/') - eq(check_lifecycle_expiration_header(response, now, 'rule1', 1), True) + assert check_lifecycle_expiration_header(response, now, 'rule1', 1) @attr(resource='bucket') @attr(method='head') @@ -9869,8 +9864,8 @@ def test_lifecycle_expiration_header_head(): # stat the object, check header response = client.head_object(Bucket=bucket_name, Key=key) - eq(response['ResponseMetadata']['HTTPStatusCode'], 
200) - eq(check_lifecycle_expiration_header(response, now, 'rule1', 1), True) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 + assert check_lifecycle_expiration_header(response, now, 'rule1', 1) @attr(resource='bucket') @attr(method='head') @@ -9909,8 +9904,8 @@ def test_lifecycle_expiration_header_tags_head(): # stat the object, check header response = client.head_object(Bucket=bucket_name, Key=key1) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) - eq(check_lifecycle_expiration_header(response, datetime.datetime.now(None), 'rule1', 1), True) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 + assert check_lifecycle_expiration_header(response, datetime.datetime.now(None), 'rule1', 1) # test that header is not returning when it should not lifecycle={ @@ -9931,8 +9926,8 @@ def test_lifecycle_expiration_header_tags_head(): Bucket=bucket_name, LifecycleConfiguration=lifecycle) # stat the object, check header response = client.head_object(Bucket=bucket_name, Key=key1) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) - eq(check_lifecycle_expiration_header(response, datetime.datetime.now(None), 'rule1', 1), False) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 + assert check_lifecycle_expiration_header(response, datetime.datetime.now(None), 'rule1', 1) == False @attr(resource='bucket') @attr(method='head') @@ -9981,8 +9976,8 @@ def test_lifecycle_expiration_header_and_tags_head(): # stat the object, check header response = client.head_object(Bucket=bucket_name, Key=key1) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) - eq(check_lifecycle_expiration_header(response, datetime.datetime.now(None), 'rule1', 1), False) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 + assert check_lifecycle_expiration_header(response, datetime.datetime.now(None), 'rule1', 1) == False @attr(resource='bucket') @attr(method='put') @@ -9996,7 +9991,7 @@ def test_lifecycle_set_noncurrent(): {'ID': 'rule2',
'NoncurrentVersionExpiration': {'NoncurrentDays': 3}, 'Prefix': 'future/', 'Status':'Enabled'}] lifecycle = {'Rules': rules} response = client.put_bucket_lifecycle_configuration(Bucket=bucket_name, LifecycleConfiguration=lifecycle) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 @attr(resource='bucket') @attr(method='put') @@ -10031,8 +10026,8 @@ def test_lifecycle_noncur_expiration(): response = client.list_object_versions(Bucket=bucket_name) expire_versions = response['Versions'] - eq(len(init_versions), 6) - eq(len(expire_versions), 4) + assert len(init_versions) == 6 + assert len(expire_versions) == 4 @attr(resource='bucket') @attr(method='put') @@ -10045,7 +10040,7 @@ def test_lifecycle_set_deletemarker(): rules=[{'ID': 'rule1', 'Expiration': {'ExpiredObjectDeleteMarker': True}, 'Prefix': 'test1/', 'Status':'Enabled'}] lifecycle = {'Rules': rules} response = client.put_bucket_lifecycle_configuration(Bucket=bucket_name, LifecycleConfiguration=lifecycle) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 @attr(resource='bucket') @attr(method='put') @@ -10058,7 +10053,7 @@ def test_lifecycle_set_filter(): rules=[{'ID': 'rule1', 'Expiration': {'ExpiredObjectDeleteMarker': True}, 'Filter': {'Prefix': 'foo'}, 'Status':'Enabled'}] lifecycle = {'Rules': rules} response = client.put_bucket_lifecycle_configuration(Bucket=bucket_name, LifecycleConfiguration=lifecycle) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 @attr(resource='bucket') @attr(method='put') @@ -10071,7 +10066,7 @@ def test_lifecycle_set_empty_filter(): rules=[{'ID': 'rule1', 'Expiration': {'ExpiredObjectDeleteMarker': True}, 'Filter': {}, 'Status':'Enabled'}] lifecycle = {'Rules': rules} response = client.put_bucket_lifecycle_configuration(Bucket=bucket_name, 
LifecycleConfiguration=lifecycle) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 @attr(resource='bucket') @attr(method='put') @@ -10112,8 +10107,8 @@ def test_lifecycle_deletemarker_expiration(): deleted_versions = response['DeleteMarkers'] total_expire_versions = init_versions + deleted_versions - eq(len(total_init_versions), 4) - eq(len(total_expire_versions), 2) + assert len(total_init_versions) == 4 + assert len(total_expire_versions) == 2 @attr(resource='bucket') @attr(method='put') @@ -10131,7 +10126,7 @@ def test_lifecycle_set_multipart(): ] lifecycle = {'Rules': rules} response = client.put_bucket_lifecycle_configuration(Bucket=bucket_name, LifecycleConfiguration=lifecycle) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 @attr(resource='bucket') @attr(method='put') @@ -10172,8 +10167,8 @@ def test_lifecycle_multipart_expiration(): response = client.list_multipart_uploads(Bucket=bucket_name) expired_uploads = response['Uploads'] - eq(len(init_uploads), 2) - eq(len(expired_uploads), 1) + assert len(init_uploads) == 2 + assert len(expired_uploads) == 1 @attr(resource='bucket') @attr(method='put') @@ -10188,7 +10183,7 @@ def test_lifecycle_transition_set_invalid_date(): lifecycle = {'Rules': rules} e = assert_raises(ClientError, client.put_bucket_lifecycle_configuration, Bucket=bucket_name, LifecycleConfiguration=lifecycle) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) + assert status == 400 def _test_encryption_sse_customer_write(file_size): """ @@ -10214,7 +10209,7 @@ def _test_encryption_sse_customer_write(file_size): client.meta.events.register('before-call.s3.GetObject', lf) response = client.get_object(Bucket=bucket_name, Key=key) body = _get_body(response) - eq(body, data) + assert body == data # The test harness for lifecycle is configured to treat days as 10 second intervals. 
@attr(resource='bucket') @@ -10242,30 +10237,30 @@ def test_lifecycle_transition(): # Get list of all keys response = client.list_objects(Bucket=bucket_name) init_keys = _get_keys(response) - eq(len(init_keys), 6) + assert len(init_keys) == 6 lc_interval = get_lc_debug_interval() # Wait for first expiration (plus fudge to handle the timer window) time.sleep(4*lc_interval) expire1_keys = list_bucket_storage_class(client, bucket_name) - eq(len(expire1_keys['STANDARD']), 4) - eq(len(expire1_keys[sc[1]]), 2) - eq(len(expire1_keys[sc[2]]), 0) + assert len(expire1_keys['STANDARD']) == 4 + assert len(expire1_keys[sc[1]]) == 2 + assert len(expire1_keys[sc[2]]) == 0 # Wait for next expiration cycle time.sleep(lc_interval) keep2_keys = list_bucket_storage_class(client, bucket_name) - eq(len(keep2_keys['STANDARD']), 4) - eq(len(keep2_keys[sc[1]]), 2) - eq(len(keep2_keys[sc[2]]), 0) + assert len(keep2_keys['STANDARD']) == 4 + assert len(keep2_keys[sc[1]]) == 2 + assert len(keep2_keys[sc[2]]) == 0 # Wait for final expiration cycle time.sleep(5*lc_interval) expire3_keys = list_bucket_storage_class(client, bucket_name) - eq(len(expire3_keys['STANDARD']), 2) - eq(len(expire3_keys[sc[1]]), 2) - eq(len(expire3_keys[sc[2]]), 2) + assert len(expire3_keys['STANDARD']) == 2 + assert len(expire3_keys[sc[1]]) == 2 + assert len(expire3_keys[sc[2]]) == 2 # The test harness for lifecycle is configured to treat days as 10 second intervals. 
@attr(resource='bucket') @@ -10292,30 +10287,30 @@ def test_lifecycle_transition_single_rule_multi_trans(): # Get list of all keys response = client.list_objects(Bucket=bucket_name) init_keys = _get_keys(response) - eq(len(init_keys), 6) + assert len(init_keys) == 6 lc_interval = get_lc_debug_interval() # Wait for first expiration (plus fudge to handle the timer window) time.sleep(5*lc_interval) expire1_keys = list_bucket_storage_class(client, bucket_name) - eq(len(expire1_keys['STANDARD']), 4) - eq(len(expire1_keys[sc[1]]), 2) - eq(len(expire1_keys[sc[2]]), 0) + assert len(expire1_keys['STANDARD']) == 4 + assert len(expire1_keys[sc[1]]) == 2 + assert len(expire1_keys[sc[2]]) == 0 # Wait for next expiration cycle time.sleep(lc_interval) keep2_keys = list_bucket_storage_class(client, bucket_name) - eq(len(keep2_keys['STANDARD']), 4) - eq(len(keep2_keys[sc[1]]), 2) - eq(len(keep2_keys[sc[2]]), 0) + assert len(keep2_keys['STANDARD']) == 4 + assert len(keep2_keys[sc[1]]) == 2 + assert len(keep2_keys[sc[2]]) == 0 # Wait for final expiration cycle time.sleep(6*lc_interval) expire3_keys = list_bucket_storage_class(client, bucket_name) - eq(len(expire3_keys['STANDARD']), 4) - eq(len(expire3_keys[sc[1]]), 0) - eq(len(expire3_keys[sc[2]]), 2) + assert len(expire3_keys['STANDARD']) == 4 + assert len(expire3_keys[sc[1]]) == 0 + assert len(expire3_keys[sc[2]]) == 2 @attr(resource='bucket') @attr(method='put') @@ -10355,7 +10350,7 @@ def test_lifecycle_set_noncurrent_transition(): lifecycle = {'Rules': rules} response = client.put_bucket_lifecycle_configuration(Bucket=bucket, LifecycleConfiguration=lifecycle) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 @attr(resource='bucket') @@ -10405,27 +10400,27 @@ def test_lifecycle_noncur_transition(): create_multiple_versions(client, bucket, "test1/b", 3) init_keys = list_bucket_storage_class(client, bucket) - eq(len(init_keys['STANDARD']), 6) + assert 
len(init_keys['STANDARD']) == 6 lc_interval = get_lc_debug_interval() time.sleep(4*lc_interval) expire1_keys = list_bucket_storage_class(client, bucket) - eq(len(expire1_keys['STANDARD']), 2) - eq(len(expire1_keys[sc[1]]), 4) - eq(len(expire1_keys[sc[2]]), 0) + assert len(expire1_keys['STANDARD']) == 2 + assert len(expire1_keys[sc[1]]) == 4 + assert len(expire1_keys[sc[2]]) == 0 time.sleep(4*lc_interval) expire1_keys = list_bucket_storage_class(client, bucket) - eq(len(expire1_keys['STANDARD']), 2) - eq(len(expire1_keys[sc[1]]), 0) - eq(len(expire1_keys[sc[2]]), 4) + assert len(expire1_keys['STANDARD']) == 2 + assert len(expire1_keys[sc[1]]) == 0 + assert len(expire1_keys[sc[2]]) == 4 time.sleep(6*lc_interval) expire1_keys = list_bucket_storage_class(client, bucket) - eq(len(expire1_keys['STANDARD']), 2) - eq(len(expire1_keys[sc[1]]), 0) - eq(len(expire1_keys[sc[2]]), 0) + assert len(expire1_keys['STANDARD']) == 2 + assert len(expire1_keys[sc[1]]) == 0 + assert len(expire1_keys[sc[2]]) == 0 def verify_object(client, bucket, key, content=None, sc=None): response = client.get_object(Bucket=bucket, Key=key) @@ -10434,13 +10429,13 @@ def verify_object(client, bucket, key, content=None, sc=None): sc = 'STANDARD' if ('StorageClass' in response): - eq(response['StorageClass'], sc) + assert response['StorageClass'] == sc else: #storage class should be STANDARD - eq('STANDARD', sc) + assert 'STANDARD' == sc if (content != None): body = _get_body(response) - eq(body, content) + assert body == content # The test harness for lifecycle is configured to treat days as 10 second intervals. 
@attr(resource='bucket') @@ -10475,19 +10470,19 @@ def test_lifecycle_cloud_transition(): # Get list of all keys response = client.list_objects(Bucket=bucket_name) init_keys = _get_keys(response) - eq(len(init_keys), 4) + assert len(init_keys) == 4 lc_interval = get_lc_debug_interval() # Wait for first expiration (plus fudge to handle the timer window) time.sleep(10*lc_interval) expire1_keys = list_bucket_storage_class(client, bucket_name) - eq(len(expire1_keys['STANDARD']), 2) + assert len(expire1_keys['STANDARD']) == 2 if (retain_head_object != None and retain_head_object == "true"): - eq(len(expire1_keys[cloud_sc]), 2) + assert len(expire1_keys[cloud_sc]) == 2 else: - eq(len(expire1_keys[cloud_sc]), 0) + assert len(expire1_keys[cloud_sc]) == 0 time.sleep(2*lc_interval) # Check if objects copied to target path @@ -10509,28 +10504,28 @@ def test_lifecycle_cloud_transition(): if (retain_head_object != None and retain_head_object == "true"): # verify HEAD response response = client.head_object(Bucket=bucket_name, Key=keys[0]) - eq(0, response['ContentLength']) - eq(cloud_sc, response['StorageClass']) + assert 0 == response['ContentLength'] + assert cloud_sc == response['StorageClass'] # GET should return InvalidObjectState error e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key=src_key) status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) - eq(error_code, 'InvalidObjectState') + assert status == 403 + assert error_code == 'InvalidObjectState' # COPY of object should return InvalidObjectState error copy_source = {'Bucket': bucket_name, 'Key': src_key} e = assert_raises(ClientError, client.copy, CopySource=copy_source, Bucket=bucket_name, Key='copy_obj') status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) - eq(error_code, 'InvalidObjectState') + assert status == 403 + assert error_code == 'InvalidObjectState' # DELETE should succeed response = client.delete_object(Bucket=bucket_name, 
Key=src_key) e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key=src_key) status, error_code = _get_status_and_error_code(e.response) - eq(status, 404) - eq(error_code, 'NoSuchKey') + assert status == 404 + assert error_code == 'NoSuchKey' # Similar to 'test_lifecycle_transition' but for cloud transition @attr(resource='bucket') @@ -10574,34 +10569,34 @@ def test_lifecycle_cloud_multiple_transition(): # Get list of all keys response = client.list_objects(Bucket=bucket_name) init_keys = _get_keys(response) - eq(len(init_keys), 4) + assert len(init_keys) == 4 lc_interval = get_lc_debug_interval() # Wait for first expiration (plus fudge to handle the timer window) time.sleep(4*lc_interval) expire1_keys = list_bucket_storage_class(client, bucket_name) - eq(len(expire1_keys['STANDARD']), 2) - eq(len(expire1_keys[sc[1]]), 2) - eq(len(expire1_keys[sc[2]]), 0) + assert len(expire1_keys['STANDARD']) == 2 + assert len(expire1_keys[sc[1]]) == 2 + assert len(expire1_keys[sc[2]]) == 0 # Wait for next expiration cycle time.sleep(7*lc_interval) expire1_keys = list_bucket_storage_class(client, bucket_name) - eq(len(expire1_keys['STANDARD']), 2) - eq(len(expire1_keys[sc[1]]), 0) + assert len(expire1_keys['STANDARD']) == 2 + assert len(expire1_keys[sc[1]]) == 0 if (retain_head_object != None and retain_head_object == "true"): - eq(len(expire1_keys[sc[2]]), 2) + assert len(expire1_keys[sc[2]]) == 2 else: - eq(len(expire1_keys[sc[2]]), 0) + assert len(expire1_keys[sc[2]]) == 0 # Wait for final expiration cycle time.sleep(12*lc_interval) expire3_keys = list_bucket_storage_class(client, bucket_name) - eq(len(expire3_keys['STANDARD']), 2) - eq(len(expire3_keys[sc[1]]), 0) - eq(len(expire3_keys[sc[2]]), 0) + assert len(expire3_keys['STANDARD']) == 2 + assert len(expire3_keys[sc[1]]) == 0 + assert len(expire3_keys[sc[2]]) == 0 # Noncurrent objects for cloud transition @attr(resource='bucket') @@ -10664,7 +10659,7 @@ def test_lifecycle_noncur_cloud_transition(): 
create_multiple_versions(client, bucket, k, 3) init_keys = list_bucket_storage_class(client, bucket) - eq(len(init_keys['STANDARD']), 6) + assert len(init_keys['STANDARD']) == 6 response = client.list_object_versions(Bucket=bucket) @@ -10672,19 +10667,19 @@ def test_lifecycle_noncur_cloud_transition(): time.sleep(4*lc_interval) expire1_keys = list_bucket_storage_class(client, bucket) - eq(len(expire1_keys['STANDARD']), 2) - eq(len(expire1_keys[sc[1]]), 4) - eq(len(expire1_keys[sc[2]]), 0) + assert len(expire1_keys['STANDARD']) == 2 + assert len(expire1_keys[sc[1]]) == 4 + assert len(expire1_keys[sc[2]]) == 0 time.sleep(10*lc_interval) expire1_keys = list_bucket_storage_class(client, bucket) - eq(len(expire1_keys['STANDARD']), 2) - eq(len(expire1_keys[sc[1]]), 0) + assert len(expire1_keys['STANDARD']) == 2 + assert len(expire1_keys[sc[1]]) == 0 if (retain_head_object == None or retain_head_object == "false"): - eq(len(expire1_keys[sc[2]]), 0) + assert len(expire1_keys[sc[2]]) == 0 else: - eq(len(expire1_keys[sc[2]]), 4) + assert len(expire1_keys[sc[2]]) == 4 #check if versioned object exists on cloud endpoint if target_path == None: @@ -10744,13 +10739,13 @@ def test_lifecycle_cloud_transition_large_obj(): # Wait for first expiration (plus fudge to handle the timer window) time.sleep(8*lc_interval) expire1_keys = list_bucket_storage_class(client, bucket) - eq(len(expire1_keys['STANDARD']), 1) + assert len(expire1_keys['STANDARD']) == 1 if (retain_head_object != None and retain_head_object == "true"): - eq(len(expire1_keys[cloud_sc]), 1) + assert len(expire1_keys[cloud_sc]) == 1 else: - eq(len(expire1_keys[cloud_sc]), 0) + assert len(expire1_keys[cloud_sc]) == 0 # Check if objects copied to target path if target_path == None: @@ -10832,12 +10827,12 @@ def test_encryption_sse_c_method_head(): e = assert_raises(ClientError, client.head_object, Bucket=bucket_name, Key=key) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) + assert status == 
400 lf = (lambda **kwargs: kwargs['params']['headers'].update(sse_client_headers)) client.meta.events.register('before-call.s3.HeadObject', lf) response = client.head_object(Bucket=bucket_name, Key=key) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 @attr(resource='object') @attr(method='put') @@ -10862,7 +10857,7 @@ def test_encryption_sse_c_present(): e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key=key) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) + assert status == 400 @attr(resource='object') @attr(method='put') @@ -10894,7 +10889,7 @@ def test_encryption_sse_c_other_key(): client.meta.events.register('before-call.s3.GetObject', lf) e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key=key) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) + assert status == 400 @attr(resource='object') @attr(method='put') @@ -10917,7 +10912,7 @@ def test_encryption_sse_c_invalid_md5(): client.meta.events.register('before-call.s3.PutObject', lf) e = assert_raises(ClientError, client.put_object, Bucket=bucket_name, Key=key, Body=data) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) + assert status == 400 @attr(resource='object') @attr(method='put') @@ -10978,7 +10973,7 @@ def test_encryption_key_no_sse_c(): client.meta.events.register('before-call.s3.PutObject', lf) e = assert_raises(ClientError, client.put_object, Bucket=bucket_name, Key=key, Body=data) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) + assert status == 400 def _multipart_upload_enc(client, bucket_name, key, size, part_size, init_headers, part_headers, metadata, resend_parts): """ @@ -11028,8 +11023,8 @@ def _check_content_using_range_enc(client, bucket_name, key, data, step, enc_hea response = client.get_object(Bucket=bucket_name, Key=key, Range=r) read_range = 
response['ContentLength'] body = _get_body(response) - eq(read_range, toread) - eq(body, data[ofs:end+1]) + assert read_range == toread + assert body == data[ofs:end+1] @attr(resource='object') @attr(method='put') @@ -11065,21 +11060,21 @@ def test_encryption_sse_c_multipart_upload(): response = client.head_bucket(Bucket=bucket_name) rgw_object_count = int(response['ResponseMetadata']['HTTPHeaders'].get('x-rgw-object-count', 1)) - eq(rgw_object_count, 1) + assert rgw_object_count == 1 rgw_bytes_used = int(response['ResponseMetadata']['HTTPHeaders'].get('x-rgw-bytes-used', objlen)) - eq(rgw_bytes_used, objlen) + assert rgw_bytes_used == objlen lf = (lambda **kwargs: kwargs['params']['headers'].update(enc_headers)) client.meta.events.register('before-call.s3.GetObject', lf) response = client.get_object(Bucket=bucket_name, Key=key) - eq(response['Metadata'], metadata) - eq(response['ResponseMetadata']['HTTPHeaders']['content-type'], content_type) + assert response['Metadata'] == metadata + assert response['ResponseMetadata']['HTTPHeaders']['content-type'] == content_type body = _get_body(response) - eq(body, data) + assert body == data size = response['ContentLength'] - eq(len(body), size) + assert len(body) == size _check_content_using_range_enc(client, bucket_name, key, data, 1000000, enc_headers=enc_headers) _check_content_using_range_enc(client, bucket_name, key, data, 10000000, enc_headers=enc_headers) @@ -11116,7 +11111,7 @@ def test_encryption_sse_c_multipart_invalid_chunks_1(): e = assert_raises(ClientError, _multipart_upload_enc, client=client, bucket_name=bucket_name, key=key, size=objlen, part_size=5*1024*1024, init_headers=init_headers, part_headers=part_headers, metadata=metadata, resend_parts=resend_parts) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) + assert status == 400 @attr(resource='object') @attr(method='put') @@ -11150,7 +11145,7 @@ def test_encryption_sse_c_multipart_invalid_chunks_2(): e = 
assert_raises(ClientError, _multipart_upload_enc, client=client, bucket_name=bucket_name, key=key, size=objlen, part_size=5*1024*1024, init_headers=init_headers, part_headers=part_headers, metadata=metadata, resend_parts=resend_parts) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) + assert status == 400 @attr(resource='object') @attr(method='put') @@ -11189,22 +11184,22 @@ def test_encryption_sse_c_multipart_bad_download(): response = client.head_bucket(Bucket=bucket_name) rgw_object_count = int(response['ResponseMetadata']['HTTPHeaders'].get('x-rgw-object-count', 1)) - eq(rgw_object_count, 1) + assert rgw_object_count == 1 rgw_bytes_used = int(response['ResponseMetadata']['HTTPHeaders'].get('x-rgw-bytes-used', objlen)) - eq(rgw_bytes_used, objlen) + assert rgw_bytes_used == objlen lf = (lambda **kwargs: kwargs['params']['headers'].update(put_headers)) client.meta.events.register('before-call.s3.GetObject', lf) response = client.get_object(Bucket=bucket_name, Key=key) - eq(response['Metadata'], metadata) - eq(response['ResponseMetadata']['HTTPHeaders']['content-type'], content_type) + assert response['Metadata'] == metadata + assert response['ResponseMetadata']['HTTPHeaders']['content-type'] == content_type lf = (lambda **kwargs: kwargs['params']['headers'].update(get_headers)) client.meta.events.register('before-call.s3.GetObject', lf) e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key=key) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) + assert status == 400 @attr(resource='object') @@ -11254,7 +11249,7 @@ def test_encryption_sse_c_post_object_authenticated_request(): ('file', ('bar'))]) r = requests.post(url, files=payload, verify=get_config_ssl_verify()) - eq(r.status_code, 204) + assert r.status_code == 204 get_headers = { 'x-amz-server-side-encryption-customer-algorithm': 'AES256', @@ -11265,7 +11260,7 @@ def test_encryption_sse_c_post_object_authenticated_request(): 
client.meta.events.register('before-call.s3.GetObject', lf) response = client.get_object(Bucket=bucket_name, Key='foo.txt') body = _get_body(response) - eq(body, 'bar') + assert body == 'bar' @attr(assertion='success') @attr('encryption') @@ -11292,7 +11287,7 @@ def _test_sse_kms_customer_write(file_size, key_id = 'testkey-1'): response = client.get_object(Bucket=bucket_name, Key='testobj') body = _get_body(response) - eq(body, data) + assert body == data @@ -11323,14 +11318,14 @@ def test_sse_kms_method_head(): client.put_object(Bucket=bucket_name, Key=key, Body=data) response = client.head_object(Bucket=bucket_name, Key=key) - eq(response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'], 'aws:kms') - eq(response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption-aws-kms-key-id'], kms_keyid) + assert response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'] == 'aws:kms' + assert response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption-aws-kms-key-id'] == kms_keyid lf = (lambda **kwargs: kwargs['params']['headers'].update(sse_kms_client_headers)) client.meta.events.register('before-call.s3.HeadObject', lf) e = assert_raises(ClientError, client.head_object, Bucket=bucket_name, Key=key) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) + assert status == 400 @attr(resource='object') @attr(method='put') @@ -11357,7 +11352,7 @@ def test_sse_kms_present(): response = client.get_object(Bucket=bucket_name, Key=key) body = _get_body(response) - eq(body, data) + assert body == data @attr(resource='object') @attr(method='put') @@ -11400,7 +11395,7 @@ def test_sse_kms_not_declared(): e = assert_raises(ClientError, client.put_object, Bucket=bucket_name, Key=key, Body=data) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) + assert status == 400 @attr(resource='object') @attr(method='put') @@ -11434,22 +11429,22 @@ def test_sse_kms_multipart_upload(): 
response = client.head_bucket(Bucket=bucket_name) rgw_object_count = int(response['ResponseMetadata']['HTTPHeaders'].get('x-rgw-object-count', 1)) - eq(rgw_object_count, 1) + assert rgw_object_count == 1 rgw_bytes_used = int(response['ResponseMetadata']['HTTPHeaders'].get('x-rgw-bytes-used', objlen)) - eq(rgw_bytes_used, objlen) + assert rgw_bytes_used == objlen lf = (lambda **kwargs: kwargs['params']['headers'].update(part_headers)) client.meta.events.register('before-call.s3.UploadPart', lf) response = client.get_object(Bucket=bucket_name, Key=key) - eq(response['Metadata'], metadata) - eq(response['ResponseMetadata']['HTTPHeaders']['content-type'], content_type) + assert response['Metadata'] == metadata + assert response['ResponseMetadata']['HTTPHeaders']['content-type'] == content_type body = _get_body(response) - eq(body, data) + assert body == data size = response['ContentLength'] - eq(len(body), size) + assert len(body) == size _check_content_using_range(key, bucket_name, data, 1000000) _check_content_using_range(key, bucket_name, data, 10000000) @@ -11566,11 +11561,11 @@ def test_sse_kms_post_object_authenticated_request(): ('file', ('bar'))]) r = requests.post(url, files=payload, verify=get_config_ssl_verify()) - eq(r.status_code, 204) + assert r.status_code == 204 response = client.get_object(Bucket=bucket_name, Key='foo.txt') body = _get_body(response) - eq(body, 'bar') + assert body == 'bar' @attr(resource='object') @attr(method='put') @@ -11654,7 +11649,7 @@ def test_sse_kms_read_declare(): e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key=key) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) + assert status == 400 @attr(resource='bucket') @attr(method='get') @@ -11688,7 +11683,7 @@ def test_bucket_policy(): alt_client = get_alt_client() response = alt_client.list_objects(Bucket=bucket_name) - eq(len(response['Contents']), 1) + assert len(response['Contents']) == 1 @attr('bucket-policy') 
@pytest.mark.bucket_policy @@ -11720,7 +11715,7 @@ def test_bucketv2_policy(): alt_client = get_alt_client() response = alt_client.list_objects_v2(Bucket=bucket_name) - eq(len(response['Contents']), 1) + assert len(response['Contents']) == 1 @attr(resource='bucket') @attr(method='get') @@ -11756,8 +11751,8 @@ def test_bucket_policy_acl(): alt_client = get_alt_client() e = assert_raises(ClientError, alt_client.list_objects, Bucket=bucket_name) status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) - eq(error_code, 'AccessDenied') + assert status == 403 + assert error_code == 'AccessDenied' client.delete_bucket_policy(Bucket=bucket_name) client.put_bucket_acl(Bucket=bucket_name, ACL='public-read') @@ -11798,8 +11793,8 @@ def test_bucketv2_policy_acl(): alt_client = get_alt_client() e = assert_raises(ClientError, alt_client.list_objects_v2, Bucket=bucket_name) status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) - eq(error_code, 'AccessDenied') + assert status == 403 + assert error_code == 'AccessDenied' client.delete_bucket_policy(Bucket=bucket_name) client.put_bucket_acl(Bucket=bucket_name, ACL='public-read') @@ -11852,7 +11847,7 @@ def test_bucket_policy_different_tenant(): #alt_client = get_alt_client() #response = alt_client.list_objects(Bucket=bucket_name) - eq(len(response['Contents']), 1) + assert len(response['Contents']) == 1 @attr(resource='bucket') @attr(method='get') @@ -11904,7 +11899,7 @@ def test_bucketv2_policy_different_tenant(): #alt_client = get_alt_client() #response = alt_client.list_objects_v2(Bucket=bucket_name) - eq(len(response['Contents']), 1) + assert len(response['Contents']) == 1 @attr(resource='bucket') @attr(method='get') @@ -11942,11 +11937,11 @@ def test_bucket_policy_another_bucket(): alt_client = get_alt_client() response = alt_client.list_objects(Bucket=bucket_name) - eq(len(response['Contents']), 1) + assert len(response['Contents']) == 1 alt_client = get_alt_client() response = 
alt_client.list_objects(Bucket=bucket_name2) - eq(len(response['Contents']), 1) + assert len(response['Contents']) == 1 @attr(resource='bucket') @attr(method='get') @@ -11986,11 +11981,11 @@ def test_bucketv2_policy_another_bucket(): alt_client = get_alt_client() response = alt_client.list_objects_v2(Bucket=bucket_name) - eq(len(response['Contents']), 1) + assert len(response['Contents']) == 1 alt_client = get_alt_client() response = alt_client.list_objects_v2(Bucket=bucket_name2) - eq(len(response['Contents']), 1) + assert len(response['Contents']) == 1 @attr(resource='bucket') @attr(method='put') @@ -12030,7 +12025,7 @@ def test_bucket_policy_set_condition_operator_end_with_IfExists(): client.meta.events.register('before-call.s3.GetObject', lf) response = client.get_object(Bucket=bucket_name, Key=key) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 request_headers={'referer': 'http://www.example.com/index.html'} @@ -12038,11 +12033,11 @@ def test_bucket_policy_set_condition_operator_end_with_IfExists(): client.meta.events.register('before-call.s3.GetObject', lf) response = client.get_object(Bucket=bucket_name, Key=key) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 # the 'referer' headers need to be removed for this one #response = client.get_object(Bucket=bucket_name, Key=key) - #eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + #assert response['ResponseMetadata']['HTTPStatusCode'] == 200 request_headers={'referer': 'http://example.com'} @@ -12052,7 +12047,7 @@ def test_bucket_policy_set_condition_operator_end_with_IfExists(): # TODO: Compare Requests sent in Boto3, Wireshark, RGW Log for both boto and boto3 e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key=key) status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) + assert status == 403 response = 
client.get_bucket_policy(Bucket=bucket_name) print(response) @@ -12083,10 +12078,10 @@ def test_get_obj_tagging(): input_tagset = _create_simple_tagset(2) response = client.put_object_tagging(Bucket=bucket_name, Key=key, Tagging=input_tagset) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 response = client.get_object_tagging(Bucket=bucket_name, Key=key) - eq(response['TagSet'], input_tagset['TagSet']) + assert response['TagSet'] == input_tagset['TagSet'] @attr(resource='object') @@ -12103,11 +12098,11 @@ def test_get_obj_head_tagging(): input_tagset = _create_simple_tagset(count) response = client.put_object_tagging(Bucket=bucket_name, Key=key, Tagging=input_tagset) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 response = client.head_object(Bucket=bucket_name, Key=key) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) - eq(response['ResponseMetadata']['HTTPHeaders']['x-amz-tagging-count'], str(count)) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 + assert response['ResponseMetadata']['HTTPHeaders']['x-amz-tagging-count'] == str(count) @attr(resource='object') @attr(method='get') @@ -12124,10 +12119,10 @@ def test_put_max_tags(): input_tagset = _create_simple_tagset(10) response = client.put_object_tagging(Bucket=bucket_name, Key=key, Tagging=input_tagset) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 response = client.get_object_tagging(Bucket=bucket_name, Key=key) - eq(response['TagSet'], input_tagset['TagSet']) + assert response['TagSet'] == input_tagset['TagSet'] @attr(resource='object') @attr(method='get') @@ -12143,11 +12138,11 @@ def test_put_excess_tags(): input_tagset = _create_simple_tagset(11) e = assert_raises(ClientError, client.put_object_tagging, Bucket=bucket_name, Key=key, Tagging=input_tagset) status, 
error_code = _get_status_and_error_code(e.response) - eq(status, 400) - eq(error_code, 'InvalidTag') + assert status == 400 + assert error_code == 'InvalidTag' response = client.get_object_tagging(Bucket=bucket_name, Key=key) - eq(len(response['TagSet']), 0) + assert len(response['TagSet']) == 0 @attr(resource='object') @attr(method='get') @@ -12169,11 +12164,11 @@ def test_put_max_kvsize_tags(): input_tagset = {'TagSet': tagset} response = client.put_object_tagging(Bucket=bucket_name, Key=key, Tagging=input_tagset) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 response = client.get_object_tagging(Bucket=bucket_name, Key=key) for kv_pair in response['TagSet']: - eq((kv_pair in input_tagset['TagSet']), True) + assert kv_pair in input_tagset['TagSet'] @attr(resource='object') @attr(method='get') @@ -12196,11 +12191,11 @@ def test_put_excess_key_tags(): e = assert_raises(ClientError, client.put_object_tagging, Bucket=bucket_name, Key=key, Tagging=input_tagset) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) - eq(error_code, 'InvalidTag') + assert status == 400 + assert error_code == 'InvalidTag' response = client.get_object_tagging(Bucket=bucket_name, Key=key) - eq(len(response['TagSet']), 0) + assert len(response['TagSet']) == 0 @attr(resource='object') @attr(method='get') @@ -12223,11 +12218,11 @@ def test_put_excess_val_tags(): e = assert_raises(ClientError, client.put_object_tagging, Bucket=bucket_name, Key=key, Tagging=input_tagset) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) - eq(error_code, 'InvalidTag') + assert status == 400 + assert error_code == 'InvalidTag' response = client.get_object_tagging(Bucket=bucket_name, Key=key) - eq(len(response['TagSet']), 0) + assert len(response['TagSet']) == 0 @attr(resource='object') @attr(method='get') @@ -12249,10 +12244,10 @@ def test_put_modify_tags(): input_tagset = {'TagSet': 
tagset} response = client.put_object_tagging(Bucket=bucket_name, Key=key, Tagging=input_tagset) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 response = client.get_object_tagging(Bucket=bucket_name, Key=key) - eq(response['TagSet'], input_tagset['TagSet']) + assert response['TagSet'] == input_tagset['TagSet'] tagset2 = [] tagset2.append({'Key': 'key3', 'Value': 'val3'}) @@ -12260,10 +12255,10 @@ def test_put_modify_tags(): input_tagset2 = {'TagSet': tagset2} response = client.put_object_tagging(Bucket=bucket_name, Key=key, Tagging=input_tagset2) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 response = client.get_object_tagging(Bucket=bucket_name, Key=key) - eq(response['TagSet'], input_tagset2['TagSet']) + assert response['TagSet'] == input_tagset2['TagSet'] @attr(resource='object') @attr(method='get') @@ -12280,16 +12275,16 @@ def test_put_delete_tags(): input_tagset = _create_simple_tagset(2) response = client.put_object_tagging(Bucket=bucket_name, Key=key, Tagging=input_tagset) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 response = client.get_object_tagging(Bucket=bucket_name, Key=key) - eq(response['TagSet'], input_tagset['TagSet']) + assert response['TagSet'] == input_tagset['TagSet'] response = client.delete_object_tagging(Bucket=bucket_name, Key=key) - eq(response['ResponseMetadata']['HTTPStatusCode'], 204) + assert response['ResponseMetadata']['HTTPStatusCode'] == 204 response = client.get_object_tagging(Bucket=bucket_name, Key=key) - eq(len(response['TagSet']), 0) + assert len(response['TagSet']) == 0 @attr(resource='object') @attr(method='post') @@ -12321,13 +12316,13 @@ def test_post_object_tags_anonymous_request(): ]) r = requests.post(url, files=payload, verify=get_config_ssl_verify()) - eq(r.status_code, 204) + assert 
r.status_code == 204 response = client.get_object(Bucket=bucket_name, Key=key_name) body = _get_body(response) - eq(body, 'bar') + assert body == 'bar' response = client.get_object_tagging(Bucket=bucket_name, Key=key_name) - eq(response['TagSet'], input_tagset['TagSet']) + assert response['TagSet'] == input_tagset['TagSet'] @attr(resource='object') @attr(method='post') @@ -12374,10 +12369,10 @@ def test_post_object_tags_authenticated_request(): ('file', ('bar'))]) r = requests.post(url, files=payload, verify=get_config_ssl_verify()) - eq(r.status_code, 204) + assert r.status_code == 204 response = client.get_object(Bucket=bucket_name, Key='foo.txt') body = _get_body(response) - eq(body, 'bar') + assert body == 'bar' @attr(resource='object') @@ -12408,12 +12403,12 @@ def test_put_obj_with_tags(): client.put_object(Bucket=bucket_name, Key=key, Body=data) response = client.get_object(Bucket=bucket_name, Key=key) body = _get_body(response) - eq(body, data) + assert body == data response = client.get_object_tagging(Bucket=bucket_name, Key=key) response_tagset = response['TagSet'] tagset = tagset - eq(response_tagset, tagset) + assert response_tagset == tagset def _make_arn_resource(path="*"): return "arn:aws:s3:::{}".format(path) @@ -12441,12 +12436,12 @@ def test_get_tags_acl_public(): input_tagset = _create_simple_tagset(10) response = client.put_object_tagging(Bucket=bucket_name, Key=key, Tagging=input_tagset) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 alt_client = get_alt_client() response = alt_client.get_object_tagging(Bucket=bucket_name, Key=key) - eq(response['TagSet'], input_tagset['TagSet']) + assert response['TagSet'] == input_tagset['TagSet'] @attr(resource='object') @attr(method='get') @@ -12472,10 +12467,10 @@ def test_put_tags_acl_public(): input_tagset = _create_simple_tagset(10) alt_client = get_alt_client() response = alt_client.put_object_tagging(Bucket=bucket_name, Key=key, 
Tagging=input_tagset) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 response = client.get_object_tagging(Bucket=bucket_name, Key=key) - eq(response['TagSet'], input_tagset['TagSet']) + assert response['TagSet'] == input_tagset['TagSet'] @attr(resource='object') @attr(method='get') @@ -12498,15 +12493,15 @@ def test_delete_tags_obj_public(): input_tagset = _create_simple_tagset(10) response = client.put_object_tagging(Bucket=bucket_name, Key=key, Tagging=input_tagset) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 alt_client = get_alt_client() response = alt_client.delete_object_tagging(Bucket=bucket_name, Key=key) - eq(response['ResponseMetadata']['HTTPStatusCode'], 204) + assert response['ResponseMetadata']['HTTPStatusCode'] == 204 response = client.get_object_tagging(Bucket=bucket_name, Key=key) - eq(len(response['TagSet']), 0) + assert len(response['TagSet']) == 0 @attr(resource='object') @attr(method='put') @@ -12526,21 +12521,21 @@ def test_versioning_bucket_atomic_upload_return_version_id(): response = client.list_object_versions(Bucket=bucket_name) versions = response['Versions'] for version in versions: - eq(version['VersionId'], version_id) + assert version['VersionId'] == version_id # for versioning-default-bucket, no version-id should return. bucket_name = get_new_bucket() key = 'baz' response = client.put_object(Bucket=bucket_name, Key=key) - eq(('VersionId' in response), False) + assert not 'VersionId' in response # for versioning-suspended-bucket, no version-id should return. 
bucket_name = get_new_bucket() key = 'baz' check_configure_versioning_retry(bucket_name, "Suspended", "Suspended") response = client.put_object(Bucket=bucket_name, Key=key) - eq(('VersionId' in response), False) + assert not 'VersionId' in response @attr(resource='object') @attr(method='put') @@ -12567,7 +12562,7 @@ def test_versioning_bucket_multipart_upload_return_version_id(): response = client.list_object_versions(Bucket=bucket_name) versions = response['Versions'] for version in versions: - eq(version['VersionId'], version_id) + assert version['VersionId'] == version_id # for versioning-default-bucket, no version-id should return. bucket_name = get_new_bucket() @@ -12576,7 +12571,7 @@ def test_versioning_bucket_multipart_upload_return_version_id(): (upload_id, data, parts) = _multipart_upload(bucket_name=bucket_name, key=key, size=objlen, client=client, content_type=content_type, metadata=metadata) response = client.complete_multipart_upload(Bucket=bucket_name, Key=key, UploadId=upload_id, MultipartUpload={'Parts': parts}) - eq(('VersionId' in response), False) + assert not 'VersionId' in response # for versioning-suspended-bucket, no version-id should return bucket_name = get_new_bucket() @@ -12586,7 +12581,7 @@ def test_versioning_bucket_multipart_upload_return_version_id(): (upload_id, data, parts) = _multipart_upload(bucket_name=bucket_name, key=key, size=objlen, client=client, content_type=content_type, metadata=metadata) response = client.complete_multipart_upload(Bucket=bucket_name, Key=key, UploadId=upload_id, MultipartUpload={'Parts': parts}) - eq(('VersionId' in response), False) + assert not 'VersionId' in response @attr(resource='object') @attr(method='get') @@ -12620,7 +12615,7 @@ def test_bucket_policy_get_obj_existing_tag(): input_tagset = {'TagSet': tagset} response = client.put_object_tagging(Bucket=bucket_name, Key='publictag', Tagging=input_tagset) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert 
response['ResponseMetadata']['HTTPStatusCode'] == 200 tagset2 = [] tagset2.append({'Key': 'security', 'Value': 'private'}) @@ -12628,7 +12623,7 @@ def test_bucket_policy_get_obj_existing_tag(): input_tagset = {'TagSet': tagset2} response = client.put_object_tagging(Bucket=bucket_name, Key='privatetag', Tagging=input_tagset) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 tagset3 = [] tagset3.append({'Key': 'security1', 'Value': 'public'}) @@ -12636,19 +12631,19 @@ def test_bucket_policy_get_obj_existing_tag(): input_tagset = {'TagSet': tagset3} response = client.put_object_tagging(Bucket=bucket_name, Key='invalidtag', Tagging=input_tagset) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 alt_client = get_alt_client() response = alt_client.get_object(Bucket=bucket_name, Key='publictag') - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 e = assert_raises(ClientError, alt_client.get_object, Bucket=bucket_name, Key='privatetag') status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) + assert status == 403 e = assert_raises(ClientError, alt_client.get_object, Bucket=bucket_name, Key='invalidtag') status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) + assert status == 403 @attr(resource='object') @attr(method='get') @@ -12682,7 +12677,7 @@ def test_bucket_policy_get_obj_tagging_existing_tag(): input_tagset = {'TagSet': tagset} response = client.put_object_tagging(Bucket=bucket_name, Key='publictag', Tagging=input_tagset) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 tagset2 = [] tagset2.append({'Key': 'security', 'Value': 'private'}) @@ -12690,7 +12685,7 @@ def test_bucket_policy_get_obj_tagging_existing_tag(): input_tagset = {'TagSet': 
tagset2} response = client.put_object_tagging(Bucket=bucket_name, Key='privatetag', Tagging=input_tagset) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 tagset3 = [] tagset3.append({'Key': 'security1', 'Value': 'public'}) @@ -12698,25 +12693,25 @@ def test_bucket_policy_get_obj_tagging_existing_tag(): input_tagset = {'TagSet': tagset3} response = client.put_object_tagging(Bucket=bucket_name, Key='invalidtag', Tagging=input_tagset) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 alt_client = get_alt_client() response = alt_client.get_object_tagging(Bucket=bucket_name, Key='publictag') - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 # A get object itself should fail since we allowed only GetObjectTagging e = assert_raises(ClientError, alt_client.get_object, Bucket=bucket_name, Key='publictag') status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) + assert status == 403 e = assert_raises(ClientError, alt_client.get_object_tagging, Bucket=bucket_name, Key='privatetag') status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) + assert status == 403 e = assert_raises(ClientError, alt_client.get_object_tagging, Bucket=bucket_name, Key='invalidtag') status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) + assert status == 403 @attr(resource='object') @@ -12751,7 +12746,7 @@ def test_bucket_policy_put_obj_tagging_existing_tag(): input_tagset = {'TagSet': tagset} response = client.put_object_tagging(Bucket=bucket_name, Key='publictag', Tagging=input_tagset) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 tagset2 = [] tagset2.append({'Key': 'security', 'Value': 'private'}) @@ -12759,7 +12754,7 @@ def 
test_bucket_policy_put_obj_tagging_existing_tag(): input_tagset = {'TagSet': tagset2} response = client.put_object_tagging(Bucket=bucket_name, Key='privatetag', Tagging=input_tagset) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 alt_client = get_alt_client() # PUT requests with object tagging are a bit wierd, if you forget to put @@ -12773,11 +12768,11 @@ def test_bucket_policy_put_obj_tagging_existing_tag(): input_tagset = {'TagSet': testtagset1} response = alt_client.put_object_tagging(Bucket=bucket_name, Key='publictag', Tagging=input_tagset) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 e = assert_raises(ClientError, alt_client.put_object_tagging, Bucket=bucket_name, Key='privatetag', Tagging=input_tagset) status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) + assert status == 403 testtagset2 = [] testtagset2.append({'Key': 'security', 'Value': 'private'}) @@ -12785,14 +12780,14 @@ def test_bucket_policy_put_obj_tagging_existing_tag(): input_tagset = {'TagSet': testtagset2} response = alt_client.put_object_tagging(Bucket=bucket_name, Key='publictag', Tagging=input_tagset) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 # Now try putting the original tags again, this should fail input_tagset = {'TagSet': testtagset1} e = assert_raises(ClientError, alt_client.put_object_tagging, Bucket=bucket_name, Key='publictag', Tagging=input_tagset) status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) + assert status == 403 @attr(resource='object') @@ -12837,14 +12832,14 @@ def test_bucket_policy_put_obj_copy_source(): # policy on how to do this right response = alt_client.get_object(Bucket=bucket_name2, Key='new_foo') body = _get_body(response) - eq(body, 'public/foo') + assert body == 'public/foo' 
copy_source = {'Bucket': bucket_name, 'Key': 'public/bar'} alt_client.copy_object(Bucket=bucket_name2, CopySource=copy_source, Key='new_foo2') response = alt_client.get_object(Bucket=bucket_name2, Key='new_foo2') body = _get_body(response) - eq(body, 'public/bar') + assert body == 'public/bar' copy_source = {'Bucket': bucket_name, 'Key': 'private/foo'} check_access_denied(alt_client.copy_object, Bucket=bucket_name2, CopySource=copy_source, Key='new_foo2') @@ -12894,7 +12889,7 @@ def test_bucket_policy_put_obj_copy_source_meta(): # policy on how to do this right response = alt_client.get_object(Bucket=bucket_name, Key='new_foo') body = _get_body(response) - eq(body, 'public/foo') + assert body == 'public/foo' # remove the x-amz-metadata-directive header def remove_header(**kwargs): @@ -12942,7 +12937,7 @@ def test_bucket_policy_put_obj_acl(): # as an ERROR anyway response = alt_client.put_object(Bucket=bucket_name, Key=key1, Body=key1) #response = alt_client.put_object_acl(Bucket=bucket_name, Key=key1, ACL='private') - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 key2 = 'public-key' @@ -12951,7 +12946,7 @@ def test_bucket_policy_put_obj_acl(): e = assert_raises(ClientError, alt_client.put_object, Bucket=bucket_name, Key=key2, Body=key2) status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) + assert status == 403 @attr(resource='object') @@ -12997,7 +12992,7 @@ def test_bucket_policy_put_obj_grant(): alt_client.meta.events.register('before-call.s3.PutObject', lf) response = alt_client.put_object(Bucket=bucket_name, Key=key1, Body=key1) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 def remove_header(**kwargs): if ("x-amz-grant-full-control" in kwargs['params']['headers']): @@ -13007,7 +13002,7 @@ def test_bucket_policy_put_obj_grant(): key2 = 'key2' response = 
alt_client.put_object(Bucket=bucket_name2, Key=key2, Body=key2) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 acl1_response = client.get_object_acl(Bucket=bucket_name, Key=key1) @@ -13017,8 +13012,8 @@ def test_bucket_policy_put_obj_grant(): acl2_response = alt_client.get_object_acl(Bucket=bucket_name2, Key=key2) - eq(acl1_response['Grants'][0]['Grantee']['ID'], main_user_id) - eq(acl2_response['Grants'][0]['Grantee']['ID'], alt_user_id) + assert acl1_response['Grants'][0]['Grantee']['ID'] == main_user_id + assert acl2_response['Grants'][0]['Grantee']['ID'] == alt_user_id @attr(resource='object') @@ -13046,8 +13041,8 @@ def test_put_obj_enc_conflict_c_s3(): client.meta.events.register('before-call.s3.PutObject', lf) e = assert_raises(ClientError, client.put_object, Bucket=bucket_name, Key=key1_str) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) - eq(error_code, 'InvalidArgument') + assert status == 400 + assert error_code == 'InvalidArgument' @attr(resource='object') @attr(method='put') @@ -13078,8 +13073,8 @@ def test_put_obj_enc_conflict_c_kms(): client.meta.events.register('before-call.s3.PutObject', lf) e = assert_raises(ClientError, client.put_object, Bucket=bucket_name, Key=key1_str) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) - eq(error_code, 'InvalidArgument') + assert status == 400 + assert error_code == 'InvalidArgument' @attr(resource='object') @attr(method='put') @@ -13107,8 +13102,8 @@ def test_put_obj_enc_conflict_s3_kms(): client.meta.events.register('before-call.s3.PutObject', lf) e = assert_raises(ClientError, client.put_object, Bucket=bucket_name, Key=key1_str) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) - eq(error_code, 'InvalidArgument') + assert status == 400 + assert error_code == 'InvalidArgument' @attr(resource='object') @attr(method='put') @@ -13135,8 +13130,8 @@ def 
test_put_obj_enc_conflict_bad_enc_kms(): client.meta.events.register('before-call.s3.PutObject', lf) e = assert_raises(ClientError, client.put_object, Bucket=bucket_name, Key=key1_str) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) - eq(error_code, 'InvalidArgument') + assert status == 400 + assert error_code == 'InvalidArgument' @attr(resource='object') @attr(method='put') @@ -13193,7 +13188,7 @@ def test_bucket_policy_put_obj_s3_noenc(): # first validate that writing a sse-s3 object works response = client.put_object(Bucket=bucket_name, Key=key1_str, ServerSideEncryption='AES256') response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'] - eq(response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'], 'AES256') + assert response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'] == 'AES256' # then validate that a non-encrypted object fails. # (this also breaks the connection--non-sse bug, probably because the server @@ -13302,8 +13297,8 @@ def test_bucket_policy_put_obj_kms_noenc(): # breaks next call... 
response = client.put_object(Bucket=bucket_name, Key=key1_str, ServerSideEncryption='aws:kms', SSEKMSKeyId=kms_keyid) - eq(response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'], 'aws:kms') - eq(response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption-aws-kms-key-id'], kms_keyid) + assert response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'] == 'aws:kms' + assert response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption-aws-kms-key-id'] == kms_keyid check_access_denied(client.put_object, Bucket=bucket_name, Key=key2_str, Body=key2_str) @@ -13423,7 +13418,7 @@ def test_bucket_policy_get_obj_acl_existing_tag(): input_tagset = {'TagSet': tagset} response = client.put_object_tagging(Bucket=bucket_name, Key='publictag', Tagging=input_tagset) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 tagset2 = [] tagset2.append({'Key': 'security', 'Value': 'private'}) @@ -13431,7 +13426,7 @@ def test_bucket_policy_get_obj_acl_existing_tag(): input_tagset = {'TagSet': tagset2} response = client.put_object_tagging(Bucket=bucket_name, Key='privatetag', Tagging=input_tagset) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 tagset3 = [] tagset3.append({'Key': 'security1', 'Value': 'public'}) @@ -13439,24 +13434,24 @@ def test_bucket_policy_get_obj_acl_existing_tag(): input_tagset = {'TagSet': tagset3} response = client.put_object_tagging(Bucket=bucket_name, Key='invalidtag', Tagging=input_tagset) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 alt_client = get_alt_client() response = alt_client.get_object_acl(Bucket=bucket_name, Key='publictag') - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 # A get object itself should fail 
since we allowed only GetObjectTagging e = assert_raises(ClientError, alt_client.get_object, Bucket=bucket_name, Key='publictag') status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) + assert status == 403 e = assert_raises(ClientError, alt_client.get_object_tagging, Bucket=bucket_name, Key='privatetag') status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) + assert status == 403 e = assert_raises(ClientError, alt_client.get_object_tagging, Bucket=bucket_name, Key='invalidtag') status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) + assert status == 403 @attr(resource='bucket') @@ -13480,7 +13475,7 @@ def test_object_lock_put_obj_lock(): response = client.put_object_lock_configuration( Bucket=bucket_name, ObjectLockConfiguration=conf) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 conf = {'ObjectLockEnabled':'Enabled', 'Rule': { @@ -13492,10 +13487,10 @@ def test_object_lock_put_obj_lock(): response = client.put_object_lock_configuration( Bucket=bucket_name, ObjectLockConfiguration=conf) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 response = client.get_bucket_versioning(Bucket=bucket_name) - eq(response['Status'], 'Enabled') + assert response['Status'] == 'Enabled' @attr(resource='bucket') @@ -13516,8 +13511,8 @@ def test_object_lock_put_obj_lock_invalid_bucket(): }} e = assert_raises(ClientError, client.put_object_lock_configuration, Bucket=bucket_name, ObjectLockConfiguration=conf) status, error_code = _get_status_and_error_code(e.response) - eq(status, 409) - eq(error_code, 'InvalidBucketState') + assert status == 409 + assert error_code == 'InvalidBucketState' @attr(resource='bucket') @@ -13541,8 +13536,8 @@ def test_object_lock_put_obj_lock_with_days_and_years(): }} e = assert_raises(ClientError, client.put_object_lock_configuration, 
Bucket=bucket_name, ObjectLockConfiguration=conf) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) - eq(error_code, 'MalformedXML') + assert status == 400 + assert error_code == 'MalformedXML' @attr(resource='bucket') @@ -13565,8 +13560,8 @@ def test_object_lock_put_obj_lock_invalid_days(): }} e = assert_raises(ClientError, client.put_object_lock_configuration, Bucket=bucket_name, ObjectLockConfiguration=conf) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) - eq(error_code, 'InvalidRetentionPeriod') + assert status == 400 + assert error_code == 'InvalidRetentionPeriod' @attr(resource='bucket') @@ -13589,8 +13584,8 @@ def test_object_lock_put_obj_lock_invalid_years(): }} e = assert_raises(ClientError, client.put_object_lock_configuration, Bucket=bucket_name, ObjectLockConfiguration=conf) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) - eq(error_code, 'InvalidRetentionPeriod') + assert status == 400 + assert error_code == 'InvalidRetentionPeriod' @attr(resource='bucket') @@ -13613,8 +13608,8 @@ def test_object_lock_put_obj_lock_invalid_mode(): }} e = assert_raises(ClientError, client.put_object_lock_configuration, Bucket=bucket_name, ObjectLockConfiguration=conf) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) - eq(error_code, 'MalformedXML') + assert status == 400 + assert error_code == 'MalformedXML' conf = {'ObjectLockEnabled':'Enabled', 'Rule': { @@ -13625,8 +13620,8 @@ def test_object_lock_put_obj_lock_invalid_mode(): }} e = assert_raises(ClientError, client.put_object_lock_configuration, Bucket=bucket_name, ObjectLockConfiguration=conf) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) - eq(error_code, 'MalformedXML') + assert status == 400 + assert error_code == 'MalformedXML' attr(resource='bucket') @@ -13649,8 +13644,8 @@ def test_object_lock_put_obj_lock_invalid_status(): }} e = assert_raises(ClientError, 
client.put_object_lock_configuration, Bucket=bucket_name, ObjectLockConfiguration=conf) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) - eq(error_code, 'MalformedXML') + assert status == 400 + assert error_code == 'MalformedXML' attr(resource='bucket') @@ -13666,8 +13661,8 @@ def test_object_lock_suspend_versioning(): client.create_bucket(Bucket=bucket_name, ObjectLockEnabledForBucket=True) e = assert_raises(ClientError, client.put_bucket_versioning, Bucket=bucket_name, VersioningConfiguration={'Status': 'Suspended'}) status, error_code = _get_status_and_error_code(e.response) - eq(status, 409) - eq(error_code, 'InvalidBucketState') + assert status == 409 + assert error_code == 'InvalidBucketState' @attr(resource='bucket') @@ -13692,7 +13687,7 @@ def test_object_lock_get_obj_lock(): Bucket=bucket_name, ObjectLockConfiguration=conf) response = client.get_object_lock_configuration(Bucket=bucket_name) - eq(response['ObjectLockConfiguration'], conf) + assert response['ObjectLockConfiguration'] == conf @attr(resource='bucket') @@ -13706,8 +13701,8 @@ def test_object_lock_get_obj_lock_invalid_bucket(): client.create_bucket(Bucket=bucket_name) e = assert_raises(ClientError, client.get_object_lock_configuration, Bucket=bucket_name) status, error_code = _get_status_and_error_code(e.response) - eq(status, 404) - eq(error_code, 'ObjectLockConfigurationNotFoundError') + assert status == 404 + assert error_code == 'ObjectLockConfigurationNotFoundError' @attr(resource='bucket') @@ -13726,7 +13721,7 @@ def test_object_lock_put_obj_retention(): version_id = response['VersionId'] retention = {'Mode':'GOVERNANCE', 'RetainUntilDate':datetime.datetime(2030,1,1,tzinfo=pytz.UTC)} response = client.put_object_retention(Bucket=bucket_name, Key=key, Retention=retention) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 client.delete_object(Bucket=bucket_name, Key=key, VersionId=version_id, 
BypassGovernanceRetention=True) @@ -13745,8 +13740,8 @@ def test_object_lock_put_obj_retention_invalid_bucket(): retention = {'Mode':'GOVERNANCE', 'RetainUntilDate':datetime.datetime(2030,1,1,tzinfo=pytz.UTC)} e = assert_raises(ClientError, client.put_object_retention, Bucket=bucket_name, Key=key, Retention=retention) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) - eq(error_code, 'InvalidRequest') + assert status == 400 + assert error_code == 'InvalidRequest' @attr(resource='bucket') @@ -13765,14 +13760,14 @@ def test_object_lock_put_obj_retention_invalid_mode(): retention = {'Mode':'governance', 'RetainUntilDate':datetime.datetime(2030,1,1,tzinfo=pytz.UTC)} e = assert_raises(ClientError, client.put_object_retention, Bucket=bucket_name, Key=key, Retention=retention) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) - eq(error_code, 'MalformedXML') + assert status == 400 + assert error_code == 'MalformedXML' retention = {'Mode':'abc', 'RetainUntilDate':datetime.datetime(2030,1,1,tzinfo=pytz.UTC)} e = assert_raises(ClientError, client.put_object_retention, Bucket=bucket_name, Key=key, Retention=retention) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) - eq(error_code, 'MalformedXML') + assert status == 400 + assert error_code == 'MalformedXML' @attr(resource='bucket') @@ -13792,7 +13787,7 @@ def test_object_lock_get_obj_retention(): retention = {'Mode':'GOVERNANCE', 'RetainUntilDate':datetime.datetime(2030,1,1,tzinfo=pytz.UTC)} client.put_object_retention(Bucket=bucket_name, Key=key, Retention=retention) response = client.get_object_retention(Bucket=bucket_name, Key=key) - eq(response['Retention'], retention) + assert response['Retention'] == retention client.delete_object(Bucket=bucket_name, Key=key, VersionId=version_id, BypassGovernanceRetention=True) @@ -13833,8 +13828,8 @@ def test_object_lock_get_obj_retention_invalid_bucket(): client.put_object(Bucket=bucket_name, 
Body='abc', Key=key) e = assert_raises(ClientError, client.get_object_retention, Bucket=bucket_name, Key=key) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) - eq(error_code, 'InvalidRequest') + assert status == 400 + assert error_code == 'InvalidRequest' @attr(resource='bucket') @@ -13855,7 +13850,7 @@ def test_object_lock_put_obj_retention_versionid(): retention = {'Mode':'GOVERNANCE', 'RetainUntilDate':datetime.datetime(2030,1,1,tzinfo=pytz.UTC)} client.put_object_retention(Bucket=bucket_name, Key=key, VersionId=version_id, Retention=retention) response = client.get_object_retention(Bucket=bucket_name, Key=key, VersionId=version_id) - eq(response['Retention'], retention) + assert response['Retention'] == retention client.delete_object(Bucket=bucket_name, Key=key, VersionId=version_id, BypassGovernanceRetention=True) @@ -13886,7 +13881,7 @@ def test_object_lock_put_obj_retention_override_default_retention(): retention = {'Mode':'GOVERNANCE', 'RetainUntilDate':datetime.datetime(2030,1,1,tzinfo=pytz.UTC)} client.put_object_retention(Bucket=bucket_name, Key=key, Retention=retention) response = client.get_object_retention(Bucket=bucket_name, Key=key) - eq(response['Retention'], retention) + assert response['Retention'] == retention client.delete_object(Bucket=bucket_name, Key=key, VersionId=version_id, BypassGovernanceRetention=True) @@ -13909,7 +13904,7 @@ def test_object_lock_put_obj_retention_increase_period(): retention2 = {'Mode':'GOVERNANCE', 'RetainUntilDate':datetime.datetime(2030,1,3,tzinfo=pytz.UTC)} client.put_object_retention(Bucket=bucket_name, Key=key, Retention=retention2) response = client.get_object_retention(Bucket=bucket_name, Key=key) - eq(response['Retention'], retention2) + assert response['Retention'] == retention2 client.delete_object(Bucket=bucket_name, Key=key, VersionId=version_id, BypassGovernanceRetention=True) @@ -13932,8 +13927,8 @@ def test_object_lock_put_obj_retention_shorten_period(): retention = 
{'Mode':'GOVERNANCE', 'RetainUntilDate':datetime.datetime(2030,1,1,tzinfo=pytz.UTC)} e = assert_raises(ClientError, client.put_object_retention, Bucket=bucket_name, Key=key, Retention=retention) status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) - eq(error_code, 'AccessDenied') + assert status == 403 + assert error_code == 'AccessDenied' client.delete_object(Bucket=bucket_name, Key=key, VersionId=version_id, BypassGovernanceRetention=True) @@ -13956,7 +13951,7 @@ def test_object_lock_put_obj_retention_shorten_period_bypass(): retention = {'Mode':'GOVERNANCE', 'RetainUntilDate':datetime.datetime(2030,1,1,tzinfo=pytz.UTC)} client.put_object_retention(Bucket=bucket_name, Key=key, Retention=retention, BypassGovernanceRetention=True) response = client.get_object_retention(Bucket=bucket_name, Key=key) - eq(response['Retention'], retention) + assert response['Retention'] == retention client.delete_object(Bucket=bucket_name, Key=key, VersionId=version_id, BypassGovernanceRetention=True) @@ -13978,11 +13973,11 @@ def test_object_lock_delete_object_with_retention(): client.put_object_retention(Bucket=bucket_name, Key=key, Retention=retention) e = assert_raises(ClientError, client.delete_object, Bucket=bucket_name, Key=key, VersionId=response['VersionId']) status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) - eq(error_code, 'AccessDenied') + assert status == 403 + assert error_code == 'AccessDenied' response = client.delete_object(Bucket=bucket_name, Key=key, VersionId=response['VersionId'], BypassGovernanceRetention=True) - eq(response['ResponseMetadata']['HTTPStatusCode'], 204) + assert response['ResponseMetadata']['HTTPStatusCode'] == 204 @attr(resource='bucket') @attr(method='delete') @@ -14003,17 +13998,17 @@ def test_object_lock_delete_object_with_retention_and_marker(): del_response = client.delete_object(Bucket=bucket_name, Key=key) e = assert_raises(ClientError, client.delete_object, Bucket=bucket_name, Key=key, 
VersionId=response['VersionId']) status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) - eq(error_code, 'AccessDenied') + assert status == 403 + assert error_code == 'AccessDenied' client.delete_object(Bucket=bucket_name, Key=key, VersionId=del_response['VersionId']) e = assert_raises(ClientError, client.delete_object, Bucket=bucket_name, Key=key, VersionId=response['VersionId']) status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) - eq(error_code, 'AccessDenied') + assert status == 403 + assert error_code == 'AccessDenied' response = client.delete_object(Bucket=bucket_name, Key=key, VersionId=response['VersionId'], BypassGovernanceRetention=True) - eq(response['ResponseMetadata']['HTTPStatusCode'], 204) + assert response['ResponseMetadata']['HTTPStatusCode'] == 204 @attr(resource='object') @attr(method='delete') @@ -14055,17 +14050,17 @@ def test_object_lock_multi_delete_object_with_retention(): } ) - eq(len(delete_response['Deleted']), 1) - eq(len(delete_response['Errors']), 1) + assert len(delete_response['Deleted']) == 1 + assert len(delete_response['Errors']) == 1 failed_object = delete_response['Errors'][0] - eq(failed_object['Code'], 'AccessDenied') - eq(failed_object['Key'], key1) - eq(failed_object['VersionId'], versionId1) + assert failed_object['Code'] == 'AccessDenied' + assert failed_object['Key'] == key1 + assert failed_object['VersionId'] == versionId1 deleted_object = delete_response['Deleted'][0] - eq(deleted_object['Key'], key2) - eq(deleted_object['VersionId'], versionId2) + assert deleted_object['Key'] == key2 + assert deleted_object['VersionId'] == versionId2 delete_response = client.delete_objects( Bucket=bucket_name, @@ -14081,10 +14076,10 @@ def test_object_lock_multi_delete_object_with_retention(): ) assert( ('Errors' not in delete_response) or (len(delete_response['Errors']) == 0) ) - eq(len(delete_response['Deleted']), 1) + assert len(delete_response['Deleted']) == 1 deleted_object = 
delete_response['Deleted'][0] - eq(deleted_object['Key'], key1) - eq(deleted_object['VersionId'], versionId1) + assert deleted_object['Key'] == key1 + assert deleted_object['VersionId'] == versionId1 @@ -14103,9 +14098,9 @@ def test_object_lock_put_legal_hold(): client.put_object(Bucket=bucket_name, Body='abc', Key=key) legal_hold = {'Status': 'ON'} response = client.put_object_legal_hold(Bucket=bucket_name, Key=key, LegalHold=legal_hold) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 response = client.put_object_legal_hold(Bucket=bucket_name, Key=key, LegalHold={'Status':'OFF'}) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 @attr(resource='bucket') @@ -14122,8 +14117,8 @@ def test_object_lock_put_legal_hold_invalid_bucket(): legal_hold = {'Status': 'ON'} e = assert_raises(ClientError, client.put_object_legal_hold, Bucket=bucket_name, Key=key, LegalHold=legal_hold) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) - eq(error_code, 'InvalidRequest') + assert status == 400 + assert error_code == 'InvalidRequest' @attr(resource='bucket') @@ -14142,8 +14137,8 @@ def test_object_lock_put_legal_hold_invalid_status(): legal_hold = {'Status': 'abc'} e = assert_raises(ClientError, client.put_object_legal_hold, Bucket=bucket_name, Key=key, LegalHold=legal_hold) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) - eq(error_code, 'MalformedXML') + assert status == 400 + assert error_code == 'MalformedXML' @attr(resource='bucket') @@ -14162,11 +14157,11 @@ def test_object_lock_get_legal_hold(): legal_hold = {'Status': 'ON'} client.put_object_legal_hold(Bucket=bucket_name, Key=key, LegalHold=legal_hold) response = client.get_object_legal_hold(Bucket=bucket_name, Key=key) - eq(response['LegalHold'], legal_hold) + assert response['LegalHold'] == legal_hold legal_hold_off = 
{'Status': 'OFF'} client.put_object_legal_hold(Bucket=bucket_name, Key=key, LegalHold=legal_hold_off) response = client.get_object_legal_hold(Bucket=bucket_name, Key=key) - eq(response['LegalHold'], legal_hold_off) + assert response['LegalHold'] == legal_hold_off @attr(resource='bucket') @@ -14182,8 +14177,8 @@ def test_object_lock_get_legal_hold_invalid_bucket(): client.put_object(Bucket=bucket_name, Body='abc', Key=key) e = assert_raises(ClientError, client.get_object_legal_hold, Bucket=bucket_name, Key=key) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) - eq(error_code, 'InvalidRequest') + assert status == 400 + assert error_code == 'InvalidRequest' @attr(resource='bucket') @@ -14202,8 +14197,8 @@ def test_object_lock_delete_object_with_legal_hold_on(): client.put_object_legal_hold(Bucket=bucket_name, Key=key, LegalHold={'Status': 'ON'}) e = assert_raises(ClientError, client.delete_object, Bucket=bucket_name, Key=key, VersionId=response['VersionId']) status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) - eq(error_code, 'AccessDenied') + assert status == 403 + assert error_code == 'AccessDenied' client.put_object_legal_hold(Bucket=bucket_name, Key=key, LegalHold={'Status':'OFF'}) @@ -14222,7 +14217,7 @@ def test_object_lock_delete_object_with_legal_hold_off(): response = client.put_object(Bucket=bucket_name, Body='abc', Key=key) client.put_object_legal_hold(Bucket=bucket_name, Key=key, LegalHold={'Status': 'OFF'}) response = client.delete_object(Bucket=bucket_name, Key=key, VersionId=response['VersionId']) - eq(response['ResponseMetadata']['HTTPStatusCode'], 204) + assert response['ResponseMetadata']['HTTPStatusCode'] == 204 @attr(resource='bucket') @@ -14243,9 +14238,9 @@ def test_object_lock_get_obj_metadata(): retention = {'Mode':'GOVERNANCE', 'RetainUntilDate':datetime.datetime(2030,1,1,tzinfo=pytz.UTC)} client.put_object_retention(Bucket=bucket_name, Key=key, Retention=retention) response = 
client.head_object(Bucket=bucket_name, Key=key) - eq(response['ObjectLockMode'], retention['Mode']) - eq(response['ObjectLockRetainUntilDate'], retention['RetainUntilDate']) - eq(response['ObjectLockLegalHoldStatus'], legal_hold['Status']) + assert response['ObjectLockMode'] == retention['Mode'] + assert response['ObjectLockRetainUntilDate'] == retention['RetainUntilDate'] + assert response['ObjectLockLegalHoldStatus'] == legal_hold['Status'] client.put_object_legal_hold(Bucket=bucket_name, Key=key, LegalHold={'Status':'OFF'}) client.delete_object(Bucket=bucket_name, Key=key, VersionId=response['VersionId'], BypassGovernanceRetention=True) @@ -14267,9 +14262,9 @@ def test_object_lock_uploading_obj(): ObjectLockRetainUntilDate=datetime.datetime(2030,1,1,tzinfo=pytz.UTC), ObjectLockLegalHoldStatus='ON') response = client.head_object(Bucket=bucket_name, Key=key) - eq(response['ObjectLockMode'], 'GOVERNANCE') - eq(response['ObjectLockRetainUntilDate'], datetime.datetime(2030,1,1,tzinfo=pytz.UTC)) - eq(response['ObjectLockLegalHoldStatus'], 'ON') + assert response['ObjectLockMode'] == 'GOVERNANCE' + assert response['ObjectLockRetainUntilDate'] == datetime.datetime(2030,1,1,tzinfo=pytz.UTC) + assert response['ObjectLockLegalHoldStatus'] == 'ON' client.put_object_legal_hold(Bucket=bucket_name, Key=key, LegalHold={'Status':'OFF'}) client.delete_object(Bucket=bucket_name, Key=key, VersionId=response['VersionId'], BypassGovernanceRetention=True) @@ -14313,8 +14308,8 @@ def test_object_lock_changing_mode_from_governance_without_bypass(): retention = {'Mode':'COMPLIANCE', 'RetainUntilDate':retain_until} e = assert_raises(ClientError, client.put_object_retention, Bucket=bucket_name, Key=key, Retention=retention) status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) - eq(error_code, 'AccessDenied') + assert status == 403 + assert error_code == 'AccessDenied' @attr(resource='object') @attr(method='put') @@ -14336,8 +14331,8 @@ def 
test_object_lock_changing_mode_from_compliance(): retention = {'Mode':'GOVERNANCE', 'RetainUntilDate':retain_until} e = assert_raises(ClientError, client.put_object_retention, Bucket=bucket_name, Key=key, Retention=retention) status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) - eq(error_code, 'AccessDenied') + assert status == 403 + assert error_code == 'AccessDenied' @attr(resource='object') @attr(method='copy') @@ -14353,7 +14348,7 @@ def test_copy_object_ifmatch_good(): client.copy_object(Bucket=bucket_name, CopySource=bucket_name+'/foo', CopySourceIfMatch=resp['ETag'], Key='bar') response = client.get_object(Bucket=bucket_name, Key='bar') body = _get_body(response) - eq(body, 'bar') + assert body == 'bar' @attr(resource='object') @attr(method='copy') @@ -14369,8 +14364,8 @@ def test_copy_object_ifmatch_failed(): e = assert_raises(ClientError, client.copy_object, Bucket=bucket_name, CopySource=bucket_name+'/foo', CopySourceIfMatch='ABCORZ', Key='bar') status, error_code = _get_status_and_error_code(e.response) - eq(status, 412) - eq(error_code, 'PreconditionFailed') + assert status == 412 + assert error_code == 'PreconditionFailed' @attr(resource='object') @attr(method='copy') @@ -14386,8 +14381,8 @@ def test_copy_object_ifnonematch_good(): e = assert_raises(ClientError, client.copy_object, Bucket=bucket_name, CopySource=bucket_name+'/foo', CopySourceIfNoneMatch=resp['ETag'], Key='bar') status, error_code = _get_status_and_error_code(e.response) - eq(status, 412) - eq(error_code, 'PreconditionFailed') + assert status == 412 + assert error_code == 'PreconditionFailed' @attr(resource='object') @attr(method='copy') @@ -14403,7 +14398,7 @@ def test_copy_object_ifnonematch_failed(): client.copy_object(Bucket=bucket_name, CopySource=bucket_name+'/foo', CopySourceIfNoneMatch='ABCORZ', Key='bar') response = client.get_object(Bucket=bucket_name, Key='bar') body = _get_body(response) - eq(body, 'bar') + assert body == 'bar' 
@attr(resource='object') @attr(method='get') @@ -14417,8 +14412,8 @@ def test_object_read_unreadable(): client = get_client() e = assert_raises(ClientError, client.get_object, Bucket=bucket_name, Key='\xae\x8a-') status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) - eq(e.response['Error']['Message'], 'Couldn\'t parse the specified URI.') + assert status == 400 + assert e.response['Error']['Message'] == 'Couldn\'t parse the specified URI.' @attr(resource='bucket') @attr(method='get') @@ -14451,7 +14446,7 @@ def test_get_bucket_policy_status(): bucket_name = get_new_bucket() client = get_client() resp = client.get_bucket_policy_status(Bucket=bucket_name) - eq(resp['PolicyStatus']['IsPublic'],False) + assert resp['PolicyStatus']['IsPublic'] == False @attr(resource='bucket') @attr(method='get') @@ -14464,7 +14459,7 @@ def test_get_public_acl_bucket_policy_status(): client = get_client() client.put_bucket_acl(Bucket=bucket_name, ACL='public-read') resp = client.get_bucket_policy_status(Bucket=bucket_name) - eq(resp['PolicyStatus']['IsPublic'],True) + assert resp['PolicyStatus']['IsPublic'] == True @attr(resource='bucket') @attr(method='get') @@ -14477,7 +14472,7 @@ def test_get_authpublic_acl_bucket_policy_status(): client = get_client() client.put_bucket_acl(Bucket=bucket_name, ACL='authenticated-read') resp = client.get_bucket_policy_status(Bucket=bucket_name) - eq(resp['PolicyStatus']['IsPublic'],True) + assert resp['PolicyStatus']['IsPublic'] == True @attr(resource='bucket') @@ -14491,7 +14486,7 @@ def test_get_publicpolicy_acl_bucket_policy_status(): client = get_client() resp = client.get_bucket_policy_status(Bucket=bucket_name) - eq(resp['PolicyStatus']['IsPublic'],False) + assert resp['PolicyStatus']['IsPublic'] == False resource1 = "arn:aws:s3:::" + bucket_name resource2 = "arn:aws:s3:::" + bucket_name + "/*" @@ -14511,7 +14506,7 @@ def test_get_publicpolicy_acl_bucket_policy_status(): client.put_bucket_policy(Bucket=bucket_name, 
Policy=policy_document) resp = client.get_bucket_policy_status(Bucket=bucket_name) - eq(resp['PolicyStatus']['IsPublic'],True) + assert resp['PolicyStatus']['IsPublic'] == True @attr(resource='bucket') @@ -14525,7 +14520,7 @@ def test_get_nonpublicpolicy_acl_bucket_policy_status(): client = get_client() resp = client.get_bucket_policy_status(Bucket=bucket_name) - eq(resp['PolicyStatus']['IsPublic'],False) + assert resp['PolicyStatus']['IsPublic'] == False resource1 = "arn:aws:s3:::" + bucket_name resource2 = "arn:aws:s3:::" + bucket_name + "/*" @@ -14549,7 +14544,7 @@ def test_get_nonpublicpolicy_acl_bucket_policy_status(): client.put_bucket_policy(Bucket=bucket_name, Policy=policy_document) resp = client.get_bucket_policy_status(Bucket=bucket_name) - eq(resp['PolicyStatus']['IsPublic'],False) + assert resp['PolicyStatus']['IsPublic'] == False @attr(resource='bucket') @@ -14562,7 +14557,7 @@ def test_get_nonpublicpolicy_deny_bucket_policy_status(): client = get_client() resp = client.get_bucket_policy_status(Bucket=bucket_name) - eq(resp['PolicyStatus']['IsPublic'],False) + assert resp['PolicyStatus']['IsPublic'] == False resource1 = "arn:aws:s3:::" + bucket_name resource2 = "arn:aws:s3:::" + bucket_name + "/*" @@ -14582,7 +14577,7 @@ def test_get_nonpublicpolicy_deny_bucket_policy_status(): client.put_bucket_policy(Bucket=bucket_name, Policy=policy_document) resp = client.get_bucket_policy_status(Bucket=bucket_name) - eq(resp['PolicyStatus']['IsPublic'],True) + assert resp['PolicyStatus']['IsPublic'] == True @attr(resource='bucket') @attr(method='get') @@ -14595,10 +14590,10 @@ def test_get_default_public_block(): client = get_client() resp = client.get_public_access_block(Bucket=bucket_name) - eq(resp['PublicAccessBlockConfiguration']['BlockPublicAcls'], False) - eq(resp['PublicAccessBlockConfiguration']['BlockPublicPolicy'], False) - eq(resp['PublicAccessBlockConfiguration']['IgnorePublicAcls'], False) - 
eq(resp['PublicAccessBlockConfiguration']['RestrictPublicBuckets'], False) + assert resp['PublicAccessBlockConfiguration']['BlockPublicAcls'] == False + assert resp['PublicAccessBlockConfiguration']['BlockPublicPolicy'] == False + assert resp['PublicAccessBlockConfiguration']['IgnorePublicAcls'] == False + assert resp['PublicAccessBlockConfiguration']['RestrictPublicBuckets'] == False @attr(resource='bucket') @attr(method='put') @@ -14618,10 +14613,10 @@ def test_put_public_block(): client.put_public_access_block(Bucket=bucket_name, PublicAccessBlockConfiguration=access_conf) resp = client.get_public_access_block(Bucket=bucket_name) - eq(resp['PublicAccessBlockConfiguration']['BlockPublicAcls'], access_conf['BlockPublicAcls']) - eq(resp['PublicAccessBlockConfiguration']['BlockPublicPolicy'], access_conf['BlockPublicPolicy']) - eq(resp['PublicAccessBlockConfiguration']['IgnorePublicAcls'], access_conf['IgnorePublicAcls']) - eq(resp['PublicAccessBlockConfiguration']['RestrictPublicBuckets'], access_conf['RestrictPublicBuckets']) + assert resp['PublicAccessBlockConfiguration']['BlockPublicAcls'] == access_conf['BlockPublicAcls'] + assert resp['PublicAccessBlockConfiguration']['BlockPublicPolicy'] == access_conf['BlockPublicPolicy'] + assert resp['PublicAccessBlockConfiguration']['IgnorePublicAcls'] == access_conf['IgnorePublicAcls'] + assert resp['PublicAccessBlockConfiguration']['RestrictPublicBuckets'] == access_conf['RestrictPublicBuckets'] @attr(resource='bucket') @@ -14642,20 +14637,20 @@ def test_block_public_put_bucket_acls(): client.put_public_access_block(Bucket=bucket_name, PublicAccessBlockConfiguration=access_conf) resp = client.get_public_access_block(Bucket=bucket_name) - eq(resp['PublicAccessBlockConfiguration']['BlockPublicAcls'], access_conf['BlockPublicAcls']) - eq(resp['PublicAccessBlockConfiguration']['BlockPublicPolicy'], access_conf['BlockPublicPolicy']) + assert resp['PublicAccessBlockConfiguration']['BlockPublicAcls'] == 
access_conf['BlockPublicAcls'] + assert resp['PublicAccessBlockConfiguration']['BlockPublicPolicy'] == access_conf['BlockPublicPolicy'] e = assert_raises(ClientError, client.put_bucket_acl, Bucket=bucket_name,ACL='public-read') status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) + assert status == 403 e = assert_raises(ClientError, client.put_bucket_acl, Bucket=bucket_name,ACL='public-read-write') status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) + assert status == 403 e = assert_raises(ClientError, client.put_bucket_acl, Bucket=bucket_name,ACL='authenticated-read') status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) + assert status == 403 @attr(resource='bucket') @@ -14675,21 +14670,21 @@ def test_block_public_object_canned_acls(): client.put_public_access_block(Bucket=bucket_name, PublicAccessBlockConfiguration=access_conf) # resp = client.get_public_access_block(Bucket=bucket_name) - # eq(resp['PublicAccessBlockConfiguration']['BlockPublicAcls'], access_conf['BlockPublicAcls']) - # eq(resp['PublicAccessBlockConfiguration']['BlockPublicPolicy'], access_conf['BlockPublicPolicy']) + # assert resp['PublicAccessBlockConfiguration']['BlockPublicAcls'] == access_conf['BlockPublicAcls'] + # assert resp['PublicAccessBlockConfiguration']['BlockPublicPolicy'] == access_conf['BlockPublicPolicy'] #FIXME: use empty body until #42208 e = assert_raises(ClientError, client.put_object, Bucket=bucket_name, Key='foo1', Body='', ACL='public-read') status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) + assert status == 403 e = assert_raises(ClientError, client.put_object, Bucket=bucket_name, Key='foo2', Body='', ACL='public-read') status, error_code = _get_status_and_error_code(e.response) - eq(status, 403) + assert status == 403 e = assert_raises(ClientError, client.put_object, Bucket=bucket_name, Key='foo3', Body='', ACL='authenticated-read') status, error_code = 
_get_status_and_error_code(e.response) - eq(status, 403) + assert status == 403 @attr(resource='bucket') @@ -14730,7 +14725,7 @@ def test_ignore_public_acls(): client.put_object(Bucket=bucket_name,Key='key1',Body='abcde',ACL='public-read') resp=alt_client.get_object(Bucket=bucket_name, Key='key1') - eq(_get_body(resp), 'abcde') + assert _get_body(resp) == 'abcde' access_conf = {'BlockPublicAcls': False, 'IgnorePublicAcls': True, @@ -14774,7 +14769,7 @@ def test_multipart_upload_on_a_bucket_with_policy(): client.put_bucket_policy(Bucket=bucket_name, Policy=policy_document) (upload_id, data, parts) = _multipart_upload(bucket_name=bucket_name, key=key, size=objlen, client=client) response = client.complete_multipart_upload(Bucket=bucket_name, Key=key, UploadId=upload_id, MultipartUpload={'Parts': parts}) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 def _put_bucket_encryption_s3(client, bucket_name): """ @@ -14790,7 +14785,7 @@ def _put_bucket_encryption_s3(client, bucket_name): ] } response = client.put_bucket_encryption(Bucket=bucket_name, ServerSideEncryptionConfiguration=server_side_encryption_conf) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 def _put_bucket_encryption_kms(client, bucket_name): """ @@ -14810,7 +14805,7 @@ def _put_bucket_encryption_kms(client, bucket_name): ] } response = client.put_bucket_encryption(Bucket=bucket_name, ServerSideEncryptionConfiguration=server_side_encryption_conf) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 @attr(resource='bucket') @@ -14852,13 +14847,13 @@ def test_get_bucket_encryption_s3(): except ClientError as e: response_code = e.response['Error']['Code'] - eq(response_code, 'ServerSideEncryptionConfigurationNotFoundError') + assert response_code == 'ServerSideEncryptionConfigurationNotFoundError' 
_put_bucket_encryption_s3(client, bucket_name) response = client.get_bucket_encryption(Bucket=bucket_name) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) - eq(response['ServerSideEncryptionConfiguration']['Rules'][0]['ApplyServerSideEncryptionByDefault']['SSEAlgorithm'], 'AES256') + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 + assert response['ServerSideEncryptionConfiguration']['Rules'][0]['ApplyServerSideEncryptionByDefault']['SSEAlgorithm'] == 'AES256' @attr(resource='bucket') @@ -14880,14 +14875,14 @@ def test_get_bucket_encryption_kms(): except ClientError as e: response_code = e.response['Error']['Code'] - eq(response_code, 'ServerSideEncryptionConfigurationNotFoundError') + assert response_code == 'ServerSideEncryptionConfigurationNotFoundError' _put_bucket_encryption_kms(client, bucket_name) response = client.get_bucket_encryption(Bucket=bucket_name) - eq(response['ResponseMetadata']['HTTPStatusCode'], 200) - eq(response['ServerSideEncryptionConfiguration']['Rules'][0]['ApplyServerSideEncryptionByDefault']['SSEAlgorithm'], 'aws:kms') - eq(response['ServerSideEncryptionConfiguration']['Rules'][0]['ApplyServerSideEncryptionByDefault']['KMSMasterKeyID'], kms_keyid) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 + assert response['ServerSideEncryptionConfiguration']['Rules'][0]['ApplyServerSideEncryptionByDefault']['SSEAlgorithm'] == 'aws:kms' + assert response['ServerSideEncryptionConfiguration']['Rules'][0]['ApplyServerSideEncryptionByDefault']['KMSMasterKeyID'] == kms_keyid @attr(resource='bucket') @@ -14901,12 +14896,12 @@ def test_delete_bucket_encryption_s3(): client = get_client() response = client.delete_bucket_encryption(Bucket=bucket_name) - eq(response['ResponseMetadata']['HTTPStatusCode'], 204) + assert response['ResponseMetadata']['HTTPStatusCode'] == 204 _put_bucket_encryption_s3(client, bucket_name) response = client.delete_bucket_encryption(Bucket=bucket_name) - 
eq(response['ResponseMetadata']['HTTPStatusCode'], 204) + assert response['ResponseMetadata']['HTTPStatusCode'] == 204 response_code = "" try: @@ -14914,7 +14909,7 @@ def test_delete_bucket_encryption_s3(): except ClientError as e: response_code = e.response['Error']['Code'] - eq(response_code, 'ServerSideEncryptionConfigurationNotFoundError') + assert response_code == 'ServerSideEncryptionConfigurationNotFoundError' @attr(resource='bucket') @@ -14928,12 +14923,12 @@ def test_delete_bucket_encryption_kms(): client = get_client() response = client.delete_bucket_encryption(Bucket=bucket_name) - eq(response['ResponseMetadata']['HTTPStatusCode'], 204) + assert response['ResponseMetadata']['HTTPStatusCode'] == 204 _put_bucket_encryption_kms(client, bucket_name) response = client.delete_bucket_encryption(Bucket=bucket_name) - eq(response['ResponseMetadata']['HTTPStatusCode'], 204) + assert response['ResponseMetadata']['HTTPStatusCode'] == 204 response_code = "" try: @@ -14941,7 +14936,7 @@ def test_delete_bucket_encryption_kms(): except ClientError as e: response_code = e.response['Error']['Code'] - eq(response_code, 'ServerSideEncryptionConfigurationNotFoundError') + assert response_code == 'ServerSideEncryptionConfigurationNotFoundError' def _test_sse_s3_default_upload(file_size): """ @@ -14955,12 +14950,12 @@ def _test_sse_s3_default_upload(file_size): data = 'A'*file_size response = client.put_object(Bucket=bucket_name, Key='testobj', Body=data) - eq(response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'], 'AES256') + assert response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'] == 'AES256' response = client.get_object(Bucket=bucket_name, Key='testobj') - eq(response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'], 'AES256') + assert response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'] == 'AES256' body = _get_body(response) - eq(body, data) + assert body == data 
@attr(resource='object') @attr(method='put') @@ -15037,14 +15032,14 @@ def _test_sse_kms_default_upload(file_size): data = 'A'*file_size response = client.put_object(Bucket=bucket_name, Key='testobj', Body=data) - eq(response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'], 'aws:kms') - eq(response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption-aws-kms-key-id'], kms_keyid) + assert response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'] == 'aws:kms' + assert response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption-aws-kms-key-id'] == kms_keyid response = client.get_object(Bucket=bucket_name, Key='testobj') - eq(response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'], 'aws:kms') - eq(response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption-aws-kms-key-id'], kms_keyid) + assert response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'] == 'aws:kms' + assert response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption-aws-kms-key-id'] == kms_keyid body = _get_body(response) - eq(body, data) + assert body == data @attr(resource='object') @attr(method='put') @@ -15130,7 +15125,7 @@ def test_sse_s3_default_method_head(): client.put_object(Bucket=bucket_name, Key=key, Body=data) response = client.head_object(Bucket=bucket_name, Key=key) - eq(response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'], 'AES256') + assert response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'] == 'AES256' sse_s3_headers = { 'x-amz-server-side-encryption': 'AES256', @@ -15139,7 +15134,7 @@ def test_sse_s3_default_method_head(): client.meta.events.register('before-call.s3.HeadObject', lf) e = assert_raises(ClientError, client.head_object, Bucket=bucket_name, Key=key) status, error_code = _get_status_and_error_code(e.response) - eq(status, 400) + assert status == 400 @attr(resource='object') @attr(method='put') @@ 
-15176,22 +15171,22 @@ def test_sse_s3_default_multipart_upload(): response = client.head_bucket(Bucket=bucket_name) rgw_object_count = int(response['ResponseMetadata']['HTTPHeaders'].get('x-rgw-object-count', 1)) - eq(rgw_object_count, 1) + assert rgw_object_count == 1 rgw_bytes_used = int(response['ResponseMetadata']['HTTPHeaders'].get('x-rgw-bytes-used', objlen)) - eq(rgw_bytes_used, objlen) + assert rgw_bytes_used == objlen lf = (lambda **kwargs: kwargs['params']['headers'].update(part_headers)) client.meta.events.register('before-call.s3.UploadPart', lf) response = client.get_object(Bucket=bucket_name, Key=key) - eq(response['Metadata'], metadata) - eq(response['ResponseMetadata']['HTTPHeaders']['content-type'], content_type) + assert response['Metadata'] == metadata + assert response['ResponseMetadata']['HTTPHeaders']['content-type'] == content_type body = _get_body(response) - eq(body, data) + assert body == data size = response['ContentLength'] - eq(len(body), size) + assert len(body) == size _check_content_using_range(key, bucket_name, data, 1000000) _check_content_using_range(key, bucket_name, data, 10000000) @@ -15243,12 +15238,12 @@ def test_sse_s3_default_post_object_authenticated_request(): ('file', ('bar'))]) r = requests.post(url, files = payload) - eq(r.status_code, 204) + assert r.status_code == 204 response = client.get_object(Bucket=bucket_name, Key='foo.txt') - eq(response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'], 'AES256') + assert response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'] == 'AES256' body = _get_body(response) - eq(body, 'bar') + assert body == 'bar' @attr(resource='object') @attr(method='post') @@ -15298,13 +15293,13 @@ def test_sse_kms_default_post_object_authenticated_request(): ('file', ('bar'))]) r = requests.post(url, files = payload) - eq(r.status_code, 204) + assert r.status_code == 204 response = client.get_object(Bucket=bucket_name, Key='foo.txt') - 
eq(response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'], 'aws:kms') - eq(response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption-aws-kms-key-id'], kms_keyid) + assert response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'] == 'aws:kms' + assert response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption-aws-kms-key-id'] == kms_keyid body = _get_body(response) - eq(body, 'bar') + assert body == 'bar' def _test_sse_s3_encrypted_upload(file_size): @@ -15316,12 +15311,12 @@ def _test_sse_s3_encrypted_upload(file_size): data = 'A'*file_size response = client.put_object(Bucket=bucket_name, Key='testobj', Body=data, ServerSideEncryption='AES256') - eq(response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'], 'AES256') + assert response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'] == 'AES256' response = client.get_object(Bucket=bucket_name, Key='testobj') - eq(response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'], 'AES256') + assert response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'] == 'AES256' body = _get_body(response) - eq(body, data) + assert body == data @attr(resource='object') @attr(method='put') diff --git a/s3tests_boto3/functional/test_s3select.py b/s3tests_boto3/functional/test_s3select.py index 5fa4f1c..5a98a37 100644 --- a/s3tests_boto3/functional/test_s3select.py +++ b/s3tests_boto3/functional/test_s3select.py @@ -7,7 +7,6 @@ from nose.plugins.attrib import attr from botocore.exceptions import ClientError import uuid -from nose.tools import eq_ as eq from . 
import ( configfile, @@ -120,8 +119,7 @@ def s3select_assert_result(a,b): else: assert a != "" assert b != "" - - nose.tools.assert_equal(a,b) + assert a == b def create_csv_object_for_datetime(rows,columns): result = "" @@ -225,7 +223,7 @@ def upload_csv_object(bucket_name,new_key,obj): # validate uploaded object c2 = get_client() response = c2.get_object(Bucket=bucket_name, Key=new_key) - eq(response['Body'].read().decode('utf-8'), obj, 's3select error[ downloaded object not equal to uploaded objecy') + assert response['Body'].read().decode('utf-8') == obj, 's3select error[ downloaded object not equal to uploaded objecy' def run_s3select(bucket,key,query,column_delim=",",row_delim="\n",quot_char='"',esc_char='\\',csv_header_info="NONE", progress = False): @@ -297,7 +295,7 @@ def remove_xml_tags_from_result(obj): x = bool(re.search("^failure.*$", result_strip)) if x: logging.info(result) - nose.tools.assert_equal(x, False) + assert x == False return result @@ -782,19 +780,19 @@ def test_nullif_expressions(): res_s3select = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,"select count(0) from stdin where _1 = _2 ;") ).replace("\n","") - nose.tools.assert_equal( res_s3select_nullif, res_s3select) + assert res_s3select_nullif == res_s3select res_s3select_nullif = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,"select count(0) from stdin where not nullif(_1,_2) is null ;") ).replace("\n","") res_s3select = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,"select count(0) from stdin where _1 != _2 ;") ).replace("\n","") - nose.tools.assert_equal( res_s3select_nullif, res_s3select) + assert res_s3select_nullif == res_s3select res_s3select_nullif = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,"select count(0) from stdin where nullif(_1,_2) = _1 ;") ).replace("\n","") res_s3select = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,"select count(0) from stdin where _1 != _2 ;") 
).replace("\n","") - nose.tools.assert_equal( res_s3select_nullif, res_s3select) + assert res_s3select_nullif == res_s3select @attr('s3select') @pytest.mark.s3select @@ -808,11 +806,11 @@ def test_lowerupper_expressions(): res_s3select = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select lower("AB12cd$$") from stdin ;') ).replace("\n","") - nose.tools.assert_equal( res_s3select, "ab12cd$$") + assert res_s3select == "ab12cd$$" res_s3select = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select upper("ab12CD$$") from stdin ;') ).replace("\n","") - nose.tools.assert_equal( res_s3select, "AB12CD$$") + assert res_s3select == "AB12CD$$" @attr('s3select') @pytest.mark.s3select @@ -829,31 +827,31 @@ def test_in_expressions(): res_s3select = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select int(_1) from stdin where int(_1) = 1;')).replace("\n","") - nose.tools.assert_equal( res_s3select_in, res_s3select ) + assert res_s3select_in == res_s3select res_s3select_in = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select int(_1) from stdin where int(_1) in(1,0);')).replace("\n","") res_s3select = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select int(_1) from stdin where int(_1) = 1 or int(_1) = 0;')).replace("\n","") - nose.tools.assert_equal( res_s3select_in, res_s3select ) + assert res_s3select_in == res_s3select res_s3select_in = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select int(_2) from stdin where int(_2) in(1,0,2);')).replace("\n","") res_s3select = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select int(_2) from stdin where int(_2) = 1 or int(_2) = 0 or int(_2) = 2;')).replace("\n","") - nose.tools.assert_equal( res_s3select_in, res_s3select ) + assert res_s3select_in == res_s3select res_s3select_in = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select int(_2) from stdin where 
int(_2)*2 in(int(_3)*2,int(_4)*3,int(_5)*5);')).replace("\n","") res_s3select = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select int(_2) from stdin where int(_2)*2 = int(_3)*2 or int(_2)*2 = int(_4)*3 or int(_2)*2 = int(_5)*5;')).replace("\n","") - nose.tools.assert_equal( res_s3select_in, res_s3select ) + assert res_s3select_in == res_s3select res_s3select_in = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select int(_1) from stdin where character_length(_1) = 2 and substring(_1,2,1) in ("3");')).replace("\n","") res_s3select = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select int(_1) from stdin where _1 like "_3";')).replace("\n","") - nose.tools.assert_equal( res_s3select_in, res_s3select ) + assert res_s3select_in == res_s3select @attr('s3select') @pytest.mark.s3select @@ -869,37 +867,37 @@ def test_like_expressions(): res_s3select = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name, 'select count(*) from stdin where substring(_1,11,4) = "aeio" ;')).replace("\n","") - nose.tools.assert_equal( res_s3select_in, res_s3select ) + assert res_s3select_in == res_s3select res_s3select_in = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select count(*) from stdin where _1 like "cbcd%";')).replace("\n","") res_s3select = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name, 'select count(*) from stdin where substring(_1,1,4) = "cbcd";')).replace("\n","") - nose.tools.assert_equal( res_s3select_in, res_s3select ) + assert res_s3select_in == res_s3select res_s3select_in = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select count(*) from stdin where _3 like "%y[y-z]";')).replace("\n","") res_s3select = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name, 'select count(*) from stdin where substring(_3,character_length(_3),1) between "y" and "z" and substring(_3,character_length(_3)-1,1) = "y";')).replace("\n","") - 
nose.tools.assert_equal( res_s3select_in, res_s3select ) + assert res_s3select_in == res_s3select res_s3select_in = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select count(*) from stdin where _2 like "%yz";')).replace("\n","") res_s3select = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name, 'select count(*) from stdin where substring(_2,character_length(_2),1) = "z" and substring(_2,character_length(_2)-1,1) = "y";')).replace("\n","") - nose.tools.assert_equal( res_s3select_in, res_s3select ) + assert res_s3select_in == res_s3select res_s3select_in = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select count(*) from stdin where _3 like "c%z";')).replace("\n","") res_s3select = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name, 'select count(*) from stdin where substring(_3,character_length(_3),1) = "z" and substring(_3,1,1) = "c";')).replace("\n","") - nose.tools.assert_equal( res_s3select_in, res_s3select ) + assert res_s3select_in == res_s3select res_s3select_in = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,'select count(*) from stdin where _2 like "%xy_";')).replace("\n","") res_s3select = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name, 'select count(*) from stdin where substring(_2,character_length(_2)-1,1) = "y" and substring(_2,character_length(_2)-2,1) = "x";')).replace("\n","") - nose.tools.assert_equal( res_s3select_in, res_s3select ) + assert res_s3select_in == res_s3select @attr('s3select') diff --git a/s3tests_boto3/functional/test_sts.py b/s3tests_boto3/functional/test_sts.py index f926403..dc4fafe 100644 --- a/s3tests_boto3/functional/test_sts.py +++ b/s3tests_boto3/functional/test_sts.py @@ -2,7 +2,6 @@ import boto3 import botocore.session from botocore.exceptions import ClientError from botocore.exceptions import ParamValidationError -from nose.tools import eq_ as eq from nose.plugins.attrib import attr import pytest import 
isodate @@ -167,10 +166,10 @@ def test_get_session_token(): user_policy = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Deny\",\"Action\":\"s3:*\",\"Resource\":[\"*\"],\"Condition\":{\"BoolIfExists\":{\"sts:authentication\":\"false\"}}},{\"Effect\":\"Allow\",\"Action\":\"sts:GetSessionToken\",\"Resource\":\"*\",\"Condition\":{\"BoolIfExists\":{\"sts:authentication\":\"false\"}}}]}" (resp_err,resp,policy_name)=put_user_policy(iam_client,sts_user_id,None,user_policy) - eq(resp['ResponseMetadata']['HTTPStatusCode'],200) + assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 response=sts_client.get_session_token() - eq(response['ResponseMetadata']['HTTPStatusCode'],200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 s3_client=boto3.client('s3', aws_access_key_id = response['Credentials']['AccessKeyId'], @@ -182,7 +181,7 @@ def test_get_session_token(): bucket_name = get_new_bucket_name() try: s3bucket = s3_client.create_bucket(Bucket=bucket_name) - eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200) + assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200 finish=s3_client.delete_bucket(Bucket=bucket_name) finally: # clean up user policy even if create_bucket/delete_bucket fails iam_client.delete_user_policy(UserName=sts_user_id,PolicyName=policy_name) @@ -206,10 +205,10 @@ def test_get_session_token_permanent_creds_denied(): user_policy = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Deny\",\"Action\":\"s3:*\",\"Resource\":[\"*\"],\"Condition\":{\"BoolIfExists\":{\"sts:authentication\":\"false\"}}},{\"Effect\":\"Allow\",\"Action\":\"sts:GetSessionToken\",\"Resource\":\"*\",\"Condition\":{\"BoolIfExists\":{\"sts:authentication\":\"false\"}}}]}" (resp_err,resp,policy_name)=put_user_policy(iam_client,sts_user_id,None,user_policy) - eq(resp['ResponseMetadata']['HTTPStatusCode'],200) + assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 response=sts_client.get_session_token() - 
eq(response['ResponseMetadata']['HTTPStatusCode'],200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 s3_client=boto3.client('s3', aws_access_key_id = s3_main_access_key, @@ -223,7 +222,7 @@ def test_get_session_token_permanent_creds_denied(): s3bucket = s3_client.create_bucket(Bucket=bucket_name) except ClientError as e: s3bucket_error = e.response.get("Error", {}).get("Code") - eq(s3bucket_error,'AccessDenied') + assert s3bucket_error == 'AccessDenied' iam_client.delete_user_policy(UserName=sts_user_id,PolicyName=policy_name) @attr(resource='assume role') @@ -243,14 +242,14 @@ def test_assume_role_allow(): policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"AWS\":[\"arn:aws:iam:::user/"+sts_user_id+"\"]},\"Action\":[\"sts:AssumeRole\"]}]}" (role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None) - eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'') + assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+'' role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\"}}" (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy) - eq(response['ResponseMetadata']['HTTPStatusCode'],200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 resp=sts_client.assume_role(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name) - eq(resp['ResponseMetadata']['HTTPStatusCode'],200) + assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 s3_client = boto3.client('s3', aws_access_key_id = resp['Credentials']['AccessKeyId'], @@ -261,9 +260,9 @@ def test_assume_role_allow(): ) bucket_name = get_new_bucket_name() s3bucket = s3_client.create_bucket(Bucket=bucket_name) - eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200) + assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200 bkt = 
s3_client.delete_bucket(Bucket=bucket_name) - eq(bkt['ResponseMetadata']['HTTPStatusCode'],204) + assert bkt['ResponseMetadata']['HTTPStatusCode'] == 204 @attr(resource='assume role') @attr(method='get') @@ -283,14 +282,14 @@ def test_assume_role_deny(): policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"AWS\":[\"arn:aws:iam:::user/"+sts_user_id+"\"]},\"Action\":[\"sts:AssumeRole\"]}]}" (role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None) - eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'') + assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+'' role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Deny\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\"}}" (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy) - eq(response['ResponseMetadata']['HTTPStatusCode'],200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 resp=sts_client.assume_role(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name) - eq(resp['ResponseMetadata']['HTTPStatusCode'],200) + assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 s3_client = boto3.client('s3', aws_access_key_id = resp['Credentials']['AccessKeyId'], @@ -304,7 +303,7 @@ def test_assume_role_deny(): s3bucket = s3_client.create_bucket(Bucket=bucket_name) except ClientError as e: s3bucket_error = e.response.get("Error", {}).get("Code") - eq(s3bucket_error,'AccessDenied') + assert s3bucket_error == 'AccessDenied' @attr(resource='assume role') @attr(method='get') @@ -323,14 +322,14 @@ def test_assume_role_creds_expiry(): policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"AWS\":[\"arn:aws:iam:::user/"+sts_user_id+"\"]},\"Action\":[\"sts:AssumeRole\"]}]}" 
(role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None) - eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'') + assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+'' role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\"}}" (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy) - eq(response['ResponseMetadata']['HTTPStatusCode'],200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 resp=sts_client.assume_role(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,DurationSeconds=900) - eq(resp['ResponseMetadata']['HTTPStatusCode'],200) + assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 time.sleep(900) s3_client = boto3.client('s3', @@ -345,7 +344,7 @@ def test_assume_role_creds_expiry(): s3bucket = s3_client.create_bucket(Bucket=bucket_name) except ClientError as e: s3bucket_error = e.response.get("Error", {}).get("Code") - eq(s3bucket_error,'AccessDenied') + assert s3bucket_error == 'AccessDenied' @attr(resource='assume role') @attr(method='head') @@ -368,15 +367,15 @@ def test_assume_role_deny_head_nonexistent(): policy_document = '{"Version":"2012-10-17","Statement":[{"Effect":"Allow","Principal":{"AWS":["arn:aws:iam:::user/'+sts_user_id+'"]},"Action":["sts:AssumeRole"]}]}' (role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None) - eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name) + assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name # allow GetObject but deny ListBucket role_policy = '{"Version":"2012-10-17","Statement":{"Effect":"Allow","Action":"s3:GetObject","Principal":"*","Resource":"arn:aws:s3:::*"}}' (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy) - 
eq(response['ResponseMetadata']['HTTPStatusCode'],200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 resp=sts_client.assume_role(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name) - eq(resp['ResponseMetadata']['HTTPStatusCode'],200) + assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 s3_client = boto3.client('s3', aws_access_key_id = resp['Credentials']['AccessKeyId'], @@ -389,7 +388,7 @@ def test_assume_role_deny_head_nonexistent(): s3_client.head_object(Bucket=bucket_name, Key='nonexistent') except ClientError as e: status = e.response['ResponseMetadata']['HTTPStatusCode'] - eq(status,403) + assert status == 403 @attr(resource='assume role') @attr(method='head') @@ -412,15 +411,15 @@ def test_assume_role_allow_head_nonexistent(): policy_document = '{"Version":"2012-10-17","Statement":[{"Effect":"Allow","Principal":{"AWS":["arn:aws:iam:::user/'+sts_user_id+'"]},"Action":["sts:AssumeRole"]}]}' (role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None) - eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name) + assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name # allow GetObject and ListBucket role_policy = '{"Version":"2012-10-17","Statement":{"Effect":"Allow","Action":["s3:GetObject","s3:ListBucket"],"Principal":"*","Resource":"arn:aws:s3:::*"}}' (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy) - eq(response['ResponseMetadata']['HTTPStatusCode'],200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 resp=sts_client.assume_role(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name) - eq(resp['ResponseMetadata']['HTTPStatusCode'],200) + assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 s3_client = boto3.client('s3', aws_access_key_id = resp['Credentials']['AccessKeyId'], @@ -433,7 +432,7 @@ def test_assume_role_allow_head_nonexistent(): 
s3_client.head_object(Bucket=bucket_name, Key='nonexistent') except ClientError as e: status = e.response['ResponseMetadata']['HTTPStatusCode'] - eq(status,404) + assert status == 404 @attr(resource='assume role with web identity') @@ -466,14 +465,14 @@ def test_assume_role_with_web_identity(): policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_response["OpenIDConnectProviderArn"]+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\"],\"Condition\":{\"StringEquals\":{\"localhost:8080/auth/realms/"+realm+":app_id\":\""+aud+"\"}}}]}" (role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None) - eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'') + assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+'' role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\"}}" (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy) - eq(response['ResponseMetadata']['HTTPStatusCode'],200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=token) - eq(resp['ResponseMetadata']['HTTPStatusCode'],200) + assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 s3_client = boto3.client('s3', aws_access_key_id = resp['Credentials']['AccessKeyId'], @@ -484,9 +483,9 @@ def test_assume_role_with_web_identity(): ) bucket_name = get_new_bucket_name() s3bucket = s3_client.create_bucket(Bucket=bucket_name) - eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200) + assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200 bkt = s3_client.delete_bucket(Bucket=bucket_name) - eq(bkt['ResponseMetadata']['HTTPStatusCode'],204) + assert bkt['ResponseMetadata']['HTTPStatusCode'] == 204 
oidc_remove=iam_client.delete_open_id_connect_provider( OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"] @@ -519,11 +518,11 @@ def test_assume_role_with_web_identity_invalid_webtoken(): policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_response["OpenIDConnectProviderArn"]+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\"],\"Condition\":{\"StringEquals\":{\"localhost:8080/auth/realms/"+realm+":app_id\":\""+aud+"\"}}}]}" (role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None) - eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'') + assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+'' role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\"}}" (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy) - eq(response['ResponseMetadata']['HTTPStatusCode'],200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 resp="" try: @@ -533,7 +532,7 @@ def test_assume_role_with_web_identity_invalid_webtoken(): log.debug('{}'.format(e.response.get("Error", {}).get("Code"))) log.debug('{}'.format(e)) resp_error = e.response.get("Error", {}).get("Code") - eq(resp_error,'AccessDenied') + assert resp_error == 'AccessDenied' oidc_remove=iam_client.delete_open_id_connect_provider( OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"] @@ -573,17 +572,17 @@ def test_session_policy_check_on_different_buckets(): policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_arn+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\"],\"Condition\":{\"StringEquals\":{\"localhost:8080/auth/realms/"+realm+":app_id\":\""+aud+"\"}}}]}" 
(role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None) - eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'') + assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+'' role_policy_new = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":[\"arn:aws:s3:::test2\",\"arn:aws:s3:::test2/*\"]}}" (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy_new) - eq(response['ResponseMetadata']['HTTPStatusCode'],200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 session_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":[\"s3:GetObject\",\"s3:PutObject\"],\"Resource\":[\"arn:aws:s3:::test1\",\"arn:aws:s3:::test1/*\"]}}" resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=token,Policy=session_policy) - eq(resp['ResponseMetadata']['HTTPStatusCode'],200) + assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 s3_client = boto3.client('s3', aws_access_key_id = resp['Credentials']['AccessKeyId'], @@ -598,14 +597,14 @@ def test_session_policy_check_on_different_buckets(): s3bucket = s3_client.create_bucket(Bucket=bucket_name_1) except ClientError as e: s3bucket_error = e.response.get("Error", {}).get("Code") - eq(s3bucket_error, 'AccessDenied') + assert s3bucket_error == 'AccessDenied' bucket_name_2 = 'test2' try: s3bucket = s3_client.create_bucket(Bucket=bucket_name_2) except ClientError as e: s3bucket_error = e.response.get("Error", {}).get("Code") - eq(s3bucket_error, 'AccessDenied') + assert s3bucket_error == 'AccessDenied' bucket_body = 'please-write-something' #body.encode(encoding='utf_8') @@ -613,7 +612,7 @@ def test_session_policy_check_on_different_buckets(): s3_put_obj = s3_client.put_object(Body=bucket_body, Bucket=bucket_name_1, Key="test-1.txt") except ClientError 
as e: s3_put_obj_error = e.response.get("Error", {}).get("Code") - eq(s3_put_obj_error,'NoSuchBucket') + assert s3_put_obj_error == 'NoSuchBucket' oidc_remove=iam_client.delete_open_id_connect_provider( OpenIDConnectProviderArn=oidc_arn @@ -649,23 +648,23 @@ def test_session_policy_check_on_same_bucket(): policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_arn+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\"],\"Condition\":{\"StringEquals\":{\"localhost:8080/auth/realms/"+realm+":app_id\":\""+aud+"\"}}}]}" (role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None) - eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'') + assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+'' role_policy_new = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":[\"*\"]}}" (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy_new) - eq(response['ResponseMetadata']['HTTPStatusCode'],200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 s3_client_iam_creds = get_s3_client_using_iam_creds() bucket_name_1 = 'test1' s3bucket = s3_client_iam_creds.create_bucket(Bucket=bucket_name_1) - eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200) + assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200 session_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":[\"s3:GetObject\",\"s3:PutObject\"],\"Resource\":[\"arn:aws:s3:::test1\",\"arn:aws:s3:::test1/*\"]}}" resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=token,Policy=session_policy) - eq(resp['ResponseMetadata']['HTTPStatusCode'],200) + assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 s3_client = boto3.client('s3', aws_access_key_id = 
resp['Credentials']['AccessKeyId'], @@ -677,7 +676,7 @@ def test_session_policy_check_on_same_bucket(): bucket_body = 'this is a test file' s3_put_obj = s3_client.put_object(Body=bucket_body, Bucket=bucket_name_1, Key="test-1.txt") - eq(s3_put_obj['ResponseMetadata']['HTTPStatusCode'],200) + assert s3_put_obj['ResponseMetadata']['HTTPStatusCode'] == 200 oidc_remove=iam_client.delete_open_id_connect_provider( OpenIDConnectProviderArn=oidc_arn @@ -715,23 +714,23 @@ def test_session_policy_check_put_obj_denial(): policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_arn+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\"],\"Condition\":{\"StringEquals\":{\"localhost:8080/auth/realms/"+realm+":app_id\":\""+aud+"\"}}}]}" (role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None) - eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'') + assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+'' role_policy_new = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":[\"*\"]}}" (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy_new) - eq(response['ResponseMetadata']['HTTPStatusCode'],200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 s3_client_iam_creds = get_s3_client_using_iam_creds() bucket_name_1 = 'test1' s3bucket = s3_client_iam_creds.create_bucket(Bucket=bucket_name_1) - eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200) + assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200 session_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":[\"s3:GetObject\"],\"Resource\":[\"arn:aws:s3:::test1\",\"arn:aws:s3:::test1/*\"]}}" 
resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=token,Policy=session_policy) - eq(resp['ResponseMetadata']['HTTPStatusCode'],200) + assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 s3_client = boto3.client('s3', aws_access_key_id = resp['Credentials']['AccessKeyId'], @@ -746,7 +745,7 @@ def test_session_policy_check_put_obj_denial(): s3_put_obj = s3_client.put_object(Body=bucket_body, Bucket=bucket_name_1, Key="test-1.txt") except ClientError as e: s3_put_obj_error = e.response.get("Error", {}).get("Code") - eq(s3_put_obj_error, 'AccessDenied') + assert s3_put_obj_error == 'AccessDenied' oidc_remove=iam_client.delete_open_id_connect_provider( OpenIDConnectProviderArn=oidc_arn @@ -784,23 +783,23 @@ def test_swapping_role_policy_and_session_policy(): policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_arn+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\"],\"Condition\":{\"StringEquals\":{\"localhost:8080/auth/realms/"+realm+":app_id\":\""+aud+"\"}}}]}" (role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None) - eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'') + assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+'' role_policy_new = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":[\"s3:GetObject\",\"s3:PutObject\"],\"Resource\":[\"arn:aws:s3:::test1\",\"arn:aws:s3:::test1/*\"]}}" (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy_new) - eq(response['ResponseMetadata']['HTTPStatusCode'],200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 s3_client_iam_creds = get_s3_client_using_iam_creds() bucket_name_1 = 'test1' s3bucket = s3_client_iam_creds.create_bucket(Bucket=bucket_name_1) - 
eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200) + assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200 session_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":[\"*\"]}}" resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=token,Policy=session_policy) - eq(resp['ResponseMetadata']['HTTPStatusCode'],200) + assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 s3_client = boto3.client('s3', aws_access_key_id = resp['Credentials']['AccessKeyId'], @@ -811,7 +810,7 @@ def test_swapping_role_policy_and_session_policy(): ) bucket_body = 'this is a test file' s3_put_obj = s3_client.put_object(Body=bucket_body, Bucket=bucket_name_1, Key="test-1.txt") - eq(s3_put_obj['ResponseMetadata']['HTTPStatusCode'],200) + assert s3_put_obj['ResponseMetadata']['HTTPStatusCode'] == 200 oidc_remove=iam_client.delete_open_id_connect_provider( OpenIDConnectProviderArn=oidc_arn @@ -848,23 +847,23 @@ def test_session_policy_check_different_op_permissions(): policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_arn+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\"],\"Condition\":{\"StringEquals\":{\"localhost:8080/auth/realms/"+realm+":app_id\":\""+aud+"\"}}}]}" (role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None) - eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'') + assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+'' role_policy_new = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":[\"s3:PutObject\"],\"Resource\":[\"arn:aws:s3:::test1\",\"arn:aws:s3:::test1/*\"]}}" (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy_new) - eq(response['ResponseMetadata']['HTTPStatusCode'],200) + assert 
response['ResponseMetadata']['HTTPStatusCode'] == 200 s3_client_iam_creds = get_s3_client_using_iam_creds() bucket_name_1 = 'test1' s3bucket = s3_client_iam_creds.create_bucket(Bucket=bucket_name_1) - eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200) + assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200 session_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":[\"s3:GetObject\"],\"Resource\":[\"arn:aws:s3:::test1\",\"arn:aws:s3:::test1/*\"]}}" resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=token,Policy=session_policy) - eq(resp['ResponseMetadata']['HTTPStatusCode'],200) + assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 s3_client = boto3.client('s3', aws_access_key_id = resp['Credentials']['AccessKeyId'], @@ -879,7 +878,7 @@ def test_session_policy_check_different_op_permissions(): s3_put_obj = s3_client.put_object(Body=bucket_body, Bucket=bucket_name_1, Key="test-1.txt") except ClientError as e: s3_put_obj_error = e.response.get("Error", {}).get("Code") - eq(s3_put_obj_error, 'AccessDenied') + assert s3_put_obj_error == 'AccessDenied' oidc_remove=iam_client.delete_open_id_connect_provider( OpenIDConnectProviderArn=oidc_arn @@ -917,23 +916,23 @@ def test_session_policy_check_with_deny_effect(): policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_arn+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\"],\"Condition\":{\"StringEquals\":{\"localhost:8080/auth/realms/"+realm+":app_id\":\""+aud+"\"}}}]}" (role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None) - eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'') + assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+'' role_policy_new = 
"{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Deny\",\"Action\":\"s3:*\",\"Resource\":[\"*\"]}}" (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy_new) - eq(response['ResponseMetadata']['HTTPStatusCode'],200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 s3_client_iam_creds = get_s3_client_using_iam_creds() bucket_name_1 = 'test1' s3bucket = s3_client_iam_creds.create_bucket(Bucket=bucket_name_1) - eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200) + assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200 session_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":[\"s3:PutObject\"],\"Resource\":[\"arn:aws:s3:::test1\",\"arn:aws:s3:::test1/*\"]}}" resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=token,Policy=session_policy) - eq(resp['ResponseMetadata']['HTTPStatusCode'],200) + assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 s3_client = boto3.client('s3', aws_access_key_id = resp['Credentials']['AccessKeyId'], @@ -947,7 +946,7 @@ def test_session_policy_check_with_deny_effect(): s3_put_obj = s3_client.put_object(Body=bucket_body, Bucket=bucket_name_1, Key="test-1.txt") except ClientError as e: s3_put_obj_error = e.response.get("Error", {}).get("Code") - eq(s3_put_obj_error, 'AccessDenied') + assert s3_put_obj_error == 'AccessDenied' oidc_remove=iam_client.delete_open_id_connect_provider( OpenIDConnectProviderArn=oidc_arn @@ -985,23 +984,23 @@ def test_session_policy_check_with_deny_on_same_op(): policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_arn+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\"],\"Condition\":{\"StringEquals\":{\"localhost:8080/auth/realms/"+realm+":app_id\":\""+aud+"\"}}}]}" 
(role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None) - eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'') + assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+'' role_policy_new = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":[\"s3:PutObject\"],\"Resource\":[\"arn:aws:s3:::test1\",\"arn:aws:s3:::test1/*\"]}}" (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy_new) - eq(response['ResponseMetadata']['HTTPStatusCode'],200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 s3_client_iam_creds = get_s3_client_using_iam_creds() bucket_name_1 = 'test1' s3bucket = s3_client_iam_creds.create_bucket(Bucket=bucket_name_1) - eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200) + assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200 session_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Deny\",\"Action\":[\"s3:PutObject\"],\"Resource\":[\"arn:aws:s3:::test1\",\"arn:aws:s3:::test1/*\"]}}" resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=token,Policy=session_policy) - eq(resp['ResponseMetadata']['HTTPStatusCode'],200) + assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 s3_client = boto3.client('s3', aws_access_key_id = resp['Credentials']['AccessKeyId'], @@ -1016,7 +1015,7 @@ def test_session_policy_check_with_deny_on_same_op(): s3_put_obj = s3_client.put_object(Body=bucket_body, Bucket=bucket_name_1, Key="test-1.txt") except ClientError as e: s3_put_obj_error = e.response.get("Error", {}).get("Code") - eq(s3_put_obj_error, 'AccessDenied') + assert s3_put_obj_error == 'AccessDenied' oidc_remove=iam_client.delete_open_id_connect_provider( OpenIDConnectProviderArn=oidc_arn @@ -1051,16 +1050,16 @@ def test_session_policy_bucket_policy_role_arn(): policy_document = 
"{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_arn+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\"],\"Condition\":{\"StringEquals\":{\"localhost:8080/auth/realms/"+realm+":app_id\":\""+aud+"\"}}}]}" (role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None) - eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'') + assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+'' role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":[\"*\"]}}" (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy) - eq(response['ResponseMetadata']['HTTPStatusCode'],200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 s3client_iamcreds = get_s3_client_using_iam_creds() bucket_name_1 = 'test1' s3bucket = s3client_iamcreds.create_bucket(Bucket=bucket_name_1) - eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200) + assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200 resource1 = "arn:aws:s3:::" + bucket_name_1 resource2 = "arn:aws:s3:::" + bucket_name_1 + "/*" @@ -1082,7 +1081,7 @@ def test_session_policy_bucket_policy_role_arn(): session_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":[\"s3:PutObject\"],\"Resource\":[\"arn:aws:s3:::test1\",\"arn:aws:s3:::test1/*\"]}}" resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=token,Policy=session_policy) - eq(resp['ResponseMetadata']['HTTPStatusCode'],200) + assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 s3_client = boto3.client('s3', aws_access_key_id = resp['Credentials']['AccessKeyId'], @@ -1093,13 +1092,13 @@ def test_session_policy_bucket_policy_role_arn(): ) bucket_body = 'this is a test file' s3_put_obj = s3_client.put_object(Body=bucket_body, 
Bucket=bucket_name_1, Key="test-1.txt") - eq(s3_put_obj['ResponseMetadata']['HTTPStatusCode'],200) + assert s3_put_obj['ResponseMetadata']['HTTPStatusCode'] == 200 try: obj = s3_client.get_object(Bucket=bucket_name_1, Key="test-1.txt") except ClientError as e: s3object_error = e.response.get("Error", {}).get("Code") - eq(s3object_error, 'AccessDenied') + assert s3object_error == 'AccessDenied' oidc_remove=iam_client.delete_open_id_connect_provider( OpenIDConnectProviderArn=oidc_arn @@ -1134,16 +1133,16 @@ def test_session_policy_bucket_policy_session_arn(): policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_arn+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\"],\"Condition\":{\"StringEquals\":{\"localhost:8080/auth/realms/"+realm+":app_id\":\""+aud+"\"}}}]}" (role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None) - eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'') + assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+'' role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":[\"*\"]}}" (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy) - eq(response['ResponseMetadata']['HTTPStatusCode'],200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 s3client_iamcreds = get_s3_client_using_iam_creds() bucket_name_1 = 'test1' s3bucket = s3client_iamcreds.create_bucket(Bucket=bucket_name_1) - eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200) + assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200 resource1 = "arn:aws:s3:::" + bucket_name_1 resource2 = "arn:aws:s3:::" + bucket_name_1 + "/*" @@ -1165,7 +1164,7 @@ def test_session_policy_bucket_policy_session_arn(): session_policy = 
"{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":[\"s3:PutObject\"],\"Resource\":[\"arn:aws:s3:::test1\",\"arn:aws:s3:::test1/*\"]}}" resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=token,Policy=session_policy) - eq(resp['ResponseMetadata']['HTTPStatusCode'],200) + assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 s3_client = boto3.client('s3', aws_access_key_id = resp['Credentials']['AccessKeyId'], @@ -1176,11 +1175,11 @@ def test_session_policy_bucket_policy_session_arn(): ) bucket_body = 'this is a test file' s3_put_obj = s3_client.put_object(Body=bucket_body, Bucket=bucket_name_1, Key="test-1.txt") - eq(s3_put_obj['ResponseMetadata']['HTTPStatusCode'],200) + assert s3_put_obj['ResponseMetadata']['HTTPStatusCode'] == 200 s3_get_obj = s3_client.get_object(Bucket=bucket_name_1, Key="test-1.txt") - eq(s3_get_obj['ResponseMetadata']['HTTPStatusCode'],200) + assert s3_get_obj['ResponseMetadata']['HTTPStatusCode'] == 200 oidc_remove=iam_client.delete_open_id_connect_provider( OpenIDConnectProviderArn=oidc_arn @@ -1215,16 +1214,16 @@ def test_session_policy_copy_object(): policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_arn+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\"],\"Condition\":{\"StringEquals\":{\"localhost:8080/auth/realms/"+realm+":app_id\":\""+aud+"\"}}}]}" (role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None) - eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'') + assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+'' role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":[\"*\"]}}" (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy) - 
eq(response['ResponseMetadata']['HTTPStatusCode'],200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 s3client_iamcreds = get_s3_client_using_iam_creds() bucket_name_1 = 'test1' s3bucket = s3client_iamcreds.create_bucket(Bucket=bucket_name_1) - eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200) + assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200 resource1 = "arn:aws:s3:::" + bucket_name_1 resource2 = "arn:aws:s3:::" + bucket_name_1 + "/*" @@ -1247,7 +1246,7 @@ def test_session_policy_copy_object(): session_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":[\"s3:PutObject\"],\"Resource\":[\"arn:aws:s3:::test1\",\"arn:aws:s3:::test1/*\"]}}" resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=token,Policy=session_policy) - eq(resp['ResponseMetadata']['HTTPStatusCode'],200) + assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 s3_client = boto3.client('s3', aws_access_key_id = resp['Credentials']['AccessKeyId'], @@ -1258,7 +1257,7 @@ def test_session_policy_copy_object(): ) bucket_body = 'this is a test file' s3_put_obj = s3_client.put_object(Body=bucket_body, Bucket=bucket_name_1, Key="test-1.txt") - eq(s3_put_obj['ResponseMetadata']['HTTPStatusCode'],200) + assert s3_put_obj['ResponseMetadata']['HTTPStatusCode'] == 200 copy_source = { 'Bucket': bucket_name_1, @@ -1268,7 +1267,7 @@ def test_session_policy_copy_object(): s3_client.copy(copy_source, bucket_name_1, "test-2.txt") s3_get_obj = s3_client.get_object(Bucket=bucket_name_1, Key="test-2.txt") - eq(s3_get_obj['ResponseMetadata']['HTTPStatusCode'],200) + assert s3_get_obj['ResponseMetadata']['HTTPStatusCode'] == 200 oidc_remove=iam_client.delete_open_id_connect_provider( OpenIDConnectProviderArn=oidc_arn @@ -1303,17 +1302,17 @@ def test_session_policy_no_bucket_role_policy(): policy_document = 
"{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_arn+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\"],\"Condition\":{\"StringEquals\":{\"localhost:8080/auth/realms/"+realm+":app_id\":\""+aud+"\"}}}]}" (role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None) - eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'') + assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+'' s3client_iamcreds = get_s3_client_using_iam_creds() bucket_name_1 = 'test1' s3bucket = s3client_iamcreds.create_bucket(Bucket=bucket_name_1) - eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200) + assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200 session_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":[\"s3:PutObject\",\"s3:GetObject\"],\"Resource\":[\"arn:aws:s3:::test1\",\"arn:aws:s3:::test1/*\"]}}" resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=token,Policy=session_policy) - eq(resp['ResponseMetadata']['HTTPStatusCode'],200) + assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 s3_client = boto3.client('s3', aws_access_key_id = resp['Credentials']['AccessKeyId'], @@ -1327,7 +1326,7 @@ def test_session_policy_no_bucket_role_policy(): s3_put_obj = s3_client.put_object(Body=bucket_body, Bucket=bucket_name_1, Key="test-1.txt") except ClientError as e: s3putobj_error = e.response.get("Error", {}).get("Code") - eq(s3putobj_error, 'AccessDenied') + assert s3putobj_error == 'AccessDenied' oidc_remove=iam_client.delete_open_id_connect_provider( OpenIDConnectProviderArn=oidc_arn @@ -1362,16 +1361,16 @@ def test_session_policy_bucket_policy_deny(): policy_document = 
"{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_arn+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\"],\"Condition\":{\"StringEquals\":{\"localhost:8080/auth/realms/"+realm+":app_id\":\""+aud+"\"}}}]}" (role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None) - eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'') + assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+'' role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":[\"*\"]}}" (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy) - eq(response['ResponseMetadata']['HTTPStatusCode'],200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 s3client_iamcreds = get_s3_client_using_iam_creds() bucket_name_1 = 'test1' s3bucket = s3client_iamcreds.create_bucket(Bucket=bucket_name_1) - eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200) + assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200 resource1 = "arn:aws:s3:::" + bucket_name_1 resource2 = "arn:aws:s3:::" + bucket_name_1 + "/*" @@ -1393,7 +1392,7 @@ def test_session_policy_bucket_policy_deny(): session_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":[\"s3:PutObject\"],\"Resource\":[\"arn:aws:s3:::test1\",\"arn:aws:s3:::test1/*\"]}}" resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=token,Policy=session_policy) - eq(resp['ResponseMetadata']['HTTPStatusCode'],200) + assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 s3_client = boto3.client('s3', aws_access_key_id = resp['Credentials']['AccessKeyId'], @@ -1408,7 +1407,7 @@ def test_session_policy_bucket_policy_deny(): s3_put_obj = s3_client.put_object(Body=bucket_body, Bucket=bucket_name_1, Key="test-1.txt") except 
ClientError as e: s3putobj_error = e.response.get("Error", {}).get("Code") - eq(s3putobj_error, 'AccessDenied') + assert s3putobj_error == 'AccessDenied' oidc_remove=iam_client.delete_open_id_connect_provider( OpenIDConnectProviderArn=oidc_arn @@ -1444,14 +1443,14 @@ def test_assume_role_with_web_identity_with_sub(): policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_response["OpenIDConnectProviderArn"]+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\"],\"Condition\":{\"StringEquals\":{\"localhost:8080/auth/realms/"+realm+":sub\":\""+sub+"\"}}}]}" (role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None) - eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'') + assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+'' role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\"}}" (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy) - eq(response['ResponseMetadata']['HTTPStatusCode'],200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=token) - eq(resp['ResponseMetadata']['HTTPStatusCode'],200) + assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 s3_client = boto3.client('s3', aws_access_key_id = resp['Credentials']['AccessKeyId'], @@ -1462,9 +1461,9 @@ def test_assume_role_with_web_identity_with_sub(): ) bucket_name = get_new_bucket_name() s3bucket = s3_client.create_bucket(Bucket=bucket_name) - eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200) + assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200 bkt = s3_client.delete_bucket(Bucket=bucket_name) - eq(bkt['ResponseMetadata']['HTTPStatusCode'],204) + assert 
bkt['ResponseMetadata']['HTTPStatusCode'] == 204 oidc_remove=iam_client.delete_open_id_connect_provider( OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"] @@ -1500,14 +1499,14 @@ def test_assume_role_with_web_identity_with_azp(): policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_response["OpenIDConnectProviderArn"]+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\"],\"Condition\":{\"StringEquals\":{\"localhost:8080/auth/realms/"+realm+":azp\":\""+azp+"\"}}}]}" (role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None) - eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'') + assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+'' role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\"}}" (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy) - eq(response['ResponseMetadata']['HTTPStatusCode'],200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=token) - eq(resp['ResponseMetadata']['HTTPStatusCode'],200) + assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 s3_client = boto3.client('s3', aws_access_key_id = resp['Credentials']['AccessKeyId'], @@ -1518,9 +1517,9 @@ def test_assume_role_with_web_identity_with_azp(): ) bucket_name = get_new_bucket_name() s3bucket = s3_client.create_bucket(Bucket=bucket_name) - eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200) + assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200 bkt = s3_client.delete_bucket(Bucket=bucket_name) - eq(bkt['ResponseMetadata']['HTTPStatusCode'],204) + assert bkt['ResponseMetadata']['HTTPStatusCode'] == 204 
oidc_remove=iam_client.delete_open_id_connect_provider( OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"] @@ -1557,14 +1556,14 @@ def test_assume_role_with_web_identity_with_request_tag(): policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_response["OpenIDConnectProviderArn"]+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\",\"sts:TagSession\"],\"Condition\":{\"StringEquals\":{\"aws:RequestTag/Department\":\"Engineering\"}}}]}" (role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None) - eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'') + assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+'' role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\"}}" (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy) - eq(response['ResponseMetadata']['HTTPStatusCode'],200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=user_token) - eq(resp['ResponseMetadata']['HTTPStatusCode'],200) + assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 s3_client = boto3.client('s3', aws_access_key_id = resp['Credentials']['AccessKeyId'], @@ -1575,9 +1574,9 @@ def test_assume_role_with_web_identity_with_request_tag(): ) bucket_name = get_new_bucket_name() s3bucket = s3_client.create_bucket(Bucket=bucket_name) - eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200) + assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200 bkt = s3_client.delete_bucket(Bucket=bucket_name) - eq(bkt['ResponseMetadata']['HTTPStatusCode'],204) + assert bkt['ResponseMetadata']['HTTPStatusCode'] == 204 oidc_remove=iam_client.delete_open_id_connect_provider( 
OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"] @@ -1614,14 +1613,14 @@ def test_assume_role_with_web_identity_with_principal_tag(): policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_response["OpenIDConnectProviderArn"]+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\",\"sts:TagSession\"],\"Condition\":{\"StringEquals\":{\"aws:RequestTag/Department\":\"Engineering\"}}}]}" (role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None) - eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'') + assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+'' role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\",\"Condition\":{\"StringEquals\":{\"aws:PrincipalTag/Department\":\"Engineering\"}}}}" (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy) - eq(response['ResponseMetadata']['HTTPStatusCode'],200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=user_token) - eq(resp['ResponseMetadata']['HTTPStatusCode'],200) + assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 s3_client = boto3.client('s3', aws_access_key_id = resp['Credentials']['AccessKeyId'], @@ -1632,9 +1631,9 @@ def test_assume_role_with_web_identity_with_principal_tag(): ) bucket_name = get_new_bucket_name() s3bucket = s3_client.create_bucket(Bucket=bucket_name) - eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200) + assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200 bkt = s3_client.delete_bucket(Bucket=bucket_name) - eq(bkt['ResponseMetadata']['HTTPStatusCode'],204) + assert bkt['ResponseMetadata']['HTTPStatusCode'] == 204 
oidc_remove=iam_client.delete_open_id_connect_provider( OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"] @@ -1671,14 +1670,14 @@ def test_assume_role_with_web_identity_for_all_values(): policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_response["OpenIDConnectProviderArn"]+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\",\"sts:TagSession\"],\"Condition\":{\"StringEquals\":{\"aws:RequestTag/Department\":\"Engineering\"}}}]}" (role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None) - eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'') + assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+'' role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\",\"Condition\":{\"ForAllValues:StringEquals\":{\"aws:PrincipalTag/Department\":[\"Engineering\",\"Marketing\"]}}}}" (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy) - eq(response['ResponseMetadata']['HTTPStatusCode'],200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=user_token) - eq(resp['ResponseMetadata']['HTTPStatusCode'],200) + assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 s3_client = boto3.client('s3', aws_access_key_id = resp['Credentials']['AccessKeyId'], @@ -1689,9 +1688,9 @@ def test_assume_role_with_web_identity_for_all_values(): ) bucket_name = get_new_bucket_name() s3bucket = s3_client.create_bucket(Bucket=bucket_name) - eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200) + assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200 bkt = s3_client.delete_bucket(Bucket=bucket_name) - eq(bkt['ResponseMetadata']['HTTPStatusCode'],204) + assert 
bkt['ResponseMetadata']['HTTPStatusCode'] == 204 oidc_remove=iam_client.delete_open_id_connect_provider( OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"] @@ -1728,15 +1727,15 @@ def test_assume_role_with_web_identity_for_all_values_deny(): policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_response["OpenIDConnectProviderArn"]+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\",\"sts:TagSession\"],\"Condition\":{\"StringEquals\":{\"aws:RequestTag/Department\":\"Engineering\"}}}]}" (role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None) - eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'') + assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+'' #ForAllValues: The condition returns true if every key value in the request matches at least one value in the policy role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\",\"Condition\":{\"ForAllValues:StringEquals\":{\"aws:PrincipalTag/Department\":[\"Engineering\"]}}}}" (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy) - eq(response['ResponseMetadata']['HTTPStatusCode'],200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=user_token) - eq(resp['ResponseMetadata']['HTTPStatusCode'],200) + assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 s3_client = boto3.client('s3', aws_access_key_id = resp['Credentials']['AccessKeyId'], @@ -1750,7 +1749,7 @@ def test_assume_role_with_web_identity_for_all_values_deny(): s3bucket = s3_client.create_bucket(Bucket=bucket_name) except ClientError as e: s3bucket_error = e.response.get("Error", {}).get("Code") - eq(s3bucket_error,'AccessDenied') 
+ assert s3bucket_error == 'AccessDenied' oidc_remove=iam_client.delete_open_id_connect_provider( OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"] @@ -1787,14 +1786,14 @@ def test_assume_role_with_web_identity_tag_keys_trust_policy(): policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_response["OpenIDConnectProviderArn"]+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\",\"sts:TagSession\"],\"Condition\":{\"StringEquals\":{\"aws:TagKeys\":\"Department\"}}}]}" (role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None) - eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'') + assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+'' role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\",\"Condition\":{\"ForAnyValue:StringEquals\":{\"aws:PrincipalTag/Department\":[\"Engineering\"]}}}}" (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy) - eq(response['ResponseMetadata']['HTTPStatusCode'],200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=user_token) - eq(resp['ResponseMetadata']['HTTPStatusCode'],200) + assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 s3_client = boto3.client('s3', aws_access_key_id = resp['Credentials']['AccessKeyId'], @@ -1805,9 +1804,9 @@ def test_assume_role_with_web_identity_tag_keys_trust_policy(): ) bucket_name = get_new_bucket_name() s3bucket = s3_client.create_bucket(Bucket=bucket_name) - eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200) + assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200 bkt = s3_client.delete_bucket(Bucket=bucket_name) - 
eq(bkt['ResponseMetadata']['HTTPStatusCode'],204) + assert bkt['ResponseMetadata']['HTTPStatusCode'] == 204 oidc_remove=iam_client.delete_open_id_connect_provider( OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"] @@ -1844,14 +1843,14 @@ def test_assume_role_with_web_identity_tag_keys_role_policy(): policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_response["OpenIDConnectProviderArn"]+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\",\"sts:TagSession\"],\"Condition\":{\"StringEquals\":{\"aws:RequestTag/Department\":\"Engineering\"}}}]}" (role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None) - eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'') + assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+'' role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\",\"Condition\":{\"StringEquals\":{\"aws:TagKeys\":[\"Department\"]}}}}" (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy) - eq(response['ResponseMetadata']['HTTPStatusCode'],200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=user_token) - eq(resp['ResponseMetadata']['HTTPStatusCode'],200) + assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 s3_client = boto3.client('s3', aws_access_key_id = resp['Credentials']['AccessKeyId'], @@ -1862,9 +1861,9 @@ def test_assume_role_with_web_identity_tag_keys_role_policy(): ) bucket_name = get_new_bucket_name() s3bucket = s3_client.create_bucket(Bucket=bucket_name) - eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200) + assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200 bkt = 
s3_client.delete_bucket(Bucket=bucket_name) - eq(bkt['ResponseMetadata']['HTTPStatusCode'],204) + assert bkt['ResponseMetadata']['HTTPStatusCode'] == 204 oidc_remove=iam_client.delete_open_id_connect_provider( OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"] @@ -1898,7 +1897,7 @@ def test_assume_role_with_web_identity_resource_tag(): bucket_name = get_new_bucket_name() s3bucket = s3_client_iam_creds.create_bucket(Bucket=bucket_name) - eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200) + assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200 bucket_tagging = s3_res_iam_creds.BucketTagging(bucket_name) Set_Tag = bucket_tagging.put(Tagging={'TagSet':[{'Key':'Department', 'Value': 'Engineering'},{'Key':'Department', 'Value': 'Marketing'}]}) @@ -1912,14 +1911,14 @@ def test_assume_role_with_web_identity_resource_tag(): policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_response["OpenIDConnectProviderArn"]+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\",\"sts:TagSession\"],\"Condition\":{\"StringEquals\":{\"aws:RequestTag/Department\":\"Engineering\"}}}]}" (role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None) - eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'') + assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+'' role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\",\"Condition\":{\"StringEquals\":{\"s3:ResourceTag/Department\":[\"Engineering\"]}}}}" (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy) - eq(response['ResponseMetadata']['HTTPStatusCode'],200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 
resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=user_token) - eq(resp['ResponseMetadata']['HTTPStatusCode'],200) + assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 s3_client = boto3.client('s3', aws_access_key_id = resp['Credentials']['AccessKeyId'], @@ -1931,7 +1930,7 @@ def test_assume_role_with_web_identity_resource_tag(): bucket_body = 'this is a test file' s3_put_obj = s3_client.put_object(Body=bucket_body, Bucket=bucket_name, Key="test-1.txt") - eq(s3_put_obj['ResponseMetadata']['HTTPStatusCode'],200) + assert s3_put_obj['ResponseMetadata']['HTTPStatusCode'] == 200 oidc_remove=iam_client.delete_open_id_connect_provider( OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"] @@ -1965,7 +1964,7 @@ def test_assume_role_with_web_identity_resource_tag_deny(): bucket_name = get_new_bucket_name() s3bucket = s3_client_iam_creds.create_bucket(Bucket=bucket_name) - eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200) + assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200 oidc_response = iam_client.create_open_id_connect_provider( Url='http://localhost:8080/auth/realms/{}'.format(realm), @@ -1976,14 +1975,14 @@ def test_assume_role_with_web_identity_resource_tag_deny(): policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_response["OpenIDConnectProviderArn"]+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\",\"sts:TagSession\"],\"Condition\":{\"StringEquals\":{\"aws:RequestTag/Department\":\"Engineering\"}}}]}" (role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None) - eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'') + assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+'' role_policy = 
"{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\",\"Condition\":{\"StringEquals\":{\"s3:ResourceTag/Department\":[\"Engineering\"]}}}}" (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy) - eq(response['ResponseMetadata']['HTTPStatusCode'],200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=user_token) - eq(resp['ResponseMetadata']['HTTPStatusCode'],200) + assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 s3_client = boto3.client('s3', aws_access_key_id = resp['Credentials']['AccessKeyId'], @@ -1998,7 +1997,7 @@ def test_assume_role_with_web_identity_resource_tag_deny(): s3_put_obj = s3_client.put_object(Body=bucket_body, Bucket=bucket_name, Key="test-1.txt") except ClientError as e: s3_put_obj_error = e.response.get("Error", {}).get("Code") - eq(s3_put_obj_error,'AccessDenied') + assert s3_put_obj_error == 'AccessDenied' oidc_remove=iam_client.delete_open_id_connect_provider( OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"] @@ -2032,7 +2031,7 @@ def test_assume_role_with_web_identity_wrong_resource_tag_deny(): bucket_name = get_new_bucket_name() s3bucket = s3_client_iam_creds.create_bucket(Bucket=bucket_name) - eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200) + assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200 bucket_tagging = s3_res_iam_creds.BucketTagging(bucket_name) Set_Tag = bucket_tagging.put(Tagging={'TagSet':[{'Key':'Department', 'Value': 'WrongResourcetag'}]}) @@ -2046,14 +2045,14 @@ def test_assume_role_with_web_identity_wrong_resource_tag_deny(): policy_document = 
"{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_response["OpenIDConnectProviderArn"]+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\",\"sts:TagSession\"],\"Condition\":{\"StringEquals\":{\"aws:RequestTag/Department\":\"Engineering\"}}}]}" (role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None) - eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'') + assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+'' role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\",\"Condition\":{\"StringEquals\":{\"s3:ResourceTag/Department\":[\"Engineering\"]}}}}" (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy) - eq(response['ResponseMetadata']['HTTPStatusCode'],200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=user_token) - eq(resp['ResponseMetadata']['HTTPStatusCode'],200) + assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 s3_client = boto3.client('s3', aws_access_key_id = resp['Credentials']['AccessKeyId'], @@ -2068,7 +2067,7 @@ def test_assume_role_with_web_identity_wrong_resource_tag_deny(): s3_put_obj = s3_client.put_object(Body=bucket_body, Bucket=bucket_name, Key="test-1.txt") except ClientError as e: s3_put_obj_error = e.response.get("Error", {}).get("Code") - eq(s3_put_obj_error,'AccessDenied') + assert s3_put_obj_error == 'AccessDenied' oidc_remove=iam_client.delete_open_id_connect_provider( OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"] @@ -2102,7 +2101,7 @@ def test_assume_role_with_web_identity_resource_tag_princ_tag(): bucket_name = get_new_bucket_name() s3bucket = s3_client_iam_creds.create_bucket(Bucket=bucket_name) - 
eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200) + assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200 bucket_tagging = s3_res_iam_creds.BucketTagging(bucket_name) Set_Tag = bucket_tagging.put(Tagging={'TagSet':[{'Key':'Department', 'Value': 'Engineering'}]}) @@ -2116,14 +2115,14 @@ def test_assume_role_with_web_identity_resource_tag_princ_tag(): policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_response["OpenIDConnectProviderArn"]+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\",\"sts:TagSession\"],\"Condition\":{\"StringEquals\":{\"aws:RequestTag/Department\":\"Engineering\"}}}]}" (role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None) - eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'') + assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+'' role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\",\"Condition\":{\"StringEquals\":{\"s3:ResourceTag/Department\":[\"${aws:PrincipalTag/Department}\"]}}}}" (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy) - eq(response['ResponseMetadata']['HTTPStatusCode'],200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=user_token) - eq(resp['ResponseMetadata']['HTTPStatusCode'],200) + assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 s3_client = boto3.client('s3', aws_access_key_id = resp['Credentials']['AccessKeyId'], @@ -2137,10 +2136,10 @@ def test_assume_role_with_web_identity_resource_tag_princ_tag(): tags = 'Department=Engineering&Department=Marketing' key = "test-1.txt" s3_put_obj = s3_client.put_object(Body=bucket_body, Bucket=bucket_name, Key=key, Tagging=tags) - 
eq(s3_put_obj['ResponseMetadata']['HTTPStatusCode'],200) + assert s3_put_obj['ResponseMetadata']['HTTPStatusCode'] == 200 s3_get_obj = s3_client.get_object(Bucket=bucket_name, Key=key) - eq(s3_get_obj['ResponseMetadata']['HTTPStatusCode'],200) + assert s3_get_obj['ResponseMetadata']['HTTPStatusCode'] == 200 oidc_remove=iam_client.delete_open_id_connect_provider( OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"] @@ -2175,14 +2174,14 @@ def test_assume_role_with_web_identity_resource_tag_copy_obj(): #create two buckets and add same tags to both bucket_name = get_new_bucket_name() s3bucket = s3_client_iam_creds.create_bucket(Bucket=bucket_name) - eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200) + assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200 bucket_tagging = s3_res_iam_creds.BucketTagging(bucket_name) Set_Tag = bucket_tagging.put(Tagging={'TagSet':[{'Key':'Department', 'Value': 'Engineering'}]}) copy_bucket_name = get_new_bucket_name() s3bucket = s3_client_iam_creds.create_bucket(Bucket=copy_bucket_name) - eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200) + assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200 bucket_tagging = s3_res_iam_creds.BucketTagging(copy_bucket_name) Set_Tag = bucket_tagging.put(Tagging={'TagSet':[{'Key':'Department', 'Value': 'Engineering'}]}) @@ -2196,14 +2195,14 @@ def test_assume_role_with_web_identity_resource_tag_copy_obj(): policy_document = "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Federated\":[\""+oidc_response["OpenIDConnectProviderArn"]+"\"]},\"Action\":[\"sts:AssumeRoleWithWebIdentity\",\"sts:TagSession\"],\"Condition\":{\"StringEquals\":{\"aws:RequestTag/Department\":\"Engineering\"}}}]}" (role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None) - eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'') + assert role_response['Role']['Arn'] == 
'arn:aws:iam:::role/'+general_role_name+'' role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\",\"Condition\":{\"StringEquals\":{\"s3:ResourceTag/Department\":[\"${aws:PrincipalTag/Department}\"]}}}}" (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy) - eq(response['ResponseMetadata']['HTTPStatusCode'],200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=user_token) - eq(resp['ResponseMetadata']['HTTPStatusCode'],200) + assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 s3_client = boto3.client('s3', aws_access_key_id = resp['Credentials']['AccessKeyId'], @@ -2217,7 +2216,7 @@ def test_assume_role_with_web_identity_resource_tag_copy_obj(): tags = 'Department=Engineering' key = "test-1.txt" s3_put_obj = s3_client.put_object(Body=bucket_body, Bucket=bucket_name, Key=key, Tagging=tags) - eq(s3_put_obj['ResponseMetadata']['HTTPStatusCode'],200) + assert s3_put_obj['ResponseMetadata']['HTTPStatusCode'] == 200 #copy to same bucket copy_source = { @@ -2228,7 +2227,7 @@ def test_assume_role_with_web_identity_resource_tag_copy_obj(): s3_client.copy(copy_source, bucket_name, "test-2.txt") s3_get_obj = s3_client.get_object(Bucket=bucket_name, Key="test-2.txt") - eq(s3_get_obj['ResponseMetadata']['HTTPStatusCode'],200) + assert s3_get_obj['ResponseMetadata']['HTTPStatusCode'] == 200 #copy to another bucket copy_source = { @@ -2239,7 +2238,7 @@ def test_assume_role_with_web_identity_resource_tag_copy_obj(): s3_client.copy(copy_source, copy_bucket_name, "test-1.txt") s3_get_obj = s3_client.get_object(Bucket=copy_bucket_name, Key="test-1.txt") - eq(s3_get_obj['ResponseMetadata']['HTTPStatusCode'],200) + assert s3_get_obj['ResponseMetadata']['HTTPStatusCode'] == 200 oidc_remove=iam_client.delete_open_id_connect_provider( 
OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"] @@ -2273,7 +2272,7 @@ def test_assume_role_with_web_identity_role_resource_tag(): bucket_name = get_new_bucket_name() s3bucket = s3_client_iam_creds.create_bucket(Bucket=bucket_name) - eq(s3bucket['ResponseMetadata']['HTTPStatusCode'],200) + assert s3bucket['ResponseMetadata']['HTTPStatusCode'] == 200 bucket_tagging = s3_res_iam_creds.BucketTagging(bucket_name) Set_Tag = bucket_tagging.put(Tagging={'TagSet':[{'Key':'Department', 'Value': 'Engineering'},{'Key':'Department', 'Value': 'Marketing'}]}) @@ -2293,14 +2292,14 @@ def test_assume_role_with_web_identity_role_resource_tag(): ] (role_error,role_response,general_role_name)=create_role(iam_client,'/',None,policy_document,None,None,None,tags_list) - eq(role_response['Role']['Arn'],'arn:aws:iam:::role/'+general_role_name+'') + assert role_response['Role']['Arn'] == 'arn:aws:iam:::role/'+general_role_name+'' role_policy = "{\"Version\":\"2012-10-17\",\"Statement\":{\"Effect\":\"Allow\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::*\",\"Condition\":{\"StringEquals\":{\"s3:ResourceTag/Department\":[\"Engineering\"]}}}}" (role_err,response)=put_role_policy(iam_client,general_role_name,None,role_policy) - eq(response['ResponseMetadata']['HTTPStatusCode'],200) + assert response['ResponseMetadata']['HTTPStatusCode'] == 200 resp=sts_client.assume_role_with_web_identity(RoleArn=role_response['Role']['Arn'],RoleSessionName=role_session_name,WebIdentityToken=user_token) - eq(resp['ResponseMetadata']['HTTPStatusCode'],200) + assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 s3_client = boto3.client('s3', aws_access_key_id = resp['Credentials']['AccessKeyId'], @@ -2312,7 +2311,7 @@ def test_assume_role_with_web_identity_role_resource_tag(): bucket_body = 'this is a test file' s3_put_obj = s3_client.put_object(Body=bucket_body, Bucket=bucket_name, Key="test-1.txt") - eq(s3_put_obj['ResponseMetadata']['HTTPStatusCode'],200) + assert 
s3_put_obj['ResponseMetadata']['HTTPStatusCode'] == 200 oidc_remove=iam_client.delete_open_id_connect_provider( OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"] diff --git a/s3tests_boto3/functional/test_utils.py b/s3tests_boto3/functional/test_utils.py index 59c3c74..c0dd398 100644 --- a/s3tests_boto3/functional/test_utils.py +++ b/s3tests_boto3/functional/test_utils.py @@ -1,11 +1,9 @@ -from nose.tools import eq_ as eq - from . import utils def test_generate(): FIVE_MB = 5 * 1024 * 1024 - eq(len(''.join(utils.generate_random(0))), 0) - eq(len(''.join(utils.generate_random(1))), 1) - eq(len(''.join(utils.generate_random(FIVE_MB - 1))), FIVE_MB - 1) - eq(len(''.join(utils.generate_random(FIVE_MB))), FIVE_MB) - eq(len(''.join(utils.generate_random(FIVE_MB + 1))), FIVE_MB + 1) + assert len(''.join(utils.generate_random(0))) == 0 + assert len(''.join(utils.generate_random(1))) == 1 + assert len(''.join(utils.generate_random(FIVE_MB - 1))) == FIVE_MB - 1 + assert len(''.join(utils.generate_random(FIVE_MB))) == FIVE_MB + assert len(''.join(utils.generate_random(FIVE_MB + 1))) == FIVE_MB + 1 diff --git a/s3tests_boto3/functional/utils.py b/s3tests_boto3/functional/utils.py index 4d9dc49..ab84c16 100644 --- a/s3tests_boto3/functional/utils.py +++ b/s3tests_boto3/functional/utils.py @@ -3,8 +3,6 @@ import requests import string import time -from nose.tools import eq_ as eq - def assert_raises(excClass, callableObj, *args, **kwargs): """ Like unittest.TestCase.assertRaises, but returns the exception. 
From 4c75fba0deada0f0db6dddf9d7445b9f10c18fcb Mon Sep 17 00:00:00 2001 From: Casey Bodley Date: Sat, 21 Jan 2023 14:24:31 -0500 Subject: [PATCH 08/12] nose: remove nose attrs and imports Signed-off-by: Casey Bodley --- s3tests/functional/test_headers.py | 240 -- s3tests/functional/test_s3.py | 122 - s3tests/functional/test_s3_website.py | 192 +- s3tests_boto3/functional/test_headers.py | 214 -- s3tests_boto3/functional/test_iam.py | 172 -- s3tests_boto3/functional/test_s3.py | 2609 --------------------- s3tests_boto3/functional/test_s3select.py | 38 - s3tests_boto3/functional/test_sts.py | 250 -- 8 files changed, 1 insertion(+), 3836 deletions(-) diff --git a/s3tests/functional/test_headers.py b/s3tests/functional/test_headers.py index 2ca16c5..85171a6 100644 --- a/s3tests/functional/test_headers.py +++ b/s3tests/functional/test_headers.py @@ -4,7 +4,6 @@ import boto.exception import boto.s3.connection import boto.s3.acl import boto.utils -import nose import pytest import operator import random @@ -19,8 +18,6 @@ from urllib.parse import urlparse from boto.s3.connection import S3Connection -from nose.plugins.attrib import attr - from .utils import assert_raises from email.header import decode_header @@ -183,11 +180,6 @@ def tag(*tags): # @tag('auth_common') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/no content length') -@attr(assertion='fails 411') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_create_bad_contentlength_none(): key = _setup_bad_object(remove=('Content-Length',)) @@ -199,11 +191,6 @@ def test_object_create_bad_contentlength_none(): @tag('auth_common') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/content length too long') -@attr(assertion='fails 400') -@attr('fails_on_rgw') @pytest.mark.fails_on_rgw def test_object_create_bad_contentlength_mismatch_above(): content = 'bar' @@ -223,11 +210,6 @@ def test_object_create_bad_contentlength_mismatch_above(): 
@tag('auth_common') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/empty authorization') -@attr(assertion='fails 403') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_create_bad_authorization_empty(): key = _setup_bad_object({'Authorization': ''}) @@ -238,11 +220,6 @@ def test_object_create_bad_authorization_empty(): assert e.error_code == 'AccessDenied' @tag('auth_common') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/date and x-amz-date') -@attr(assertion='succeeds') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_create_date_and_amz_date(): date = formatdate(usegmt=True) @@ -250,11 +227,6 @@ def test_object_create_date_and_amz_date(): key.set_contents_from_string('bar') @tag('auth_common') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/x-amz-date and no date') -@attr(assertion='succeeds') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_create_amz_date_and_no_date(): date = formatdate(usegmt=True) @@ -264,11 +236,6 @@ def test_object_create_amz_date_and_no_date(): # the teardown is really messed up here. 
check it out @tag('auth_common') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/no authorization') -@attr(assertion='fails 403') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_create_bad_authorization_none(): key = _setup_bad_object(remove=('Authorization',)) @@ -280,11 +247,6 @@ def test_object_create_bad_authorization_none(): @tag('auth_common') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/no content length') -@attr(assertion='succeeds') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_bucket_create_contentlength_none(): _add_custom_headers(remove=('Content-Length',)) @@ -292,11 +254,6 @@ def test_bucket_create_contentlength_none(): @tag('auth_common') -@attr(resource='bucket') -@attr(method='acls') -@attr(operation='set w/no content length') -@attr(assertion='succeeds') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_acl_create_contentlength_none(): bucket = get_new_bucket() @@ -321,11 +278,6 @@ def _create_new_connection(): return TargetConnection(targets.main.default.conf, conn) @tag('auth_common') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/empty content length') -@attr(assertion='fails 400') -@attr('fails_on_rgw') @pytest.mark.fails_on_rgw def test_bucket_create_bad_contentlength_empty(): conn = _create_new_connection() @@ -336,11 +288,6 @@ def test_bucket_create_bad_contentlength_empty(): @tag('auth_common') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/no content length') -@attr(assertion='succeeds') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_bucket_create_bad_contentlength_none(): _add_custom_headers(remove=('Content-Length',)) @@ -348,11 +295,6 @@ def test_bucket_create_bad_contentlength_none(): @tag('auth_common') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/empty authorization') -@attr(assertion='fails 403') 
-@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_bucket_create_bad_authorization_empty(): _add_custom_headers({'Authorization': ''}) @@ -364,11 +306,6 @@ def test_bucket_create_bad_authorization_empty(): # the teardown is really messed up here. check it out @tag('auth_common') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/no authorization') -@attr(assertion='fails 403') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_bucket_create_bad_authorization_none(): _add_custom_headers(remove=('Authorization',)) @@ -382,11 +319,6 @@ def test_bucket_create_bad_authorization_none(): # @tag('auth_aws2') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/content length too short') -@attr(assertion='fails 400') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_create_bad_contentlength_mismatch_below_aws2(): check_aws2_support() @@ -400,11 +332,6 @@ def test_object_create_bad_contentlength_mismatch_below_aws2(): @tag('auth_aws2') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/incorrect authorization') -@attr(assertion='fails 403') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_create_bad_authorization_incorrect_aws2(): check_aws2_support() @@ -416,11 +343,6 @@ def test_object_create_bad_authorization_incorrect_aws2(): @tag('auth_aws2') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/invalid authorization') -@attr(assertion='fails 400') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_create_bad_authorization_invalid_aws2(): check_aws2_support() @@ -431,11 +353,6 @@ def test_object_create_bad_authorization_invalid_aws2(): assert e.error_code == 'InvalidArgument' @tag('auth_aws2') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/no date') -@attr(assertion='fails 403') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def 
test_object_create_bad_date_none_aws2(): check_aws2_support() @@ -447,10 +364,6 @@ def test_object_create_bad_date_none_aws2(): @tag('auth_aws2') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/invalid authorization') -@attr(assertion='fails 400') def test_bucket_create_bad_authorization_invalid_aws2(): check_aws2_support() _add_custom_headers({'Authorization': 'AWS HAHAHA'}) @@ -460,11 +373,6 @@ def test_bucket_create_bad_authorization_invalid_aws2(): assert e.error_code == 'InvalidArgument' @tag('auth_aws2') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/no date') -@attr(assertion='fails 403') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_bucket_create_bad_date_none_aws2(): check_aws2_support() @@ -488,10 +396,6 @@ def check_aws2_support(): @tag('auth_aws4') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/invalid MD5') -@attr(assertion='fails 400') def test_object_create_bad_md5_invalid_garbage_aws4(): check_aws4_support() key = _setup_bad_object({'Content-MD5':'AWS4 HAHAHA'}) @@ -503,10 +407,6 @@ def test_object_create_bad_md5_invalid_garbage_aws4(): @tag('auth_aws4') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/content length too short') -@attr(assertion='fails 400') def test_object_create_bad_contentlength_mismatch_below_aws4(): check_aws4_support() content = 'bar' @@ -520,10 +420,6 @@ def test_object_create_bad_contentlength_mismatch_below_aws4(): @tag('auth_aws4') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/incorrect authorization') -@attr(assertion='fails 403') def test_object_create_bad_authorization_incorrect_aws4(): check_aws4_support() key = _setup_bad_object({'Authorization': 'AWS4-HMAC-SHA256 Credential=AKIAIGR7ZNNBHC5BKSUB/20150930/us-east-1/s3/aws4_request,SignedHeaders=host;user-agent,Signature=FWeDfwojDSdS2Ztmpfeubhd9isU='}) @@ -535,10 +431,6 @@ def 
test_object_create_bad_authorization_incorrect_aws4(): @tag('auth_aws4') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/invalid authorization') -@attr(assertion='fails 400') def test_object_create_bad_authorization_invalid_aws4(): check_aws4_support() key = _setup_bad_object({'Authorization': 'AWS4-HMAC-SHA256 Credential=HAHAHA'}) @@ -550,10 +442,6 @@ def test_object_create_bad_authorization_invalid_aws4(): @tag('auth_aws4') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/empty user agent') -@attr(assertion='fails 403') def test_object_create_bad_ua_empty_aws4(): check_aws4_support() key = _setup_bad_object({'User-Agent': ''}) @@ -565,10 +453,6 @@ def test_object_create_bad_ua_empty_aws4(): @tag('auth_aws4') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/no user agent') -@attr(assertion='fails 403') def test_object_create_bad_ua_none_aws4(): check_aws4_support() key = _setup_bad_object(remove=('User-Agent',)) @@ -580,10 +464,6 @@ def test_object_create_bad_ua_none_aws4(): @tag('auth_aws4') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/invalid date') -@attr(assertion='succeeds') def test_object_create_bad_date_invalid_aws4(): check_aws4_support() key = _setup_bad_object({'Date': 'Bad Date'}) @@ -591,10 +471,6 @@ def test_object_create_bad_date_invalid_aws4(): @tag('auth_aws4') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/invalid x-amz-date') -@attr(assertion='fails 403') def test_object_create_bad_amz_date_invalid_aws4(): check_aws4_support() key = _setup_bad_object({'X-Amz-Date': 'Bad Date'}) @@ -606,10 +482,6 @@ def test_object_create_bad_amz_date_invalid_aws4(): @tag('auth_aws4') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/empty date') -@attr(assertion='succeeds') def test_object_create_bad_date_empty_aws4(): check_aws4_support() key = _setup_bad_object({'Date': ''}) @@ -617,10 +489,6 @@ def 
test_object_create_bad_date_empty_aws4(): @tag('auth_aws4') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/empty x-amz-date') -@attr(assertion='fails 403') def test_object_create_bad_amz_date_empty_aws4(): check_aws4_support() key = _setup_bad_object({'X-Amz-Date': ''}) @@ -632,10 +500,6 @@ def test_object_create_bad_amz_date_empty_aws4(): @tag('auth_aws4') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/no date') -@attr(assertion='succeeds') def test_object_create_bad_date_none_aws4(): check_aws4_support() key = _setup_bad_object(remove=('Date',)) @@ -643,10 +507,6 @@ def test_object_create_bad_date_none_aws4(): @tag('auth_aws4') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/no x-amz-date') -@attr(assertion='fails 403') def test_object_create_bad_amz_date_none_aws4(): check_aws4_support() key = _setup_bad_object(remove=('X-Amz-Date',)) @@ -658,10 +518,6 @@ def test_object_create_bad_amz_date_none_aws4(): @tag('auth_aws4') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/date in past') -@attr(assertion='succeeds') def test_object_create_bad_date_before_today_aws4(): check_aws4_support() key = _setup_bad_object({'Date': 'Tue, 07 Jul 2010 21:53:04 GMT'}) @@ -669,10 +525,6 @@ def test_object_create_bad_date_before_today_aws4(): @tag('auth_aws4') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/x-amz-date in past') -@attr(assertion='fails 403') def test_object_create_bad_amz_date_before_today_aws4(): check_aws4_support() key = _setup_bad_object({'X-Amz-Date': '20100707T215304Z'}) @@ -684,10 +536,6 @@ def test_object_create_bad_amz_date_before_today_aws4(): @tag('auth_aws4') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/date in future') -@attr(assertion='succeeds') def test_object_create_bad_date_after_today_aws4(): check_aws4_support() key = _setup_bad_object({'Date': 'Tue, 07 Jul 2030 21:53:04 GMT'}) @@ -695,10 
+543,6 @@ def test_object_create_bad_date_after_today_aws4(): @tag('auth_aws4') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/x-amz-date in future') -@attr(assertion='fails 403') def test_object_create_bad_amz_date_after_today_aws4(): check_aws4_support() key = _setup_bad_object({'X-Amz-Date': '20300707T215304Z'}) @@ -710,10 +554,6 @@ def test_object_create_bad_amz_date_after_today_aws4(): @tag('auth_aws4') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/date before epoch') -@attr(assertion='succeeds') def test_object_create_bad_date_before_epoch_aws4(): check_aws4_support() key = _setup_bad_object({'Date': 'Tue, 07 Jul 1950 21:53:04 GMT'}) @@ -721,10 +561,6 @@ def test_object_create_bad_date_before_epoch_aws4(): @tag('auth_aws4') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/x-amz-date before epoch') -@attr(assertion='fails 403') def test_object_create_bad_amz_date_before_epoch_aws4(): check_aws4_support() key = _setup_bad_object({'X-Amz-Date': '19500707T215304Z'}) @@ -736,10 +572,6 @@ def test_object_create_bad_amz_date_before_epoch_aws4(): @tag('auth_aws4') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/date after 9999') -@attr(assertion='fails 403') def test_object_create_bad_date_after_end_aws4(): check_aws4_support() key = _setup_bad_object({'Date': 'Tue, 07 Jul 9999 21:53:04 GMT'}) @@ -747,10 +579,6 @@ def test_object_create_bad_date_after_end_aws4(): @tag('auth_aws4') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/x-amz-date after 9999') -@attr(assertion='fails 403') def test_object_create_bad_amz_date_after_end_aws4(): check_aws4_support() key = _setup_bad_object({'X-Amz-Date': '99990707T215304Z'}) @@ -762,10 +590,6 @@ def test_object_create_bad_amz_date_after_end_aws4(): @tag('auth_aws4') -@attr(resource='object') -@attr(method='put') -@attr(operation='create with missing signed custom header') -@attr(assertion='fails 
403') def test_object_create_missing_signed_custom_header_aws4(): check_aws4_support() method='PUT' @@ -792,10 +616,6 @@ def test_object_create_missing_signed_custom_header_aws4(): @tag('auth_aws4') -@attr(resource='object') -@attr(method='put') -@attr(opearation='create with missing signed header') -@attr(assertion='fails 403') def test_object_create_missing_signed_header_aws4(): check_aws4_support() method='PUT' @@ -823,10 +643,6 @@ def test_object_create_missing_signed_header_aws4(): @tag('auth_aws4') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/invalid authorization') -@attr(assertion='fails 400') def test_bucket_create_bad_authorization_invalid_aws4(): check_aws4_support() _add_custom_headers({'Authorization': 'AWS4 HAHAHA'}) @@ -838,10 +654,6 @@ def test_bucket_create_bad_authorization_invalid_aws4(): @tag('auth_aws4') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/empty user agent') -@attr(assertion='fails 403') def test_bucket_create_bad_ua_empty_aws4(): check_aws4_support() _add_custom_headers({'User-Agent': ''}) @@ -852,10 +664,6 @@ def test_bucket_create_bad_ua_empty_aws4(): assert e.error_code == 'SignatureDoesNotMatch' @tag('auth_aws4') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/no user agent') -@attr(assertion='fails 403') def test_bucket_create_bad_ua_none_aws4(): check_aws4_support() _add_custom_headers(remove=('User-Agent',)) @@ -867,10 +675,6 @@ def test_bucket_create_bad_ua_none_aws4(): @tag('auth_aws4') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/invalid date') -@attr(assertion='succeeds') def test_bucket_create_bad_date_invalid_aws4(): check_aws4_support() _add_custom_headers({'Date': 'Bad Date'}) @@ -878,10 +682,6 @@ def test_bucket_create_bad_date_invalid_aws4(): @tag('auth_aws4') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/invalid x-amz-date') -@attr(assertion='fails 403') def 
test_bucket_create_bad_amz_date_invalid_aws4(): check_aws4_support() _add_custom_headers({'X-Amz-Date': 'Bad Date'}) @@ -893,10 +693,6 @@ def test_bucket_create_bad_amz_date_invalid_aws4(): @tag('auth_aws4') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/empty date') -@attr(assertion='succeeds') def test_bucket_create_bad_date_empty_aws4(): check_aws4_support() _add_custom_headers({'Date': ''}) @@ -904,10 +700,6 @@ def test_bucket_create_bad_date_empty_aws4(): @tag('auth_aws4') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/empty x-amz-date') -@attr(assertion='fails 403') def test_bucket_create_bad_amz_date_empty_aws4(): check_aws4_support() _add_custom_headers({'X-Amz-Date': ''}) @@ -918,10 +710,6 @@ def test_bucket_create_bad_amz_date_empty_aws4(): assert e.error_code in ('AccessDenied', 'SignatureDoesNotMatch') @tag('auth_aws4') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/no date') -@attr(assertion='succeeds') def test_bucket_create_bad_date_none_aws4(): check_aws4_support() _add_custom_headers(remove=('Date',)) @@ -929,10 +717,6 @@ def test_bucket_create_bad_date_none_aws4(): @tag('auth_aws4') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/no x-amz-date') -@attr(assertion='fails 403') def test_bucket_create_bad_amz_date_none_aws4(): check_aws4_support() _add_custom_headers(remove=('X-Amz-Date',)) @@ -944,10 +728,6 @@ def test_bucket_create_bad_amz_date_none_aws4(): @tag('auth_aws4') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/date in past') -@attr(assertion='succeeds') def test_bucket_create_bad_date_before_today_aws4(): check_aws4_support() _add_custom_headers({'Date': 'Tue, 07 Jul 2010 21:53:04 GMT'}) @@ -955,10 +735,6 @@ def test_bucket_create_bad_date_before_today_aws4(): @tag('auth_aws4') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/x-amz-date in past') -@attr(assertion='fails 403') def 
test_bucket_create_bad_amz_date_before_today_aws4(): check_aws4_support() _add_custom_headers({'X-Amz-Date': '20100707T215304Z'}) @@ -970,10 +746,6 @@ def test_bucket_create_bad_amz_date_before_today_aws4(): @tag('auth_aws4') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/date in future') -@attr(assertion='succeeds') def test_bucket_create_bad_date_after_today_aws4(): check_aws4_support() _add_custom_headers({'Date': 'Tue, 07 Jul 2030 21:53:04 GMT'}) @@ -981,10 +753,6 @@ def test_bucket_create_bad_date_after_today_aws4(): @tag('auth_aws4') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/x-amz-date in future') -@attr(assertion='fails 403') def test_bucket_create_bad_amz_date_after_today_aws4(): check_aws4_support() _add_custom_headers({'X-Amz-Date': '20300707T215304Z'}) @@ -996,10 +764,6 @@ def test_bucket_create_bad_amz_date_after_today_aws4(): @tag('auth_aws4') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/date before epoch') -@attr(assertion='succeeds') def test_bucket_create_bad_date_before_epoch_aws4(): check_aws4_support() _add_custom_headers({'Date': 'Tue, 07 Jul 1950 21:53:04 GMT'}) @@ -1007,10 +771,6 @@ def test_bucket_create_bad_date_before_epoch_aws4(): @tag('auth_aws4') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/x-amz-date before epoch') -@attr(assertion='fails 403') def test_bucket_create_bad_amz_date_before_epoch_aws4(): check_aws4_support() _add_custom_headers({'X-Amz-Date': '19500707T215304Z'}) diff --git a/s3tests/functional/test_s3.py b/s3tests/functional/test_s3.py index fc84220..cc70c24 100644 --- a/s3tests/functional/test_s3.py +++ b/s3tests/functional/test_s3.py @@ -7,7 +7,6 @@ import datetime import time import email.utils import isodate -import nose import pytest import operator import socket @@ -28,8 +27,6 @@ import re from collections import defaultdict from urllib.parse import urlparse -from nose.plugins.attrib import attr - from 
. import utils from .utils import assert_raises @@ -95,12 +92,7 @@ def _get_alt_connection(): # Breaks DNS with SubdomainCallingFormat -@attr('fails_with_subdomain') @pytest.mark.fails_with_subdomain -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/! in name') -@attr(assertion='fails with subdomain') def test_bucket_create_naming_bad_punctuation(): # characters other than [a-zA-Z0-9._-] check_bad_bucket_name('alpha!soup') @@ -130,13 +122,7 @@ def check_configure_versioning_retry(bucket, status, expected_string): assert expected_string == read_status -@attr(resource='object') -@attr(method='create') -@attr(operation='create versioned object, read not exist null version') -@attr(assertion='read null version behaves correctly') -@attr('versioning') @pytest.mark.versioning -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_versioning_obj_read_not_exist_null(): bucket = get_new_bucket() @@ -153,17 +139,9 @@ def test_versioning_obj_read_not_exist_null(): key = bucket.get_key(objname, version_id='null') assert key == None -@attr(resource='object') -@attr(method='put') -@attr(operation='append object') -@attr(assertion='success') -@attr('fails_on_aws') @pytest.mark.fails_on_aws -@attr('fails_with_subdomain') @pytest.mark.fails_with_subdomain -@attr('appendobject') @pytest.mark.appendobject -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_append_object(): bucket = get_new_bucket() @@ -182,17 +160,9 @@ def test_append_object(): key = bucket.get_key('foo') assert key.size == 6 -@attr(resource='object') -@attr(method='put') -@attr(operation='append to normal object') -@attr(assertion='fails 409') -@attr('fails_on_aws') @pytest.mark.fails_on_aws -@attr('fails_with_subdomain') @pytest.mark.fails_with_subdomain -@attr('appendobject') @pytest.mark.appendobject -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_append_normal_object(): bucket = get_new_bucket() @@ -207,17 +177,9 @@ def 
test_append_normal_object(): assert res.status == 409 -@attr(resource='object') -@attr(method='put') -@attr(operation='append position not right') -@attr(assertion='fails 409') -@attr('fails_on_aws') @pytest.mark.fails_on_aws -@attr('fails_with_subdomain') @pytest.mark.fails_with_subdomain -@attr('appendobject') @pytest.mark.appendobject -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_append_object_position_wrong(): bucket = get_new_bucket() @@ -236,11 +198,6 @@ def test_append_object_position_wrong(): # TODO rgw log_bucket.set_as_logging_target() gives 403 Forbidden # http://tracker.newdream.net/issues/984 -@attr(resource='bucket.log') -@attr(method='put') -@attr(operation='set/enable/disable logging target') -@attr(assertion='operations succeed') -@attr('fails_on_rgw') @pytest.mark.fails_on_rgw def test_logging_toggle(): bucket = get_new_bucket() @@ -418,14 +375,8 @@ def lc_transitions(transitions=None): return result -@attr(resource='object') -@attr(method='put') -@attr(operation='test create object with storage class') -@attr('storage_class') @pytest.mark.storage_class -@attr('fails_on_aws') @pytest.mark.fails_on_aws -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_storage_class(): sc = configured_storage_classes() @@ -440,14 +391,8 @@ def test_object_storage_class(): verify_object(bucket, k, data, storage_class) -@attr(resource='object') -@attr(method='put') -@attr(operation='test create multipart object with storage class') -@attr('storage_class') @pytest.mark.storage_class -@attr('fails_on_aws') @pytest.mark.fails_on_aws -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_storage_class_multipart(): sc = configured_storage_classes() @@ -485,27 +430,15 @@ def _do_test_object_modify_storage_class(obj_write_func, size): copy_object_storage_class(bucket, k, bucket, k, new_storage_class) verify_object(bucket, k, data, storage_class) -@attr(resource='object') -@attr(method='put') -@attr(operation='test 
changing objects storage class') -@attr('storage_class') @pytest.mark.storage_class -@attr('fails_on_aws') @pytest.mark.fails_on_aws -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_modify_storage_class(): _do_test_object_modify_storage_class(_populate_key, size=9*1024*1024) -@attr(resource='object') -@attr(method='put') -@attr(operation='test changing objects storage class') -@attr('storage_class') @pytest.mark.storage_class -@attr('fails_on_aws') @pytest.mark.fails_on_aws -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_modify_storage_class_multipart(): _do_test_object_modify_storage_class(_populate_multipart_key, size=11*1024*1024) @@ -531,26 +464,14 @@ def _do_test_object_storage_class_copy(obj_write_func, size): copy_object_storage_class(src_bucket, src_key, dest_bucket, dest_key, new_storage_class) verify_object(dest_bucket, dest_key, data, new_storage_class) -@attr(resource='object') -@attr(method='copy') -@attr(operation='test copy object to object with different storage class') -@attr('storage_class') @pytest.mark.storage_class -@attr('fails_on_aws') @pytest.mark.fails_on_aws -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_storage_class_copy(): _do_test_object_storage_class_copy(_populate_key, size=9*1024*1024) -@attr(resource='object') -@attr(method='copy') -@attr(operation='test changing objects storage class') -@attr('storage_class') @pytest.mark.storage_class -@attr('fails_on_aws') @pytest.mark.fails_on_aws -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_storage_class_copy_multipart(): _do_test_object_storage_class_copy(_populate_multipart_key, size=9*1024*1024) @@ -653,24 +574,12 @@ def _test_atomic_dual_conditional_write(file_size): # verify the file _verify_atomic_key_data(key, file_size, 'B') -@attr(resource='object') -@attr(method='put') -@attr(operation='write one or the other') -@attr(assertion='1MB successful') -@attr('fails_on_aws') 
@pytest.mark.fails_on_aws -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_atomic_dual_conditional_write_1mb(): _test_atomic_dual_conditional_write(1024*1024) -@attr(resource='object') -@attr(method='put') -@attr(operation='write file in deleted bucket') -@attr(assertion='fail 404') -@attr('fails_on_aws') @pytest.mark.fails_on_aws -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_atomic_write_bucket_gone(): bucket = get_new_bucket() @@ -711,13 +620,7 @@ def _multipart_upload_enc(bucket, s3_key_name, size, part_size=5*1024*1024, -@attr(resource='object') -@attr(method='put') -@attr(operation='multipart upload with bad key for uploading chunks') -@attr(assertion='successful') -@attr('encryption') @pytest.mark.encryption -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_encryption_sse_c_multipart_invalid_chunks_1(): bucket = get_new_bucket() @@ -741,13 +644,7 @@ def test_encryption_sse_c_multipart_invalid_chunks_1(): metadata={'foo': 'bar'}) assert e.status == 400 -@attr(resource='object') -@attr(method='put') -@attr(operation='multipart upload with bad md5 for chunks') -@attr(assertion='successful') -@attr('encryption') @pytest.mark.encryption -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_encryption_sse_c_multipart_invalid_chunks_2(): bucket = get_new_bucket() @@ -771,15 +668,8 @@ def test_encryption_sse_c_multipart_invalid_chunks_2(): metadata={'foo': 'bar'}) assert e.status == 400 -@attr(resource='bucket') -@attr(method='get') -@attr(operation='Test Bucket Policy for a user belonging to a different tenant') -@attr(assertion='succeeds') -@attr('fails_with_subdomain') @pytest.mark.fails_with_subdomain -@attr('bucket-policy') @pytest.mark.bucket_policy -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_bucket_policy_different_tenant(): bucket = get_new_bucket() @@ -815,12 +705,7 @@ def test_bucket_policy_different_tenant(): b = new_conn.get_bucket(bucket_name) b.get_all_keys() 
-@attr(resource='bucket') -@attr(method='put') -@attr(operation='Test put condition operator end with ifExists') -@attr('bucket-policy') @pytest.mark.bucket_policy -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_bucket_policy_set_condition_operator_end_with_IfExists(): bucket = _create_keys(keys=['foo']) @@ -856,15 +741,8 @@ def test_bucket_policy_set_condition_operator_end_with_IfExists(): def _make_arn_resource(path="*"): return "arn:aws:s3:::{}".format(path) -@attr(resource='object') -@attr(method='put') -@attr(operation='put obj with RequestObjectTag') -@attr(assertion='success') -@attr('tagging') @pytest.mark.tagging -@attr('bucket-policy') @pytest.mark.bucket_policy -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_bucket_policy_put_obj_request_obj_tag(): diff --git a/s3tests/functional/test_s3_website.py b/s3tests/functional/test_s3_website.py index 76eb60c..8cc04ed 100644 --- a/s3tests/functional/test_s3_website.py +++ b/s3tests/functional/test_s3_website.py @@ -1,7 +1,5 @@ - import sys import collections -import nose import pytest import string import random @@ -12,9 +10,6 @@ import socket from urllib.parse import urlparse -from nose.plugins.attrib import attr -from nose.tools import timed - from .. import common from . 
import ( @@ -236,29 +231,15 @@ def _website_request(bucket_name, path, connect_hostname=None, method='GET', tim return res # ---------- Non-existant buckets via the website endpoint -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='non-existant bucket via website endpoint should give NoSuchBucket, exposing security risk') -@attr('s3website') @pytest.mark.s3website -@attr('fails_on_rgw') @pytest.mark.fails_on_rgw def test_website_nonexistant_bucket_s3(): bucket_name = get_new_bucket_name() res = _website_request(bucket_name, '') _website_expected_error_response(res, bucket_name, 404, 'Not Found', 'NoSuchBucket', content=_website_expected_default_html(Code='NoSuchBucket')) -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -#@attr(assertion='non-existant bucket via website endpoint should give Forbidden, keeping bucket identity secure') -@attr(assertion='non-existant bucket via website endpoint should give NoSuchBucket') -@attr('s3website') @pytest.mark.s3website -@attr('fails_on_s3') @pytest.mark.fails_on_s3 -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_website_nonexistant_bucket_rgw(): bucket_name = get_new_bucket_name() @@ -267,15 +248,9 @@ def test_website_nonexistant_bucket_rgw(): _website_expected_error_response(res, bucket_name, 404, 'Not Found', 'NoSuchBucket', content=_website_expected_default_html(Code='NoSuchBucket')) #------------- IndexDocument only, successes -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='non-empty public buckets via s3website return page for /, where page is public') -@attr('s3website') @pytest.mark.s3website -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore -@timed(10) +@pytest.mark.timeout(10) def test_website_public_bucket_list_public_index(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) @@ -297,13 +272,7 @@ def 
test_website_public_bucket_list_public_index(): indexhtml.delete() bucket.delete() -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='non-empty private buckets via s3website return page for /, where page is private') -@attr('s3website') @pytest.mark.s3website -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_website_private_bucket_list_public_index(): bucket = get_new_bucket() @@ -329,13 +298,7 @@ def test_website_private_bucket_list_public_index(): # ---------- IndexDocument only, failures -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='empty private buckets via s3website return a 403 for /') -@attr('s3website') @pytest.mark.s3website -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_website_private_bucket_list_empty(): bucket = get_new_bucket() @@ -347,13 +310,7 @@ def test_website_private_bucket_list_empty(): _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied', content=_website_expected_default_html(Code='AccessDenied')) bucket.delete() -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='empty public buckets via s3website return a 404 for /') -@attr('s3website') @pytest.mark.s3website -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_website_public_bucket_list_empty(): bucket = get_new_bucket() @@ -364,13 +321,7 @@ def test_website_public_bucket_list_empty(): _website_expected_error_response(res, bucket.name, 404, 'Not Found', 'NoSuchKey', content=_website_expected_default_html(Code='NoSuchKey')) bucket.delete() -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='non-empty public buckets via s3website return page for /, where page is private') -@attr('s3website') @pytest.mark.s3website -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_website_public_bucket_list_private_index(): bucket = get_new_bucket() @@ -391,13 
+342,7 @@ def test_website_public_bucket_list_private_index(): indexhtml.delete() bucket.delete() -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='non-empty private buckets via s3website return page for /, where page is private') -@attr('s3website') @pytest.mark.s3website -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_website_private_bucket_list_private_index(): bucket = get_new_bucket() @@ -419,13 +364,7 @@ def test_website_private_bucket_list_private_index(): bucket.delete() # ---------- IndexDocument & ErrorDocument, failures due to errordoc assigned but missing -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='empty private buckets via s3website return a 403 for /, missing errordoc') -@attr('s3website') @pytest.mark.s3website -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_website_private_bucket_list_empty_missingerrordoc(): bucket = get_new_bucket() @@ -437,13 +376,7 @@ def test_website_private_bucket_list_empty_missingerrordoc(): bucket.delete() -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='empty public buckets via s3website return a 404 for /, missing errordoc') -@attr('s3website') @pytest.mark.s3website -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_website_public_bucket_list_empty_missingerrordoc(): bucket = get_new_bucket() @@ -454,13 +387,7 @@ def test_website_public_bucket_list_empty_missingerrordoc(): _website_expected_error_response(res, bucket.name, 404, 'Not Found', 'NoSuchKey') bucket.delete() -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='non-empty public buckets via s3website return page for /, where page is private, missing errordoc') -@attr('s3website') @pytest.mark.s3website -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_website_public_bucket_list_private_index_missingerrordoc(): bucket = get_new_bucket() 
@@ -480,13 +407,7 @@ def test_website_public_bucket_list_private_index_missingerrordoc(): indexhtml.delete() bucket.delete() -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='non-empty private buckets via s3website return page for /, where page is private, missing errordoc') -@attr('s3website') @pytest.mark.s3website -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_website_private_bucket_list_private_index_missingerrordoc(): bucket = get_new_bucket() @@ -507,13 +428,7 @@ def test_website_private_bucket_list_private_index_missingerrordoc(): bucket.delete() # ---------- IndexDocument & ErrorDocument, failures due to errordoc assigned but not accessible -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='empty private buckets via s3website return a 403 for /, blocked errordoc') -@attr('s3website') @pytest.mark.s3website -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_website_private_bucket_list_empty_blockederrordoc(): bucket = get_new_bucket() @@ -537,13 +452,7 @@ def test_website_private_bucket_list_empty_blockederrordoc(): errorhtml.delete() bucket.delete() -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='check if there is an invalid payload after serving error doc') -@attr('s3website') @pytest.mark.s3website -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_website_public_bucket_list_pubilc_errordoc(): bucket = get_new_bucket() @@ -590,13 +499,7 @@ def test_website_public_bucket_list_pubilc_errordoc(): errorhtml.delete() bucket.delete() -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='empty public buckets via s3website return a 404 for /, blocked errordoc') -@attr('s3website') @pytest.mark.s3website -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_website_public_bucket_list_empty_blockederrordoc(): bucket = get_new_bucket() @@ -619,13 
+522,7 @@ def test_website_public_bucket_list_empty_blockederrordoc(): errorhtml.delete() bucket.delete() -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='non-empty public buckets via s3website return page for /, where page is private, blocked errordoc') -@attr('s3website') @pytest.mark.s3website -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_website_public_bucket_list_private_index_blockederrordoc(): bucket = get_new_bucket() @@ -654,13 +551,7 @@ def test_website_public_bucket_list_private_index_blockederrordoc(): errorhtml.delete() bucket.delete() -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='non-empty private buckets via s3website return page for /, where page is private, blocked errordoc') -@attr('s3website') @pytest.mark.s3website -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_website_private_bucket_list_private_index_blockederrordoc(): bucket = get_new_bucket() @@ -690,13 +581,7 @@ def test_website_private_bucket_list_private_index_blockederrordoc(): bucket.delete() # ---------- IndexDocument & ErrorDocument, failures with errordoc available -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='empty private buckets via s3website return a 403 for /, good errordoc') -@attr('s3website') @pytest.mark.s3website -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_website_private_bucket_list_empty_gooderrordoc(): bucket = get_new_bucket() @@ -715,13 +600,7 @@ def test_website_private_bucket_list_empty_gooderrordoc(): errorhtml.delete() bucket.delete() -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='empty public buckets via s3website return a 404 for /, good errordoc') -@attr('s3website') @pytest.mark.s3website -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_website_public_bucket_list_empty_gooderrordoc(): bucket = get_new_bucket() @@ 
-741,13 +620,7 @@ def test_website_public_bucket_list_empty_gooderrordoc(): errorhtml.delete() bucket.delete() -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='non-empty public buckets via s3website return page for /, where page is private') -@attr('s3website') @pytest.mark.s3website -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_website_public_bucket_list_private_index_gooderrordoc(): bucket = get_new_bucket() @@ -772,13 +645,7 @@ def test_website_public_bucket_list_private_index_gooderrordoc(): errorhtml.delete() bucket.delete() -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='non-empty private buckets via s3website return page for /, where page is private') -@attr('s3website') @pytest.mark.s3website -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_website_private_bucket_list_private_index_gooderrordoc(): bucket = get_new_bucket() @@ -804,13 +671,7 @@ def test_website_private_bucket_list_private_index_gooderrordoc(): bucket.delete() # ------ RedirectAll tests -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='RedirectAllRequestsTo without protocol should TODO') -@attr('s3website') @pytest.mark.s3website -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_website_bucket_private_redirectall_base(): bucket = get_new_bucket() @@ -823,13 +684,7 @@ def test_website_bucket_private_redirectall_base(): bucket.delete() -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='RedirectAllRequestsTo without protocol should TODO') -@attr('s3website') @pytest.mark.s3website -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_website_bucket_private_redirectall_path(): bucket = get_new_bucket() @@ -844,13 +699,7 @@ def test_website_bucket_private_redirectall_path(): bucket.delete() -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') 
-@attr(assertion='RedirectAllRequestsTo without protocol should TODO') -@attr('s3website') @pytest.mark.s3website -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_website_bucket_private_redirectall_path_upgrade(): bucket = get_new_bucket() @@ -867,15 +716,8 @@ def test_website_bucket_private_redirectall_path_upgrade(): bucket.delete() # ------ x-amz redirect tests -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='x-amz-website-redirect-location should not fire without websiteconf') -@attr('s3website') @pytest.mark.s3website -@attr('x-amz-website-redirect-location') @pytest.mark.s3website_redirect_location -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_website_xredirect_nonwebsite(): bucket = get_new_bucket() @@ -902,15 +744,8 @@ def test_website_xredirect_nonwebsite(): k.delete() bucket.delete() -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='x-amz-website-redirect-location should fire websiteconf, relative path, public key') -@attr('s3website') @pytest.mark.s3website -@attr('x-amz-website-redirect-location') @pytest.mark.s3website_redirect_location -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_website_xredirect_public_relative(): bucket = get_new_bucket() @@ -932,15 +767,8 @@ def test_website_xredirect_public_relative(): k.delete() bucket.delete() -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='x-amz-website-redirect-location should fire websiteconf, absolute, public key') -@attr('s3website') @pytest.mark.s3website -@attr('x-amz-website-redirect-location') @pytest.mark.s3website_redirect_location -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_website_xredirect_public_abs(): bucket = get_new_bucket() @@ -962,15 +790,8 @@ def test_website_xredirect_public_abs(): k.delete() bucket.delete() -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') 
-@attr(assertion='x-amz-website-redirect-location should fire websiteconf, relative path, private key') -@attr('s3website') @pytest.mark.s3website -@attr('x-amz-website-redirect-location') @pytest.mark.s3website_redirect_location -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_website_xredirect_private_relative(): bucket = get_new_bucket() @@ -992,15 +813,8 @@ def test_website_xredirect_private_relative(): k.delete() bucket.delete() -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='x-amz-website-redirect-location should fire websiteconf, absolute, private key') -@attr('s3website') @pytest.mark.s3website -@attr('x-amz-website-redirect-location') @pytest.mark.s3website_redirect_location -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_website_xredirect_private_abs(): bucket = get_new_bucket() @@ -1226,7 +1040,6 @@ def routing_teardown(**kwargs): print('Deleting', str(o)) o.delete() -#@timed(10) def routing_check(*args, **kwargs): bucket = kwargs['bucket'] args=args[0] @@ -1261,11 +1074,8 @@ def routing_check(*args, **kwargs): else: assert(False) -@attr('s3website_RoutingRules') @pytest.mark.s3website_routing_rules -@attr('s3website') @pytest.mark.s3website -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_routing_generator(): for t in ROUTING_RULES_TESTS: diff --git a/s3tests_boto3/functional/test_headers.py b/s3tests_boto3/functional/test_headers.py index 479b431..4573d72 100644 --- a/s3tests_boto3/functional/test_headers.py +++ b/s3tests_boto3/functional/test_headers.py @@ -1,6 +1,4 @@ import boto3 -from nose.plugins.attrib import attr -import nose import pytest from botocore.exceptions import ClientError from email.utils import formatdate @@ -163,10 +161,6 @@ def tag(*tags): # @tag('auth_common') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/invalid MD5') -@attr(assertion='fails 400') def test_object_create_bad_md5_invalid_short(): e = 
_add_header_create_bad_object({'Content-MD5':'YWJyYWNhZGFicmE='}) status, error_code = _get_status_and_error_code(e.response) @@ -174,10 +168,6 @@ def test_object_create_bad_md5_invalid_short(): assert error_code == 'InvalidDigest' @tag('auth_common') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/mismatched MD5') -@attr(assertion='fails 400') def test_object_create_bad_md5_bad(): e = _add_header_create_bad_object({'Content-MD5':'rL0Y20xC+Fzt72VPzMSk2A=='}) status, error_code = _get_status_and_error_code(e.response) @@ -185,10 +175,6 @@ def test_object_create_bad_md5_bad(): assert error_code == 'BadDigest' @tag('auth_common') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/empty MD5') -@attr(assertion='fails 400') def test_object_create_bad_md5_empty(): e = _add_header_create_bad_object({'Content-MD5':''}) status, error_code = _get_status_and_error_code(e.response) @@ -196,52 +182,31 @@ def test_object_create_bad_md5_empty(): assert error_code == 'InvalidDigest' @tag('auth_common') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/no MD5 header') -@attr(assertion='succeeds') def test_object_create_bad_md5_none(): bucket_name, key_name = _remove_header_create_object('Content-MD5') client = get_client() client.put_object(Bucket=bucket_name, Key=key_name, Body='bar') @tag('auth_common') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/Expect 200') -@attr(assertion='garbage, but S3 succeeds!') def test_object_create_bad_expect_mismatch(): bucket_name, key_name = _add_header_create_object({'Expect': 200}) client = get_client() client.put_object(Bucket=bucket_name, Key=key_name, Body='bar') @tag('auth_common') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/empty expect') -@attr(assertion='succeeds ... 
should it?') def test_object_create_bad_expect_empty(): bucket_name, key_name = _add_header_create_object({'Expect': ''}) client = get_client() client.put_object(Bucket=bucket_name, Key=key_name, Body='bar') @tag('auth_common') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/no expect') -@attr(assertion='succeeds') def test_object_create_bad_expect_none(): bucket_name, key_name = _remove_header_create_object('Expect') client = get_client() client.put_object(Bucket=bucket_name, Key=key_name, Body='bar') @tag('auth_common') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/empty content length') -@attr(assertion='fails 400') # TODO: remove 'fails_on_rgw' and once we have learned how to remove the content-length header -@attr('fails_on_rgw') @pytest.mark.fails_on_rgw def test_object_create_bad_contentlength_empty(): e = _add_header_create_bad_object({'Content-Length':''}) @@ -249,11 +214,6 @@ def test_object_create_bad_contentlength_empty(): assert status == 400 @tag('auth_common') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/negative content length') -@attr(assertion='fails 400') -@attr('fails_on_mod_proxy_fcgi') @pytest.mark.fails_on_mod_proxy_fcgi def test_object_create_bad_contentlength_negative(): client = get_client() @@ -264,12 +224,7 @@ def test_object_create_bad_contentlength_negative(): assert status == 400 @tag('auth_common') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/no content length') -@attr(assertion='fails 411') # TODO: remove 'fails_on_rgw' and once we have learned how to remove the content-length header -@attr('fails_on_rgw') @pytest.mark.fails_on_rgw def test_object_create_bad_contentlength_none(): remove = 'Content-Length' @@ -279,20 +234,12 @@ def test_object_create_bad_contentlength_none(): assert error_code == 'MissingContentLength' @tag('auth_common') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/content type 
text/plain') -@attr(assertion='succeeds') def test_object_create_bad_contenttype_invalid(): bucket_name, key_name = _add_header_create_object({'Content-Type': 'text/plain'}) client = get_client() client.put_object(Bucket=bucket_name, Key=key_name, Body='bar') @tag('auth_common') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/empty content type') -@attr(assertion='succeeds') def test_object_create_bad_contenttype_empty(): client = get_client() key_name = 'foo' @@ -300,10 +247,6 @@ def test_object_create_bad_contenttype_empty(): client.put_object(Bucket=bucket_name, Key=key_name, Body='bar', ContentType='') @tag('auth_common') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/no content type') -@attr(assertion='succeeds') def test_object_create_bad_contenttype_none(): bucket_name = get_new_bucket() key_name = 'foo' @@ -313,12 +256,7 @@ def test_object_create_bad_contenttype_none(): @tag('auth_common') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/empty authorization') -@attr(assertion='fails 403') # TODO: remove 'fails_on_rgw' and once we have learned how to remove the authorization header -@attr('fails_on_rgw') @pytest.mark.fails_on_rgw def test_object_create_bad_authorization_empty(): e = _add_header_create_bad_object({'Authorization': ''}) @@ -326,12 +264,7 @@ def test_object_create_bad_authorization_empty(): assert status == 403 @tag('auth_common') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/date and x-amz-date') -@attr(assertion='succeeds') # TODO: remove 'fails_on_rgw' and once we have learned how to pass both the 'Date' and 'X-Amz-Date' header during signing and not 'X-Amz-Date' before -@attr('fails_on_rgw') @pytest.mark.fails_on_rgw def test_object_create_date_and_amz_date(): date = formatdate(usegmt=True) @@ -340,12 +273,7 @@ def test_object_create_date_and_amz_date(): client.put_object(Bucket=bucket_name, Key=key_name, Body='bar') @tag('auth_common') 
-@attr(resource='object') -@attr(method='put') -@attr(operation='create w/x-amz-date and no date') -@attr(assertion='succeeds') # TODO: remove 'fails_on_rgw' and once we have learned how to pass both the 'Date' and 'X-Amz-Date' header during signing and not 'X-Amz-Date' before -@attr('fails_on_rgw') @pytest.mark.fails_on_rgw def test_object_create_amz_date_and_no_date(): date = formatdate(usegmt=True) @@ -355,12 +283,7 @@ def test_object_create_amz_date_and_no_date(): # the teardown is really messed up here. check it out @tag('auth_common') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/no authorization') -@attr(assertion='fails 403') # TODO: remove 'fails_on_rgw' and once we have learned how to remove the authorization header -@attr('fails_on_rgw') @pytest.mark.fails_on_rgw def test_object_create_bad_authorization_none(): e = _remove_header_create_bad_object('Authorization') @@ -368,24 +291,14 @@ def test_object_create_bad_authorization_none(): assert status == 403 @tag('auth_common') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/no content length') -@attr(assertion='succeeds') # TODO: remove 'fails_on_rgw' and once we have learned how to remove the content-length header -@attr('fails_on_rgw') @pytest.mark.fails_on_rgw def test_bucket_create_contentlength_none(): remove = 'Content-Length' _remove_header_create_bucket(remove) @tag('auth_common') -@attr(resource='bucket') -@attr(method='acls') -@attr(operation='set w/no content length') -@attr(assertion='succeeds') # TODO: remove 'fails_on_rgw' and once we have learned how to remove the content-length header -@attr('fails_on_rgw') @pytest.mark.fails_on_rgw def test_object_acl_create_contentlength_none(): bucket_name = get_new_bucket() @@ -401,10 +314,6 @@ def test_object_acl_create_contentlength_none(): client.put_object_acl(Bucket=bucket_name, Key='foo', ACL='public-read') @tag('auth_common') -@attr(resource='bucket') -@attr(method='acls') -@attr(operation='set 
w/invalid permission') -@attr(assertion='fails 400') def test_bucket_put_bad_canned_acl(): bucket_name = get_new_bucket() client = get_client() @@ -418,10 +327,6 @@ def test_bucket_put_bad_canned_acl(): assert status == 400 @tag('auth_common') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/expect 200') -@attr(assertion='garbage, but S3 succeeds!') def test_bucket_create_bad_expect_mismatch(): bucket_name = get_new_bucket_name() client = get_client() @@ -432,22 +337,13 @@ def test_bucket_create_bad_expect_mismatch(): client.create_bucket(Bucket=bucket_name) @tag('auth_common') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/expect empty') -@attr(assertion='garbage, but S3 succeeds!') def test_bucket_create_bad_expect_empty(): headers = {'Expect': ''} _add_header_create_bucket(headers) @tag('auth_common') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/empty content length') -@attr(assertion='fails 400') # TODO: The request isn't even making it to the RGW past the frontend # This test had 'fails_on_rgw' before the move to boto3 -@attr('fails_on_rgw') @pytest.mark.fails_on_rgw def test_bucket_create_bad_contentlength_empty(): headers = {'Content-Length': ''} @@ -456,11 +352,6 @@ def test_bucket_create_bad_contentlength_empty(): assert status == 400 @tag('auth_common') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/negative content length') -@attr(assertion='fails 400') -@attr('fails_on_mod_proxy_fcgi') @pytest.mark.fails_on_mod_proxy_fcgi def test_bucket_create_bad_contentlength_negative(): headers = {'Content-Length': '-1'} @@ -469,24 +360,14 @@ def test_bucket_create_bad_contentlength_negative(): assert status == 400 @tag('auth_common') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/no content length') -@attr(assertion='succeeds') # TODO: remove 'fails_on_rgw' and once we have learned how to remove the content-length header 
-@attr('fails_on_rgw') @pytest.mark.fails_on_rgw def test_bucket_create_bad_contentlength_none(): remove = 'Content-Length' _remove_header_create_bucket(remove) @tag('auth_common') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/empty authorization') -@attr(assertion='fails 403') # TODO: remove 'fails_on_rgw' and once we have learned how to manipulate the authorization header -@attr('fails_on_rgw') @pytest.mark.fails_on_rgw def test_bucket_create_bad_authorization_empty(): headers = {'Authorization': ''} @@ -496,12 +377,7 @@ def test_bucket_create_bad_authorization_empty(): assert error_code == 'AccessDenied' @tag('auth_common') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/no authorization') -@attr(assertion='fails 403') # TODO: remove 'fails_on_rgw' and once we have learned how to manipulate the authorization header -@attr('fails_on_rgw') @pytest.mark.fails_on_rgw def test_bucket_create_bad_authorization_none(): e = _remove_header_create_bad_bucket('Authorization') @@ -510,10 +386,6 @@ def test_bucket_create_bad_authorization_none(): assert error_code == 'AccessDenied' @tag('auth_aws2') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/invalid MD5') -@attr(assertion='fails 400') def test_object_create_bad_md5_invalid_garbage_aws2(): v2_client = get_v2_client() headers = {'Content-MD5': 'AWS HAHAHA'} @@ -523,12 +395,7 @@ def test_object_create_bad_md5_invalid_garbage_aws2(): assert error_code == 'InvalidDigest' @tag('auth_aws2') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/content length too short') -@attr(assertion='fails 400') # TODO: remove 'fails_on_rgw' and once we have learned how to manipulate the Content-Length header -@attr('fails_on_rgw') @pytest.mark.fails_on_rgw def test_object_create_bad_contentlength_mismatch_below_aws2(): v2_client = get_v2_client() @@ -541,12 +408,7 @@ def test_object_create_bad_contentlength_mismatch_below_aws2(): assert 
error_code == 'BadDigest' @tag('auth_aws2') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/incorrect authorization') -@attr(assertion='fails 403') # TODO: remove 'fails_on_rgw' and once we have learned how to manipulate the authorization header -@attr('fails_on_rgw') @pytest.mark.fails_on_rgw def test_object_create_bad_authorization_incorrect_aws2(): v2_client = get_v2_client() @@ -557,12 +419,7 @@ def test_object_create_bad_authorization_incorrect_aws2(): assert error_code == 'InvalidDigest' @tag('auth_aws2') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/invalid authorization') -@attr(assertion='fails 400') # TODO: remove 'fails_on_rgw' and once we have learned how to manipulate the authorization header -@attr('fails_on_rgw') @pytest.mark.fails_on_rgw def test_object_create_bad_authorization_invalid_aws2(): v2_client = get_v2_client() @@ -573,10 +430,6 @@ def test_object_create_bad_authorization_invalid_aws2(): assert error_code == 'InvalidArgument' @tag('auth_aws2') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/empty user agent') -@attr(assertion='succeeds') def test_object_create_bad_ua_empty_aws2(): v2_client = get_v2_client() headers = {'User-Agent': ''} @@ -584,10 +437,6 @@ def test_object_create_bad_ua_empty_aws2(): v2_client.put_object(Bucket=bucket_name, Key=key_name, Body='bar') @tag('auth_aws2') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/no user agent') -@attr(assertion='succeeds') def test_object_create_bad_ua_none_aws2(): v2_client = get_v2_client() remove = 'User-Agent' @@ -595,10 +444,6 @@ def test_object_create_bad_ua_none_aws2(): v2_client.put_object(Bucket=bucket_name, Key=key_name, Body='bar') @tag('auth_aws2') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/invalid date') -@attr(assertion='fails 403') def test_object_create_bad_date_invalid_aws2(): v2_client = get_v2_client() headers = {'x-amz-date': 'Bad 
Date'} @@ -608,10 +453,6 @@ def test_object_create_bad_date_invalid_aws2(): assert error_code == 'AccessDenied' @tag('auth_aws2') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/empty date') -@attr(assertion='fails 403') def test_object_create_bad_date_empty_aws2(): v2_client = get_v2_client() headers = {'x-amz-date': ''} @@ -621,12 +462,7 @@ def test_object_create_bad_date_empty_aws2(): assert error_code == 'AccessDenied' @tag('auth_aws2') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/no date') -@attr(assertion='fails 403') # TODO: remove 'fails_on_rgw' and once we have learned how to remove the date header -@attr('fails_on_rgw') @pytest.mark.fails_on_rgw def test_object_create_bad_date_none_aws2(): v2_client = get_v2_client() @@ -637,10 +473,6 @@ def test_object_create_bad_date_none_aws2(): assert error_code == 'AccessDenied' @tag('auth_aws2') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/date in past') -@attr(assertion='fails 403') def test_object_create_bad_date_before_today_aws2(): v2_client = get_v2_client() headers = {'x-amz-date': 'Tue, 07 Jul 2010 21:53:04 GMT'} @@ -650,10 +482,6 @@ def test_object_create_bad_date_before_today_aws2(): assert error_code == 'RequestTimeTooSkewed' @tag('auth_aws2') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/date before epoch') -@attr(assertion='fails 403') def test_object_create_bad_date_before_epoch_aws2(): v2_client = get_v2_client() headers = {'x-amz-date': 'Tue, 07 Jul 1950 21:53:04 GMT'} @@ -663,10 +491,6 @@ def test_object_create_bad_date_before_epoch_aws2(): assert error_code == 'AccessDenied' @tag('auth_aws2') -@attr(resource='object') -@attr(method='put') -@attr(operation='create w/date after 9999') -@attr(assertion='fails 403') def test_object_create_bad_date_after_end_aws2(): v2_client = get_v2_client() headers = {'x-amz-date': 'Tue, 07 Jul 9999 21:53:04 GMT'} @@ -676,12 +500,7 @@ def 
test_object_create_bad_date_after_end_aws2(): assert error_code == 'RequestTimeTooSkewed' @tag('auth_aws2') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/invalid authorization') -@attr(assertion='fails 400') # TODO: remove 'fails_on_rgw' and once we have learned how to remove the date header -@attr('fails_on_rgw') @pytest.mark.fails_on_rgw def test_bucket_create_bad_authorization_invalid_aws2(): v2_client = get_v2_client() @@ -692,30 +511,18 @@ def test_bucket_create_bad_authorization_invalid_aws2(): assert error_code == 'InvalidArgument' @tag('auth_aws2') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/empty user agent') -@attr(assertion='succeeds') def test_bucket_create_bad_ua_empty_aws2(): v2_client = get_v2_client() headers = {'User-Agent': ''} _add_header_create_bucket(headers, v2_client) @tag('auth_aws2') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/no user agent') -@attr(assertion='succeeds') def test_bucket_create_bad_ua_none_aws2(): v2_client = get_v2_client() remove = 'User-Agent' _remove_header_create_bucket(remove, v2_client) @tag('auth_aws2') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/invalid date') -@attr(assertion='fails 403') def test_bucket_create_bad_date_invalid_aws2(): v2_client = get_v2_client() headers = {'x-amz-date': 'Bad Date'} @@ -725,10 +532,6 @@ def test_bucket_create_bad_date_invalid_aws2(): assert error_code == 'AccessDenied' @tag('auth_aws2') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/empty date') -@attr(assertion='fails 403') def test_bucket_create_bad_date_empty_aws2(): v2_client = get_v2_client() headers = {'x-amz-date': ''} @@ -738,12 +541,7 @@ def test_bucket_create_bad_date_empty_aws2(): assert error_code == 'AccessDenied' @tag('auth_aws2') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/no date') -@attr(assertion='fails 403') # TODO: remove 'fails_on_rgw' and 
once we have learned how to remove the date header -@attr('fails_on_rgw') @pytest.mark.fails_on_rgw def test_bucket_create_bad_date_none_aws2(): v2_client = get_v2_client() @@ -754,10 +552,6 @@ def test_bucket_create_bad_date_none_aws2(): assert error_code == 'AccessDenied' @tag('auth_aws2') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/date in past') -@attr(assertion='fails 403') def test_bucket_create_bad_date_before_today_aws2(): v2_client = get_v2_client() headers = {'x-amz-date': 'Tue, 07 Jul 2010 21:53:04 GMT'} @@ -767,10 +561,6 @@ def test_bucket_create_bad_date_before_today_aws2(): assert error_code == 'RequestTimeTooSkewed' @tag('auth_aws2') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/date in future') -@attr(assertion='fails 403') def test_bucket_create_bad_date_after_today_aws2(): v2_client = get_v2_client() headers = {'x-amz-date': 'Tue, 07 Jul 2030 21:53:04 GMT'} @@ -780,10 +570,6 @@ def test_bucket_create_bad_date_after_today_aws2(): assert error_code == 'RequestTimeTooSkewed' @tag('auth_aws2') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/date before epoch') -@attr(assertion='fails 403') def test_bucket_create_bad_date_before_epoch_aws2(): v2_client = get_v2_client() headers = {'x-amz-date': 'Tue, 07 Jul 1950 21:53:04 GMT'} diff --git a/s3tests_boto3/functional/test_iam.py b/s3tests_boto3/functional/test_iam.py index 9285ad7..0995f97 100644 --- a/s3tests_boto3/functional/test_iam.py +++ b/s3tests_boto3/functional/test_iam.py @@ -1,7 +1,6 @@ import json from botocore.exceptions import ClientError -from nose.plugins.attrib import attr import pytest from s3tests_boto3.functional.utils import assert_raises @@ -19,13 +18,7 @@ from . 
import ( from .utils import _get_status, _get_status_and_error_code -@attr(resource='user-policy') -@attr(method='put') -@attr(operation='Verify Put User Policy') -@attr(assertion='succeeds') -@attr('user-policy') @pytest.mark.user_policy -@attr('test_of_iam') @pytest.mark.test_of_iam def test_put_user_policy(): client = get_iam_client() @@ -45,13 +38,7 @@ def test_put_user_policy(): assert response['ResponseMetadata']['HTTPStatusCode'] == 200 -@attr(resource='user-policy') -@attr(method='put') -@attr(operation='Verify Put User Policy with invalid user') -@attr(assertion='succeeds') -@attr('user-policy') @pytest.mark.user_policy -@attr('test_of_iam') @pytest.mark.test_of_iam def test_put_user_policy_invalid_user(): client = get_iam_client() @@ -69,13 +56,7 @@ def test_put_user_policy_invalid_user(): assert status == 404 -@attr(resource='user-policy') -@attr(method='put') -@attr(operation='Verify Put User Policy using parameter value outside limit') -@attr(assertion='succeeds') -@attr('user-policy') @pytest.mark.user_policy -@attr('test_of_iam') @pytest.mark.test_of_iam def test_put_user_policy_parameter_limit(): client = get_iam_client() @@ -94,15 +75,8 @@ def test_put_user_policy_parameter_limit(): assert status == 400 -@attr(resource='user-policy') -@attr(method='put') -@attr(operation='Verify Put User Policy using invalid policy document elements') -@attr(assertion='succeeds') -@attr('user-policy') @pytest.mark.user_policy -@attr('test_of_iam') @pytest.mark.test_of_iam -@attr('fails_on_rgw') @pytest.mark.fails_on_rgw def test_put_user_policy_invalid_element(): client = get_iam_client() @@ -167,13 +141,7 @@ def test_put_user_policy_invalid_element(): assert status == 400 -@attr(resource='user-policy') -@attr(method='put') -@attr(operation='Verify Put a policy that already exists') -@attr(assertion='succeeds') -@attr('user-policy') @pytest.mark.user_policy -@attr('test_of_iam') @pytest.mark.test_of_iam def test_put_existing_user_policy(): client = get_iam_client() 
@@ -194,13 +162,7 @@ def test_put_existing_user_policy(): client.delete_user_policy(PolicyName='AllAccessPolicy', UserName=get_alt_user_id()) -@attr(resource='user-policy') -@attr(method='put') -@attr(operation='Verify List User policies') -@attr(assertion='succeeds') -@attr('user-policy') @pytest.mark.user_policy -@attr('test_of_iam') @pytest.mark.test_of_iam def test_list_user_policy(): client = get_iam_client() @@ -221,13 +183,7 @@ def test_list_user_policy(): client.delete_user_policy(PolicyName='AllAccessPolicy', UserName=get_alt_user_id()) -@attr(resource='user-policy') -@attr(method='put') -@attr(operation='Verify List User policies with invalid user') -@attr(assertion='succeeds') -@attr('user-policy') @pytest.mark.user_policy -@attr('test_of_iam') @pytest.mark.test_of_iam def test_list_user_policy_invalid_user(): client = get_iam_client() @@ -236,13 +192,7 @@ def test_list_user_policy_invalid_user(): assert status == 404 -@attr(resource='user-policy') -@attr(method='get') -@attr(operation='Verify Get User policy') -@attr(assertion='succeeds') -@attr('user-policy') @pytest.mark.user_policy -@attr('test_of_iam') @pytest.mark.test_of_iam def test_get_user_policy(): client = get_iam_client() @@ -265,13 +215,7 @@ def test_get_user_policy(): assert response['ResponseMetadata']['HTTPStatusCode'] == 200 -@attr(resource='user-policy') -@attr(method='get') -@attr(operation='Verify Get User Policy with invalid user') -@attr(assertion='succeeds') -@attr('user-policy') @pytest.mark.user_policy -@attr('test_of_iam') @pytest.mark.test_of_iam def test_get_user_policy_invalid_user(): client = get_iam_client() @@ -293,15 +237,8 @@ def test_get_user_policy_invalid_user(): client.delete_user_policy(PolicyName='AllAccessPolicy', UserName=get_alt_user_id()) -@attr(resource='user-policy') -@attr(method='get') -@attr(operation='Verify Get User Policy with invalid policy name') -@attr(assertion='succeeds') -@attr('user-policy') @pytest.mark.user_policy -@attr('test_of_iam') 
@pytest.mark.test_of_iam -@attr('fails_on_rgw') @pytest.mark.fails_on_rgw def test_get_user_policy_invalid_policy_name(): client = get_iam_client() @@ -322,15 +259,8 @@ def test_get_user_policy_invalid_policy_name(): client.delete_user_policy(PolicyName='AllAccessPolicy', UserName=get_alt_user_id()) -@attr(resource='user-policy') -@attr(method='get') -@attr(operation='Verify Get Deleted User Policy') -@attr(assertion='succeeds') -@attr('user-policy') @pytest.mark.user_policy -@attr('test_of_iam') @pytest.mark.test_of_iam -@attr('fails_on_rgw') @pytest.mark.fails_on_rgw def test_get_deleted_user_policy(): client = get_iam_client() @@ -351,13 +281,7 @@ def test_get_deleted_user_policy(): assert status == 404 -@attr(resource='user-policy') -@attr(method='get') -@attr(operation='Verify Get a policy from multiple policies for a user') -@attr(assertion='succeeds') -@attr('user-policy') @pytest.mark.user_policy -@attr('test_of_iam') @pytest.mark.test_of_iam def test_get_user_policy_from_multiple_policies(): client = get_iam_client() @@ -390,13 +314,7 @@ def test_get_user_policy_from_multiple_policies(): assert response['ResponseMetadata']['HTTPStatusCode'] == 200 -@attr(resource='user-policy') -@attr(method='delete') -@attr(operation='Verify Delete User Policy') -@attr(assertion='succeeds') -@attr('user-policy') @pytest.mark.user_policy -@attr('test_of_iam') @pytest.mark.test_of_iam def test_delete_user_policy(): client = get_iam_client() @@ -418,13 +336,7 @@ def test_delete_user_policy(): assert response['ResponseMetadata']['HTTPStatusCode'] == 200 -@attr(resource='user-policy') -@attr(method='delete') -@attr(operation='Verify Delete User Policy with invalid user') -@attr(assertion='succeeds') -@attr('user-policy') @pytest.mark.user_policy -@attr('test_of_iam') @pytest.mark.test_of_iam def test_delete_user_policy_invalid_user(): client = get_iam_client() @@ -450,13 +362,7 @@ def test_delete_user_policy_invalid_user(): assert response['ResponseMetadata']['HTTPStatusCode'] 
== 200 -@attr(resource='user-policy') -@attr(method='delete') -@attr(operation='Verify Delete User Policy with invalid policy name') -@attr(assertion='succeeds') -@attr('user-policy') @pytest.mark.user_policy -@attr('test_of_iam') @pytest.mark.test_of_iam def test_delete_user_policy_invalid_policy_name(): client = get_iam_client() @@ -482,13 +388,7 @@ def test_delete_user_policy_invalid_policy_name(): assert response['ResponseMetadata']['HTTPStatusCode'] == 200 -@attr(resource='user-policy') -@attr(method='delete') -@attr(operation='Verify Delete multiple User policies for a user') -@attr(assertion='succeeds') -@attr('user-policy') @pytest.mark.user_policy -@attr('test_of_iam') @pytest.mark.test_of_iam def test_delete_user_policy_from_multiple_policies(): client = get_iam_client() @@ -528,13 +428,7 @@ def test_delete_user_policy_from_multiple_policies(): assert response['ResponseMetadata']['HTTPStatusCode'] == 200 -@attr(resource='user-policy') -@attr(method='s3 Actions') -@attr(operation='Verify Allow Bucket Actions in user Policy') -@attr(assertion='succeeds') -@attr('user-policy') @pytest.mark.user_policy -@attr('test_of_iam') @pytest.mark.test_of_iam def test_allow_bucket_actions_in_user_policy(): client = get_iam_client() @@ -581,15 +475,8 @@ def test_allow_bucket_actions_in_user_policy(): assert response['ResponseMetadata']['HTTPStatusCode'] == 200 -@attr(resource='user-policy') -@attr(method='s3 Actions') -@attr(operation='Verify Deny Bucket Actions in user Policy') -@attr(assertion='succeeds') -@attr('user-policy') @pytest.mark.user_policy -@attr('test_of_iam') @pytest.mark.test_of_iam -@attr('fails_on_dbstore') def test_deny_bucket_actions_in_user_policy(): client = get_iam_client() s3_client = get_alt_client() @@ -623,13 +510,7 @@ def test_deny_bucket_actions_in_user_policy(): assert response['ResponseMetadata']['HTTPStatusCode'] == 204 -@attr(resource='user-policy') -@attr(method='s3 Actions') -@attr(operation='Verify Allow Object Actions in user 
Policy') -@attr(assertion='succeeds') -@attr('user-policy') @pytest.mark.user_policy -@attr('test_of_iam') @pytest.mark.test_of_iam def test_allow_object_actions_in_user_policy(): client = get_iam_client() @@ -668,15 +549,8 @@ def test_allow_object_actions_in_user_policy(): assert response['ResponseMetadata']['HTTPStatusCode'] == 200 -@attr(resource='user-policy') -@attr(method='s3 Actions') -@attr(operation='Verify Deny Object Actions in user Policy') -@attr(assertion='succeeds') -@attr('user-policy') @pytest.mark.user_policy -@attr('test_of_iam') @pytest.mark.test_of_iam -@attr('fails_on_dbstore') def test_deny_object_actions_in_user_policy(): client = get_iam_client() s3_client_alt = get_alt_client() @@ -714,13 +588,7 @@ def test_deny_object_actions_in_user_policy(): assert response['ResponseMetadata']['HTTPStatusCode'] == 200 -@attr(resource='user-policy') -@attr(method='s3 Actions') -@attr(operation='Verify Allow Multipart Actions in user Policy') -@attr(assertion='succeeds') -@attr('user-policy') @pytest.mark.user_policy -@attr('test_of_iam') @pytest.mark.test_of_iam def test_allow_multipart_actions_in_user_policy(): client = get_iam_client() @@ -755,15 +623,8 @@ def test_allow_multipart_actions_in_user_policy(): assert response['ResponseMetadata']['HTTPStatusCode'] == 200 -@attr(resource='user-policy') -@attr(method='s3 Actions') -@attr(operation='Verify Deny Multipart Actions in user Policy') -@attr(assertion='succeeds') -@attr('user-policy') @pytest.mark.user_policy -@attr('test_of_iam') @pytest.mark.test_of_iam -@attr('fails_on_dbstore') def test_deny_multipart_actions_in_user_policy(): client = get_iam_client() s3_client = get_alt_client() @@ -804,15 +665,8 @@ def test_deny_multipart_actions_in_user_policy(): assert response['ResponseMetadata']['HTTPStatusCode'] == 200 -@attr(resource='user-policy') -@attr(method='s3 Actions') -@attr(operation='Verify Allow Tagging Actions in user Policy') -@attr(assertion='succeeds') -@attr('user-policy') 
@pytest.mark.user_policy -@attr('test_of_iam') @pytest.mark.test_of_iam -@attr('fails_on_dbstore') def test_allow_tagging_actions_in_user_policy(): client = get_iam_client() s3_client_alt = get_alt_client() @@ -856,15 +710,8 @@ def test_allow_tagging_actions_in_user_policy(): assert response['ResponseMetadata']['HTTPStatusCode'] == 200 -@attr(resource='user-policy') -@attr(method='s3 Actions') -@attr(operation='Verify Deny Tagging Actions in user Policy') -@attr(assertion='succeeds') -@attr('user-policy') @pytest.mark.user_policy -@attr('test_of_iam') @pytest.mark.test_of_iam -@attr('fails_on_dbstore') def test_deny_tagging_actions_in_user_policy(): client = get_iam_client() s3_client = get_alt_client() @@ -914,15 +761,8 @@ def test_deny_tagging_actions_in_user_policy(): assert response['ResponseMetadata']['HTTPStatusCode'] == 200 -@attr(resource='user-policy') -@attr(method='put') -@attr(operation='Verify conflicting user policy statements') -@attr(assertion='succeeds') -@attr('user-policy') @pytest.mark.user_policy -@attr('test_of_iam') @pytest.mark.test_of_iam -@attr('fails_on_dbstore') def test_verify_conflicting_user_policy_statements(): s3client = get_alt_client() bucket = get_new_bucket(client=s3client) @@ -952,15 +792,8 @@ def test_verify_conflicting_user_policy_statements(): assert response['ResponseMetadata']['HTTPStatusCode'] == 200 -@attr(resource='user-policy') -@attr(method='put') -@attr(operation='Verify conflicting user policies') -@attr(assertion='succeeds') -@attr('user-policy') @pytest.mark.user_policy -@attr('test_of_iam') @pytest.mark.test_of_iam -@attr('fails_on_dbstore') def test_verify_conflicting_user_policies(): s3client = get_alt_client() bucket = get_new_bucket(client=s3client) @@ -997,12 +830,7 @@ def test_verify_conflicting_user_policies(): assert response['ResponseMetadata']['HTTPStatusCode'] == 200 -@attr(resource='user-policy') -@attr(operation='Verify Allow Actions for IAM user policies') -@attr(assertion='succeeds') 
-@attr('user-policy') @pytest.mark.user_policy -@attr('test_of_iam') @pytest.mark.test_of_iam def test_verify_allow_iam_actions(): policy1 = json.dumps( diff --git a/s3tests_boto3/functional/test_s3.py b/s3tests_boto3/functional/test_s3.py index 52cef44..4b89623 100644 --- a/s3tests_boto3/functional/test_s3.py +++ b/s3tests_boto3/functional/test_s3.py @@ -2,7 +2,6 @@ import boto3 import botocore.session from botocore.exceptions import ClientError from botocore.exceptions import ParamValidationError -from nose.plugins.attrib import attr import isodate import email.utils import datetime @@ -18,7 +17,6 @@ import hashlib import xml.etree.ElementTree as ET import time import operator -import nose import pytest import os import string @@ -94,20 +92,11 @@ def _bucket_is_empty(bucket): break return is_empty -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='empty buckets return no contents') def test_bucket_list_empty(): bucket = get_new_bucket_resource() is_empty = _bucket_is_empty(bucket) assert is_empty == True -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='distinct buckets have different contents') -@attr('list-objects-v2') @pytest.mark.list_objects_v2 def test_bucket_list_distinct(): bucket1 = get_new_bucket_resource() @@ -151,11 +140,6 @@ def _get_prefixes(response): prefixes = [prefix['Prefix'] for prefix in prefix_list] return prefixes -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list all keys') -@attr(assertion='pagination w/max_keys=2, no marker') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_bucket_list_many(): bucket_name = _create_objects(keys=['foo', 'bar', 'baz']) @@ -173,13 +157,7 @@ def test_bucket_list_many(): assert response['IsTruncated'] == False assert keys == ['foo'] -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list all keys') -@attr(assertion='pagination w/max_keys=2, no marker') -@attr('list-objects-v2') 
@pytest.mark.list_objects_v2 -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_bucket_listv2_many(): bucket_name = _create_objects(keys=['foo', 'bar', 'baz']) @@ -197,11 +175,6 @@ def test_bucket_listv2_many(): assert response['IsTruncated'] == False assert keys == ['foo'] -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='keycount in listobjectsv2') -@attr('list-objects-v2') @pytest.mark.list_objects_v2 def test_basic_key_count(): client = get_client() @@ -213,10 +186,6 @@ def test_basic_key_count(): response1 = client.list_objects_v2(Bucket=bucket_name) assert response1['KeyCount'] == 5 -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='prefixes in multi-component object names') def test_bucket_list_delimiter_basic(): bucket_name = _create_objects(keys=['foo/bar', 'foo/bar/xyzzy', 'quux/thud', 'asdf']) client = get_client() @@ -230,11 +199,6 @@ def test_bucket_list_delimiter_basic(): assert len(prefixes) == 2 assert prefixes == ['foo/', 'quux/'] -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='prefixes in multi-component object names') -@attr('list-objects-v2') @pytest.mark.list_objects_v2 def test_bucket_listv2_delimiter_basic(): bucket_name = _create_objects(keys=['foo/bar', 'foo/bar/xyzzy', 'quux/thud', 'asdf']) @@ -251,11 +215,6 @@ def test_bucket_listv2_delimiter_basic(): assert response['KeyCount'] == len(prefixes) + len(keys) -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='test url encoding') -@attr('list-objects-v2') @pytest.mark.list_objects_v2 def test_bucket_listv2_encoding_basic(): bucket_name = _create_objects(keys=['foo+1/bar', 'foo/bar/xyzzy', 'quux ab/thud', 'asdf+b']) @@ -270,11 +229,6 @@ def test_bucket_listv2_encoding_basic(): assert len(prefixes) == 3 assert prefixes == ['foo%2B1/', 'foo/', 'quux%20ab/'] -@attr(resource='bucket') -@attr(method='get') 
-@attr(operation='list') -@attr(assertion='test url encoding') -@attr('list-objects') def test_bucket_list_encoding_basic(): bucket_name = _create_objects(keys=['foo+1/bar', 'foo/bar/xyzzy', 'quux ab/thud', 'asdf+b']) client = get_client() @@ -336,11 +290,6 @@ def validate_bucket_listv2(bucket_name, prefix, delimiter, continuation_token, m return response['NextContinuationToken'] -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='prefixes in multi-component object names') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_bucket_list_delimiter_prefix(): bucket_name = _create_objects(keys=['asdf', 'boo/bar', 'boo/baz/xyzzy', 'cquux/thud', 'cquux/bla']) @@ -363,13 +312,7 @@ def test_bucket_list_delimiter_prefix(): marker = validate_bucket_list(bucket_name, prefix, delim, '', 2, False, ['boo/bar'], ['boo/baz/'], None) -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='prefixes in multi-component object names') -@attr('list-objects-v2') @pytest.mark.list_objects_v2 -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_bucket_listv2_delimiter_prefix(): bucket_name = _create_objects(keys=['asdf', 'boo/bar', 'boo/baz/xyzzy', 'cquux/thud', 'cquux/bla']) @@ -393,28 +336,15 @@ def test_bucket_listv2_delimiter_prefix(): continuation_token = validate_bucket_listv2(bucket_name, prefix, delim, None, 2, False, ['boo/bar'], ['boo/baz/'], last=True) -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='prefix and delimiter handling when object ends with delimiter') -@attr('list-objects-v2') @pytest.mark.list_objects_v2 def test_bucket_listv2_delimiter_prefix_ends_with_delimiter(): bucket_name = _create_objects(keys=['asdf/']) validate_bucket_listv2(bucket_name, 'asdf/', '/', None, 1000, False, ['asdf/'], [], last=True) -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='prefix and delimiter handling when 
object ends with delimiter') def test_bucket_list_delimiter_prefix_ends_with_delimiter(): bucket_name = _create_objects(keys=['asdf/']) validate_bucket_list(bucket_name, 'asdf/', '/', '', 1000, False, ['asdf/'], [], None) -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='non-slash delimiter characters') def test_bucket_list_delimiter_alt(): bucket_name = _create_objects(keys=['bar', 'baz', 'cab', 'foo']) client = get_client() @@ -431,10 +361,6 @@ def test_bucket_list_delimiter_alt(): assert len(prefixes) == 2 assert prefixes == ['ba', 'ca'] -@attr(resource='bucket') -@attr(method='get') -@attr(assertion='non-slash delimiter characters') -@attr('list-objects-v2') @pytest.mark.list_objects_v2 def test_bucket_listv2_delimiter_alt(): bucket_name = _create_objects(keys=['bar', 'baz', 'cab', 'foo']) @@ -452,11 +378,6 @@ def test_bucket_listv2_delimiter_alt(): assert len(prefixes) == 2 assert prefixes == ['ba', 'ca'] -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='prefixes starting with underscore') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_bucket_list_delimiter_prefix_underscore(): bucket_name = _create_objects(keys=['_obj1_','_under1/bar', '_under1/baz/xyzzy', '_under2/thud', '_under2/bla']) @@ -478,13 +399,7 @@ def test_bucket_list_delimiter_prefix_underscore(): marker = validate_bucket_list(bucket_name, prefix, delim, '', 2, False, ['_under1/bar'], ['_under1/baz/'], None) -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='prefixes starting with underscore') -@attr('list-objects-v2') @pytest.mark.list_objects_v2 -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_bucket_listv2_delimiter_prefix_underscore(): bucket_name = _create_objects(keys=['_obj1_','_under1/bar', '_under1/baz/xyzzy', '_under2/thud', '_under2/bla']) @@ -507,10 +422,6 @@ def test_bucket_listv2_delimiter_prefix_underscore(): continuation_token = 
validate_bucket_listv2(bucket_name, prefix, delim, None, 2, False, ['_under1/bar'], ['_under1/baz/'], last=True) -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='percentage delimiter characters') def test_bucket_list_delimiter_percentage(): bucket_name = _create_objects(keys=['b%ar', 'b%az', 'c%ab', 'foo']) client = get_client() @@ -526,10 +437,6 @@ def test_bucket_list_delimiter_percentage(): # bar, baz, and cab should be broken up by the 'a' delimiters assert prefixes == ['b%', 'c%'] -@attr(resource='bucket') -@attr(method='get') -@attr(assertion='percentage delimiter characters') -@attr('list-objects-v2') @pytest.mark.list_objects_v2 def test_bucket_listv2_delimiter_percentage(): bucket_name = _create_objects(keys=['b%ar', 'b%az', 'c%ab', 'foo']) @@ -546,10 +453,6 @@ def test_bucket_listv2_delimiter_percentage(): # bar, baz, and cab should be broken up by the 'a' delimiters assert prefixes == ['b%', 'c%'] -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='whitespace delimiter characters') def test_bucket_list_delimiter_whitespace(): bucket_name = _create_objects(keys=['b ar', 'b az', 'c ab', 'foo']) client = get_client() @@ -565,10 +468,6 @@ def test_bucket_list_delimiter_whitespace(): # bar, baz, and cab should be broken up by the 'a' delimiters assert prefixes == ['b ', 'c '] -@attr(resource='bucket') -@attr(method='get') -@attr(assertion='whitespace delimiter characters') -@attr('list-objects-v2') @pytest.mark.list_objects_v2 def test_bucket_listv2_delimiter_whitespace(): bucket_name = _create_objects(keys=['b ar', 'b az', 'c ab', 'foo']) @@ -585,10 +484,6 @@ def test_bucket_listv2_delimiter_whitespace(): # bar, baz, and cab should be broken up by the 'a' delimiters assert prefixes == ['b ', 'c '] -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='dot delimiter characters') def test_bucket_list_delimiter_dot(): bucket_name = 
_create_objects(keys=['b.ar', 'b.az', 'c.ab', 'foo']) client = get_client() @@ -604,10 +499,6 @@ def test_bucket_list_delimiter_dot(): # bar, baz, and cab should be broken up by the 'a' delimiters assert prefixes == ['b.', 'c.'] -@attr(resource='bucket') -@attr(method='get') -@attr(assertion='dot delimiter characters') -@attr('list-objects-v2') @pytest.mark.list_objects_v2 def test_bucket_listv2_delimiter_dot(): bucket_name = _create_objects(keys=['b.ar', 'b.az', 'c.ab', 'foo']) @@ -624,10 +515,6 @@ def test_bucket_listv2_delimiter_dot(): # bar, baz, and cab should be broken up by the 'a' delimiters assert prefixes == ['b.', 'c.'] -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='non-printable delimiter can be specified') def test_bucket_list_delimiter_unreadable(): key_names=['bar', 'baz', 'cab', 'foo'] bucket_name = _create_objects(keys=key_names) @@ -641,10 +528,6 @@ def test_bucket_list_delimiter_unreadable(): assert keys == key_names assert prefixes == [] -@attr(resource='bucket') -@attr(method='get') -@attr(assertion='non-printable delimiter can be specified') -@attr('list-objects-v2') @pytest.mark.list_objects_v2 def test_bucket_listv2_delimiter_unreadable(): key_names=['bar', 'baz', 'cab', 'foo'] @@ -659,10 +542,6 @@ def test_bucket_listv2_delimiter_unreadable(): assert keys == key_names assert prefixes == [] -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='empty delimiter can be specified') def test_bucket_list_delimiter_empty(): key_names = ['bar', 'baz', 'cab', 'foo'] bucket_name = _create_objects(keys=key_names) @@ -677,10 +556,6 @@ def test_bucket_list_delimiter_empty(): assert keys == key_names assert prefixes == [] -@attr(resource='bucket') -@attr(method='get') -@attr(assertion='empty delimiter can be specified') -@attr('list-objects-v2') @pytest.mark.list_objects_v2 def test_bucket_listv2_delimiter_empty(): key_names = ['bar', 'baz', 'cab', 'foo'] @@ -696,10 +571,6 @@ 
def test_bucket_listv2_delimiter_empty(): assert keys == key_names assert prefixes == [] -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='unspecified delimiter defaults to none') def test_bucket_list_delimiter_none(): key_names = ['bar', 'baz', 'cab', 'foo'] bucket_name = _create_objects(keys=key_names) @@ -714,10 +585,6 @@ def test_bucket_list_delimiter_none(): assert keys == key_names assert prefixes == [] -@attr(resource='bucket') -@attr(method='get') -@attr(assertion='unspecified delimiter defaults to none') -@attr('list-objects-v2') @pytest.mark.list_objects_v2 def test_bucket_listv2_delimiter_none(): key_names = ['bar', 'baz', 'cab', 'foo'] @@ -733,7 +600,6 @@ def test_bucket_listv2_delimiter_none(): assert keys == key_names assert prefixes == [] -@attr('list-objects-v2') @pytest.mark.list_objects_v2 def test_bucket_listv2_fetchowner_notempty(): key_names = ['foo/bar', 'foo/baz', 'quux'] @@ -744,7 +610,6 @@ def test_bucket_listv2_fetchowner_notempty(): objs_list = response['Contents'] assert 'Owner' in objs_list[0] -@attr('list-objects-v2') @pytest.mark.list_objects_v2 def test_bucket_listv2_fetchowner_defaultempty(): key_names = ['foo/bar', 'foo/baz', 'quux'] @@ -755,7 +620,6 @@ def test_bucket_listv2_fetchowner_defaultempty(): objs_list = response['Contents'] assert not 'Owner' in objs_list[0] -@attr('list-objects-v2') @pytest.mark.list_objects_v2 def test_bucket_listv2_fetchowner_empty(): key_names = ['foo/bar', 'foo/baz', 'quux'] @@ -766,10 +630,6 @@ def test_bucket_listv2_fetchowner_empty(): objs_list = response['Contents'] assert not 'Owner' in objs_list[0] -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='unused delimiter is not found') def test_bucket_list_delimiter_not_exist(): key_names = ['bar', 'baz', 'cab', 'foo'] bucket_name = _create_objects(keys=key_names) @@ -784,10 +644,6 @@ def test_bucket_list_delimiter_not_exist(): assert keys == key_names assert prefixes == 
[] -@attr(resource='bucket') -@attr(method='get') -@attr(assertion='unused delimiter is not found') -@attr('list-objects-v2') @pytest.mark.list_objects_v2 def test_bucket_listv2_delimiter_not_exist(): key_names = ['bar', 'baz', 'cab', 'foo'] @@ -804,11 +660,6 @@ def test_bucket_listv2_delimiter_not_exist(): assert prefixes == [] -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list') -@attr(assertion='list with delimiter not skip special keys') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_bucket_list_delimiter_not_skip_special(): key_names = ['0/'] + ['0/%s' % i for i in range(1000, 1999)] @@ -825,10 +676,6 @@ def test_bucket_list_delimiter_not_skip_special(): assert keys == key_names2 assert prefixes == ['0/'] -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list under prefix') -@attr(assertion='returns only objects under prefix') def test_bucket_list_prefix_basic(): key_names = ['foo/bar', 'foo/baz', 'quux'] bucket_name = _create_objects(keys=key_names) @@ -842,11 +689,6 @@ def test_bucket_list_prefix_basic(): assert keys == ['foo/bar', 'foo/baz'] assert prefixes == [] -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list under prefix with list-objects-v2') -@attr(assertion='returns only objects under prefix') -@attr('list-objects-v2') @pytest.mark.list_objects_v2 def test_bucket_listv2_prefix_basic(): key_names = ['foo/bar', 'foo/baz', 'quux'] @@ -862,10 +704,6 @@ def test_bucket_listv2_prefix_basic(): assert prefixes == [] # just testing that we can do the delimeter and prefix logic on non-slashes -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list under prefix') -@attr(assertion='prefixes w/o delimiters') def test_bucket_list_prefix_alt(): key_names = ['bar', 'baz', 'foo'] bucket_name = _create_objects(keys=key_names) @@ -879,11 +717,6 @@ def test_bucket_list_prefix_alt(): assert keys == ['bar', 'baz'] assert prefixes == [] -@attr(resource='bucket') -@attr(method='get') 
-@attr(operation='list under prefix with list-objects-v2') -@attr(assertion='prefixes w/o delimiters') -@attr('list-objects-v2') @pytest.mark.list_objects_v2 def test_bucket_listv2_prefix_alt(): key_names = ['bar', 'baz', 'foo'] @@ -898,10 +731,6 @@ def test_bucket_listv2_prefix_alt(): assert keys == ['bar', 'baz'] assert prefixes == [] -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list under prefix') -@attr(assertion='empty prefix returns everything') def test_bucket_list_prefix_empty(): key_names = ['foo/bar', 'foo/baz', 'quux'] bucket_name = _create_objects(keys=key_names) @@ -915,11 +744,6 @@ def test_bucket_list_prefix_empty(): assert keys == key_names assert prefixes == [] -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list under prefix with list-objects-v2') -@attr(assertion='empty prefix returns everything') -@attr('list-objects-v2') @pytest.mark.list_objects_v2 def test_bucket_listv2_prefix_empty(): key_names = ['foo/bar', 'foo/baz', 'quux'] @@ -934,10 +758,6 @@ def test_bucket_listv2_prefix_empty(): assert keys == key_names assert prefixes == [] -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list under prefix') -@attr(assertion='unspecified prefix returns everything') def test_bucket_list_prefix_none(): key_names = ['foo/bar', 'foo/baz', 'quux'] bucket_name = _create_objects(keys=key_names) @@ -951,11 +771,6 @@ def test_bucket_list_prefix_none(): assert keys == key_names assert prefixes == [] -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list under prefix with list-objects-v2') -@attr(assertion='unspecified prefix returns everything') -@attr('list-objects-v2') @pytest.mark.list_objects_v2 def test_bucket_listv2_prefix_none(): key_names = ['foo/bar', 'foo/baz', 'quux'] @@ -970,10 +785,6 @@ def test_bucket_listv2_prefix_none(): assert keys == key_names assert prefixes == [] -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list under prefix') 
-@attr(assertion='nonexistent prefix returns nothing') def test_bucket_list_prefix_not_exist(): key_names = ['foo/bar', 'foo/baz', 'quux'] bucket_name = _create_objects(keys=key_names) @@ -987,11 +798,6 @@ def test_bucket_list_prefix_not_exist(): assert keys == [] assert prefixes == [] -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list under prefix with list-objects-v2') -@attr(assertion='nonexistent prefix returns nothing') -@attr('list-objects-v2') @pytest.mark.list_objects_v2 def test_bucket_listv2_prefix_not_exist(): key_names = ['foo/bar', 'foo/baz', 'quux'] @@ -1006,10 +812,6 @@ def test_bucket_listv2_prefix_not_exist(): assert keys == [] assert prefixes == [] -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list under prefix') -@attr(assertion='non-printable prefix can be specified') def test_bucket_list_prefix_unreadable(): key_names = ['foo/bar', 'foo/baz', 'quux'] bucket_name = _create_objects(keys=key_names) @@ -1023,11 +825,6 @@ def test_bucket_list_prefix_unreadable(): assert keys == [] assert prefixes == [] -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list under prefix with list-objects-v2') -@attr(assertion='non-printable prefix can be specified') -@attr('list-objects-v2') @pytest.mark.list_objects_v2 def test_bucket_listv2_prefix_unreadable(): key_names = ['foo/bar', 'foo/baz', 'quux'] @@ -1042,10 +839,6 @@ def test_bucket_listv2_prefix_unreadable(): assert keys == [] assert prefixes == [] -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list under prefix w/delimiter') -@attr(assertion='returns only objects directly under prefix') def test_bucket_list_prefix_delimiter_basic(): key_names = ['foo/bar', 'foo/baz/xyzzy', 'quux/thud', 'asdf'] bucket_name = _create_objects(keys=key_names) @@ -1060,11 +853,6 @@ def test_bucket_list_prefix_delimiter_basic(): assert keys == ['foo/bar'] assert prefixes == ['foo/baz/'] -@attr(resource='bucket') -@attr(method='get') 
-@attr(operation='list-objects-v2 under prefix w/delimiter') -@attr(assertion='returns only objects directly under prefix') -@attr('list-objects-v2') @pytest.mark.list_objects_v2 def test_bucket_listv2_prefix_delimiter_basic(): key_names = ['foo/bar', 'foo/baz/xyzzy', 'quux/thud', 'asdf'] @@ -1080,10 +868,6 @@ def test_bucket_listv2_prefix_delimiter_basic(): assert keys == ['foo/bar'] assert prefixes == ['foo/baz/'] -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list under prefix w/delimiter') -@attr(assertion='non-slash delimiters') def test_bucket_list_prefix_delimiter_alt(): key_names = ['bar', 'bazar', 'cab', 'foo'] bucket_name = _create_objects(keys=key_names) @@ -1098,7 +882,6 @@ def test_bucket_list_prefix_delimiter_alt(): assert keys == ['bar'] assert prefixes == ['baza'] -@attr('list-objects-v2') @pytest.mark.list_objects_v2 def test_bucket_listv2_prefix_delimiter_alt(): key_names = ['bar', 'bazar', 'cab', 'foo'] @@ -1114,10 +897,6 @@ def test_bucket_listv2_prefix_delimiter_alt(): assert keys == ['bar'] assert prefixes == ['baza'] -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list under prefix w/delimiter') -@attr(assertion='finds nothing w/unmatched prefix') def test_bucket_list_prefix_delimiter_prefix_not_exist(): key_names = ['b/a/r', 'b/a/c', 'b/a/g', 'g'] bucket_name = _create_objects(keys=key_names) @@ -1130,11 +909,6 @@ def test_bucket_list_prefix_delimiter_prefix_not_exist(): assert keys == [] assert prefixes == [] -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list-objects-v2 under prefix w/delimiter') -@attr(assertion='finds nothing w/unmatched prefix') -@attr('list-objects-v2') @pytest.mark.list_objects_v2 def test_bucket_listv2_prefix_delimiter_prefix_not_exist(): key_names = ['b/a/r', 'b/a/c', 'b/a/g', 'g'] @@ -1148,10 +922,6 @@ def test_bucket_listv2_prefix_delimiter_prefix_not_exist(): assert keys == [] assert prefixes == [] -@attr(resource='bucket') -@attr(method='get') 
-@attr(operation='list under prefix w/delimiter') -@attr(assertion='over-ridden slash ceases to be a delimiter') def test_bucket_list_prefix_delimiter_delimiter_not_exist(): key_names = ['b/a/c', 'b/a/g', 'b/a/r', 'g'] bucket_name = _create_objects(keys=key_names) @@ -1164,11 +934,6 @@ def test_bucket_list_prefix_delimiter_delimiter_not_exist(): assert keys == ['b/a/c', 'b/a/g', 'b/a/r'] assert prefixes == [] -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list-objects-v2 under prefix w/delimiter') -@attr(assertion='over-ridden slash ceases to be a delimiter') -@attr('list-objects-v2') @pytest.mark.list_objects_v2 def test_bucket_listv2_prefix_delimiter_delimiter_not_exist(): key_names = ['b/a/c', 'b/a/g', 'b/a/r', 'g'] @@ -1182,10 +947,6 @@ def test_bucket_listv2_prefix_delimiter_delimiter_not_exist(): assert keys == ['b/a/c', 'b/a/g', 'b/a/r'] assert prefixes == [] -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list under prefix w/delimiter') -@attr(assertion='finds nothing w/unmatched prefix and delimiter') def test_bucket_list_prefix_delimiter_prefix_delimiter_not_exist(): key_names = ['b/a/c', 'b/a/g', 'b/a/r', 'g'] bucket_name = _create_objects(keys=key_names) @@ -1198,11 +959,6 @@ def test_bucket_list_prefix_delimiter_prefix_delimiter_not_exist(): assert keys == [] assert prefixes == [] -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list-objects-v2 under prefix w/delimiter') -@attr(assertion='finds nothing w/unmatched prefix and delimiter') -@attr('list-objects-v2') @pytest.mark.list_objects_v2 def test_bucket_listv2_prefix_delimiter_prefix_delimiter_not_exist(): key_names = ['b/a/c', 'b/a/g', 'b/a/r', 'g'] @@ -1216,11 +972,6 @@ def test_bucket_listv2_prefix_delimiter_prefix_delimiter_not_exist(): assert keys == [] assert prefixes == [] -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list all keys') -@attr(assertion='pagination w/max_keys=1, marker') -@attr('fails_on_dbstore') 
@pytest.mark.fails_on_dbstore def test_bucket_list_maxkeys_one(): key_names = ['bar', 'baz', 'foo', 'quxx'] @@ -1239,13 +990,7 @@ def test_bucket_list_maxkeys_one(): keys = _get_keys(response) assert keys == key_names[1:] -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list all keys with list-objects-v2') -@attr(assertion='pagination w/max_keys=1, marker') -@attr('list-objects-v2') @pytest.mark.list_objects_v2 -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_bucket_listv2_maxkeys_one(): key_names = ['bar', 'baz', 'foo', 'quxx'] @@ -1264,10 +1009,6 @@ def test_bucket_listv2_maxkeys_one(): keys = _get_keys(response) assert keys == key_names[1:] -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list all keys') -@attr(assertion='pagination w/max_keys=0') def test_bucket_list_maxkeys_zero(): key_names = ['bar', 'baz', 'foo', 'quxx'] bucket_name = _create_objects(keys=key_names) @@ -1279,11 +1020,6 @@ def test_bucket_list_maxkeys_zero(): keys = _get_keys(response) assert keys == [] -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list all keys with list-objects-v2') -@attr(assertion='pagination w/max_keys=0') -@attr('list-objects-v2') @pytest.mark.list_objects_v2 def test_bucket_listv2_maxkeys_zero(): key_names = ['bar', 'baz', 'foo', 'quxx'] @@ -1296,10 +1032,6 @@ def test_bucket_listv2_maxkeys_zero(): keys = _get_keys(response) assert keys == [] -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list all keys') -@attr(assertion='pagination w/o max_keys') def test_bucket_list_maxkeys_none(): key_names = ['bar', 'baz', 'foo', 'quxx'] bucket_name = _create_objects(keys=key_names) @@ -1311,11 +1043,6 @@ def test_bucket_list_maxkeys_none(): assert keys == key_names assert response['MaxKeys'] == 1000 -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list all keys with list-objects-v2') -@attr(assertion='pagination w/o max_keys') -@attr('list-objects-v2') @pytest.mark.list_objects_v2 
def test_bucket_listv2_maxkeys_none(): key_names = ['bar', 'baz', 'foo', 'quxx'] @@ -1346,11 +1073,6 @@ def parseXmlToJson(xml): return response -@attr(resource='bucket') -@attr(method='get') -@attr(operation='get usage by client') -@attr(assertion='account usage api') -@attr('fails_on_aws') # allow-unordered is a non-standard extension @pytest.mark.fails_on_aws def test_account_usage(): # boto3.set_stream_logger(name='botocore') @@ -1370,13 +1092,7 @@ def test_account_usage(): assert summary['QuotaMaxBytesPerBucket'] == '-1' assert summary['QuotaMaxObjCountPerBucket'] == '-1' -@attr(resource='bucket') -@attr(method='head') -@attr(operation='get usage by client') -@attr(assertion='account usage by head bucket') -@attr('fails_on_aws') # allow-unordered is a non-standard extension @pytest.mark.fails_on_aws -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_head_bucket_usage(): # boto3.set_stream_logger(name='botocore') @@ -1394,13 +1110,7 @@ def test_head_bucket_usage(): assert hdrs['X-RGW-Quota-Bucket-Size'] == '-1' assert hdrs['X-RGW-Quota-Bucket-Objects'] == '-1' -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list all keys') -@attr(assertion='bucket list unordered') -@attr('fails_on_aws') # allow-unordered is a non-standard extension @pytest.mark.fails_on_aws -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_bucket_list_unordered(): # boto3.set_stream_logger(name='botocore') @@ -1453,15 +1163,8 @@ def test_bucket_list_unordered(): assert status == 400 assert error_code == 'InvalidArgument' -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list all keys with list-objects-v2') -@attr(assertion='bucket list unordered') -@attr('fails_on_aws') # allow-unordered is a non-standard extension @pytest.mark.fails_on_aws -@attr('list-objects-v2') @pytest.mark.list_objects_v2 -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_bucket_listv2_unordered(): # boto3.set_stream_logger(name='botocore') @@ 
-1515,10 +1218,6 @@ def test_bucket_listv2_unordered(): assert error_code == 'InvalidArgument' -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list all keys') -@attr(assertion='invalid max_keys') def test_bucket_list_maxkeys_invalid(): key_names = ['bar', 'baz', 'foo', 'quxx'] bucket_name = _create_objects(keys=key_names) @@ -1537,10 +1236,6 @@ def test_bucket_list_maxkeys_invalid(): -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list all keys') -@attr(assertion='no pagination, no marker') def test_bucket_list_marker_none(): key_names = ['bar', 'baz', 'foo', 'quxx'] bucket_name = _create_objects(keys=key_names) @@ -1550,10 +1245,6 @@ def test_bucket_list_marker_none(): assert response['Marker'] == '' -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list all keys') -@attr(assertion='no pagination, empty marker') def test_bucket_list_marker_empty(): key_names = ['bar', 'baz', 'foo', 'quxx'] bucket_name = _create_objects(keys=key_names) @@ -1565,11 +1256,6 @@ def test_bucket_list_marker_empty(): keys = _get_keys(response) assert keys == key_names -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list all keys with list-objects-v2') -@attr(assertion='no pagination, empty continuationtoken') -@attr('list-objects-v2') @pytest.mark.list_objects_v2 def test_bucket_listv2_continuationtoken_empty(): key_names = ['bar', 'baz', 'foo', 'quxx'] @@ -1582,11 +1268,6 @@ def test_bucket_listv2_continuationtoken_empty(): keys = _get_keys(response) assert keys == key_names -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list keys with list-objects-v2') -@attr(assertion='no pagination, non-empty continuationtoken') -@attr('list-objects-v2') @pytest.mark.list_objects_v2 def test_bucket_listv2_continuationtoken(): key_names = ['bar', 'baz', 'foo', 'quxx'] @@ -1603,13 +1284,7 @@ def test_bucket_listv2_continuationtoken(): keys = _get_keys(response2) assert keys == key_names2 -@attr(resource='bucket') 
-@attr(method='get') -@attr(operation='list keys with list-objects-v2') -@attr(assertion='no pagination, non-empty continuationtoken and startafter') -@attr('list-objects-v2') @pytest.mark.list_objects_v2 -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_bucket_listv2_both_continuationtoken_startafter(): key_names = ['bar', 'baz', 'foo', 'quxx'] @@ -1627,10 +1302,6 @@ def test_bucket_listv2_both_continuationtoken_startafter(): keys = _get_keys(response2) assert keys == key_names2 -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list all keys') -@attr(assertion='non-printing marker') def test_bucket_list_marker_unreadable(): key_names = ['bar', 'baz', 'foo', 'quxx'] bucket_name = _create_objects(keys=key_names) @@ -1642,11 +1313,6 @@ def test_bucket_list_marker_unreadable(): keys = _get_keys(response) assert keys == key_names -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list all keys with list-objects-v2') -@attr(assertion='non-printing startafter') -@attr('list-objects-v2') @pytest.mark.list_objects_v2 def test_bucket_listv2_startafter_unreadable(): key_names = ['bar', 'baz', 'foo', 'quxx'] @@ -1659,10 +1325,6 @@ def test_bucket_listv2_startafter_unreadable(): keys = _get_keys(response) assert keys == key_names -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list all keys') -@attr(assertion='marker not-in-list') def test_bucket_list_marker_not_in_list(): key_names = ['bar', 'baz', 'foo', 'quxx'] bucket_name = _create_objects(keys=key_names) @@ -1673,11 +1335,6 @@ def test_bucket_list_marker_not_in_list(): keys = _get_keys(response) assert keys == [ 'foo','quxx'] -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list all keys with list-objects-v2') -@attr(assertion='startafter not-in-list') -@attr('list-objects-v2') @pytest.mark.list_objects_v2 def test_bucket_listv2_startafter_not_in_list(): key_names = ['bar', 'baz', 'foo', 'quxx'] @@ -1689,10 +1346,6 @@ def 
test_bucket_listv2_startafter_not_in_list(): keys = _get_keys(response) assert keys == ['foo', 'quxx'] -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list all keys') -@attr(assertion='marker after list') def test_bucket_list_marker_after_list(): key_names = ['bar', 'baz', 'foo', 'quxx'] bucket_name = _create_objects(keys=key_names) @@ -1704,11 +1357,6 @@ def test_bucket_list_marker_after_list(): assert response['IsTruncated'] == False assert keys == [] -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list all keys with list-objects-v2') -@attr(assertion='startafter after list') -@attr('list-objects-v2') @pytest.mark.list_objects_v2 def test_bucket_listv2_startafter_after_list(): key_names = ['bar', 'baz', 'foo', 'quxx'] @@ -1730,11 +1378,6 @@ def _compare_dates(datetime1, datetime2): datetime1 = datetime1.replace(microsecond=0) assert datetime1 == datetime2 -@attr(resource='object') -@attr(method='head') -@attr(operation='compare w/bucket list') -@attr(assertion='return same metadata') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_bucket_list_return_data(): key_names = ['bar', 'baz', 'foo'] @@ -1767,12 +1410,6 @@ def test_bucket_list_return_data(): _compare_dates(obj['LastModified'],key_data['LastModified']) -@attr(resource='object') -@attr(method='head') -@attr(operation='compare w/bucket list when bucket versioning is configured') -@attr(assertion='return same metadata') -@attr('versioning') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_bucket_list_return_data_versioning(): bucket_name = get_new_bucket() @@ -1810,10 +1447,6 @@ def test_bucket_list_return_data_versioning(): assert obj['VersionId'] == key_data['VersionId'] _compare_dates(obj['LastModified'],key_data['LastModified']) -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list all objects (anonymous)') -@attr(assertion='succeeds') def test_bucket_list_objects_anonymous(): bucket_name = get_new_bucket() client = 
get_client() @@ -1822,11 +1455,6 @@ def test_bucket_list_objects_anonymous(): unauthenticated_client = get_unauthenticated_client() unauthenticated_client.list_objects(Bucket=bucket_name) -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list all objects (anonymous) with list-objects-v2') -@attr(assertion='succeeds') -@attr('list-objects-v2') @pytest.mark.list_objects_v2 def test_bucket_listv2_objects_anonymous(): bucket_name = get_new_bucket() @@ -1836,10 +1464,6 @@ def test_bucket_listv2_objects_anonymous(): unauthenticated_client = get_unauthenticated_client() unauthenticated_client.list_objects_v2(Bucket=bucket_name) -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list all objects (anonymous)') -@attr(assertion='fails') def test_bucket_list_objects_anonymous_fail(): bucket_name = get_new_bucket() @@ -1850,11 +1474,6 @@ def test_bucket_list_objects_anonymous_fail(): assert status == 403 assert error_code == 'AccessDenied' -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list all objects (anonymous) with list-objects-v2') -@attr(assertion='fails') -@attr('list-objects-v2') @pytest.mark.list_objects_v2 def test_bucket_listv2_objects_anonymous_fail(): bucket_name = get_new_bucket() @@ -1866,10 +1485,6 @@ def test_bucket_listv2_objects_anonymous_fail(): assert status == 403 assert error_code == 'AccessDenied' -@attr(resource='bucket') -@attr(method='get') -@attr(operation='non-existant bucket') -@attr(assertion='fails 404') def test_bucket_notexist(): bucket_name = get_new_bucket_name() client = get_client() @@ -1880,11 +1495,6 @@ def test_bucket_notexist(): assert status == 404 assert error_code == 'NoSuchBucket' -@attr(resource='bucket') -@attr(method='get') -@attr(operation='non-existant bucket with list-objects-v2') -@attr(assertion='fails 404') -@attr('list-objects-v2') @pytest.mark.list_objects_v2 def test_bucketv2_notexist(): bucket_name = get_new_bucket_name() @@ -1896,10 +1506,6 @@ def 
test_bucketv2_notexist(): assert status == 404 assert error_code == 'NoSuchBucket' -@attr(resource='bucket') -@attr(method='delete') -@attr(operation='non-existant bucket') -@attr(assertion='fails 404') def test_bucket_delete_notexist(): bucket_name = get_new_bucket_name() client = get_client() @@ -1910,10 +1516,6 @@ def test_bucket_delete_notexist(): assert status == 404 assert error_code == 'NoSuchBucket' -@attr(resource='bucket') -@attr(method='delete') -@attr(operation='non-empty bucket') -@attr(assertion='fails 409') def test_bucket_delete_nonempty(): key_names = ['foo'] bucket_name = _create_objects(keys=key_names) @@ -1944,10 +1546,6 @@ def _do_wait_completion(t): for thr in t: thr.join() -@attr(resource='bucket') -@attr(method='put') -@attr(operation='concurrent set of acls on a bucket') -@attr(assertion='works') def test_bucket_concurrent_set_canned_acl(): bucket_name = get_new_bucket() client = get_client() @@ -1963,10 +1561,6 @@ def test_bucket_concurrent_set_canned_acl(): for r in results: assert r == True -@attr(resource='object') -@attr(method='put') -@attr(operation='non-existant bucket') -@attr(assertion='fails 404') def test_object_write_to_nonexist_bucket(): key_names = ['foo'] bucket_name = 'whatchutalkinboutwillis' @@ -1979,10 +1573,6 @@ def test_object_write_to_nonexist_bucket(): assert error_code == 'NoSuchBucket' -@attr(resource='bucket') -@attr(method='del') -@attr(operation='deleted bucket') -@attr(assertion='fails 404') def test_bucket_create_delete(): bucket_name = get_new_bucket() client = get_client() @@ -1994,10 +1584,6 @@ def test_bucket_create_delete(): assert status == 404 assert error_code == 'NoSuchBucket' -@attr(resource='object') -@attr(method='get') -@attr(operation='read contents that were never written') -@attr(assertion='fails 404') def test_object_read_not_exist(): bucket_name = get_new_bucket() client = get_client() @@ -2014,11 +1600,6 @@ def get_http_response(**kwargs): global http_response http_response = 
kwargs['http_response'].__dict__ -@attr(resource='object') -@attr(method='get') -@attr(operation='read contents that were never written to raise one error response') -@attr(assertion='RequestId appears in the error response') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_requestid_matches_header_on_error(): bucket_name = get_new_bucket() @@ -2043,10 +1624,6 @@ def _make_objs_dict(key_names): objs_dict = {'Objects': objs_list} return objs_dict -@attr(resource='object') -@attr(method='post') -@attr(operation='delete multiple objects') -@attr(assertion='deletes multiple objects with a single call') def test_multi_object_delete(): key_names = ['key0', 'key1', 'key2'] bucket_name = _create_objects(keys=key_names) @@ -2068,11 +1645,6 @@ def test_multi_object_delete(): response = client.list_objects(Bucket=bucket_name) assert 'Contents' not in response -@attr(resource='object') -@attr(method='post') -@attr(operation='delete multiple objects with list-objects-v2') -@attr(assertion='deletes multiple objects with a single call') -@attr('list-objects-v2') @pytest.mark.list_objects_v2 def test_multi_objectv2_delete(): key_names = ['key0', 'key1', 'key2'] @@ -2095,10 +1667,6 @@ def test_multi_objectv2_delete(): response = client.list_objects_v2(Bucket=bucket_name) assert 'Contents' not in response -@attr(resource='object') -@attr(method='post') -@attr(operation='delete multiple objects has upper limit of 1000 keys') -@attr(assertion='fails 400') def test_multi_object_delete_key_limit(): key_names = [f"key-{i}" for i in range(1001)] bucket_name = _create_objects(keys=key_names) @@ -2116,10 +1684,6 @@ def test_multi_object_delete_key_limit(): status, error_code = _get_status_and_error_code(e.response) assert status == 400 -@attr(resource='object') -@attr(method='post') -@attr(operation='delete multiple objects has upper limit of 1000 keys with list-objects-v2') -@attr(assertion='fails 400') def test_multi_objectv2_delete_key_limit(): key_names = 
[f"key-{i}" for i in range(1001)] bucket_name = _create_objects(keys=key_names) @@ -2137,10 +1701,6 @@ def test_multi_objectv2_delete_key_limit(): status, error_code = _get_status_and_error_code(e.response) assert status == 400 -@attr(resource='object') -@attr(method='put') -@attr(operation='write zero-byte key') -@attr(assertion='correct content length') def test_object_head_zero_bytes(): bucket_name = get_new_bucket() client = get_client() @@ -2149,10 +1709,6 @@ def test_object_head_zero_bytes(): response = client.head_object(Bucket=bucket_name, Key='foo') assert response['ContentLength'] == 0 -@attr(resource='object') -@attr(method='put') -@attr(operation='write key') -@attr(assertion='correct etag') def test_object_write_check_etag(): bucket_name = get_new_bucket() client = get_client() @@ -2160,10 +1716,6 @@ def test_object_write_check_etag(): assert response['ResponseMetadata']['HTTPStatusCode'] == 200 assert response['ETag'] == '"37b51d194a7513e45b56f6524f2d51f2"' -@attr(resource='object') -@attr(method='put') -@attr(operation='write key') -@attr(assertion='correct cache control header') def test_object_write_cache_control(): bucket_name = get_new_bucket() client = get_client() @@ -2173,10 +1725,6 @@ def test_object_write_cache_control(): response = client.head_object(Bucket=bucket_name, Key='foo') assert response['ResponseMetadata']['HTTPHeaders']['cache-control'] == cache_control -@attr(resource='object') -@attr(method='put') -@attr(operation='write key') -@attr(assertion='correct expires header') def test_object_write_expires(): bucket_name = get_new_bucket() client = get_client() @@ -2195,10 +1743,6 @@ def _get_body(response): got = got.decode() return got -@attr(resource='object') -@attr(method='all') -@attr(operation='complete object life cycle') -@attr(assertion='read back what we wrote and rewrote') def test_object_write_read_update_read_delete(): bucket_name = get_new_bucket() client = get_client() @@ -2235,26 +1779,14 @@ def 
_set_get_metadata(metadata, bucket_name=None): response = client.get_object(Bucket=bucket_name, Key='foo') return response['Metadata']['meta1'] -@attr(resource='object.metadata') -@attr(method='put') -@attr(operation='metadata write/re-read') -@attr(assertion='reread what we wrote') def test_object_set_get_metadata_none_to_good(): got = _set_get_metadata('mymeta') assert got == 'mymeta' -@attr(resource='object.metadata') -@attr(method='put') -@attr(operation='metadata write/re-read') -@attr(assertion='write empty value, returns empty value') def test_object_set_get_metadata_none_to_empty(): got = _set_get_metadata('') assert got == '' -@attr(resource='object.metadata') -@attr(method='put') -@attr(operation='metadata write/re-write') -@attr(assertion='empty value replaces old') def test_object_set_get_metadata_overwrite_to_empty(): bucket_name = get_new_bucket() got = _set_get_metadata('oldmeta', bucket_name) @@ -2262,12 +1794,7 @@ def test_object_set_get_metadata_overwrite_to_empty(): got = _set_get_metadata('', bucket_name) assert got == '' -@attr(resource='object.metadata') -@attr(method='put') -@attr(operation='metadata write/re-write') -@attr(assertion='UTF-8 values passed through') # TODO: the decoding of this unicode metadata is not happening properly for unknown reasons -@attr('fails_on_rgw') @pytest.mark.fails_on_rgw def test_object_set_get_unicode_metadata(): bucket_name = get_new_bucket() @@ -2302,10 +1829,6 @@ def _set_get_metadata_unreadable(metadata, bucket_name=None): e = assert_raises(ClientError, client.put_object, Bucket=bucket_name, Key='bar', Metadata=metadata_dict) return e -@attr(resource='object') -@attr(method='put') -@attr(operation='data re-write') -@attr(assertion='replaces previous metadata') def test_object_metadata_replaced_on_put(): bucket_name = get_new_bucket() client = get_client() @@ -2318,10 +1841,6 @@ def test_object_metadata_replaced_on_put(): got = response['Metadata'] assert got == {} -@attr(resource='object') 
-@attr(method='put') -@attr(operation='data write from file (w/100-Continue)') -@attr(assertion='succeeds and returns written data') def test_object_write_file(): bucket_name = get_new_bucket() client = get_client() @@ -2336,10 +1855,6 @@ def _get_post_url(bucket_name): endpoint = get_config_endpoint() return '{endpoint}/{bucket_name}'.format(endpoint=endpoint, bucket_name=bucket_name) -@attr(resource='object') -@attr(method='post') -@attr(operation='anonymous browser based upload via POST request') -@attr(assertion='succeeds and returns written data') def test_post_object_anonymous_request(): bucket_name = get_new_bucket_name() client = get_client() @@ -2354,10 +1869,6 @@ def test_post_object_anonymous_request(): body = _get_body(response) assert body == 'bar' -@attr(resource='object') -@attr(method='post') -@attr(operation='authenticated browser based upload via POST request') -@attr(assertion='succeeds and returns written data') def test_post_object_authenticated_request(): bucket_name = get_new_bucket() client = get_client() @@ -2396,10 +1907,6 @@ def test_post_object_authenticated_request(): body = _get_body(response) assert body == 'bar' -@attr(resource='object') -@attr(method='post') -@attr(operation='authenticated browser based upload via POST request, no content-type header') -@attr(assertion='succeeds and returns written data') def test_post_object_authenticated_no_content_type(): bucket_name = get_new_bucket_name() client = get_client() @@ -2437,10 +1944,6 @@ def test_post_object_authenticated_no_content_type(): body = _get_body(response) assert body == 'bar' -@attr(resource='object') -@attr(method='post') -@attr(operation='authenticated browser based upload via POST request, bad access key') -@attr(assertion='fails') def test_post_object_authenticated_request_bad_access_key(): bucket_name = get_new_bucket_name() client = get_client() @@ -2476,10 +1979,6 @@ def test_post_object_authenticated_request_bad_access_key(): r = requests.post(url, files=payload, 
verify=get_config_ssl_verify()) assert r.status_code == 403 -@attr(resource='object') -@attr(method='post') -@attr(operation='anonymous browser based upload via POST request') -@attr(assertion='succeeds with status 201') def test_post_object_set_success_code(): bucket_name = get_new_bucket_name() client = get_client() @@ -2495,10 +1994,6 @@ def test_post_object_set_success_code(): message = ET.fromstring(r.content).find('Key') assert message.text == 'foo.txt' -@attr(resource='object') -@attr(method='post') -@attr(operation='anonymous browser based upload via POST request') -@attr(assertion='succeeds with status 204') def test_post_object_set_invalid_success_code(): bucket_name = get_new_bucket_name() client = get_client() @@ -2514,10 +2009,6 @@ def test_post_object_set_invalid_success_code(): content = r.content.decode() assert content == '' -@attr(resource='object') -@attr(method='post') -@attr(operation='authenticated browser based upload via POST request') -@attr(assertion='succeeds and returns written data') def test_post_object_upload_larger_than_chunk(): bucket_name = get_new_bucket() client = get_client() @@ -2557,10 +2048,6 @@ def test_post_object_upload_larger_than_chunk(): body = _get_body(response) assert body == foo_string -@attr(resource='object') -@attr(method='post') -@attr(operation='authenticated browser based upload via POST request') -@attr(assertion='succeeds and returns written data') def test_post_object_set_key_from_filename(): bucket_name = get_new_bucket() client = get_client() @@ -2597,10 +2084,6 @@ def test_post_object_set_key_from_filename(): body = _get_body(response) assert body == 'bar' -@attr(resource='object') -@attr(method='post') -@attr(operation='authenticated browser based upload via POST request') -@attr(assertion='succeeds with status 204') def test_post_object_ignored_header(): bucket_name = get_new_bucket() client = get_client() @@ -2635,10 +2118,6 @@ def test_post_object_ignored_header(): r = requests.post(url, 
files=payload, verify=get_config_ssl_verify()) assert r.status_code == 204 -@attr(resource='object') -@attr(method='post') -@attr(operation='authenticated browser based upload via POST request') -@attr(assertion='succeeds with status 204') def test_post_object_case_insensitive_condition_fields(): bucket_name = get_new_bucket() client = get_client() @@ -2674,10 +2153,6 @@ def test_post_object_case_insensitive_condition_fields(): r = requests.post(url, files=payload, verify=get_config_ssl_verify()) assert r.status_code == 204 -@attr(resource='object') -@attr(method='post') -@attr(operation='authenticated browser based upload via POST request') -@attr(assertion='succeeds with escaped leading $ and returns written data') def test_post_object_escaped_field_values(): bucket_name = get_new_bucket() client = get_client() @@ -2714,10 +2189,6 @@ def test_post_object_escaped_field_values(): body = _get_body(response) assert body == 'bar' -@attr(resource='object') -@attr(method='post') -@attr(operation='authenticated browser based upload via POST request') -@attr(assertion='succeeds and returns redirect url') def test_post_object_success_redirect_action(): bucket_name = get_new_bucket_name() client = get_client() @@ -2760,10 +2231,6 @@ def test_post_object_success_redirect_action(): assert url == '{rurl}?bucket={bucket}&key={key}&etag=%22{etag}%22'.format(\ rurl = redirect_url, bucket = bucket_name, key = 'foo.txt', etag = response['ETag'].strip('"')) -@attr(resource='object') -@attr(method='post') -@attr(operation='authenticated browser based upload via POST request') -@attr(assertion='fails with invalid signature error') def test_post_object_invalid_signature(): bucket_name = get_new_bucket() client = get_client() @@ -2797,10 +2264,6 @@ def test_post_object_invalid_signature(): r = requests.post(url, files=payload, verify=get_config_ssl_verify()) assert r.status_code == 403 -@attr(resource='object') -@attr(method='post') -@attr(operation='authenticated browser based upload 
via POST request') -@attr(assertion='fails with access key does not exist error') def test_post_object_invalid_access_key(): bucket_name = get_new_bucket() client = get_client() @@ -2834,10 +2297,6 @@ def test_post_object_invalid_access_key(): r = requests.post(url, files=payload, verify=get_config_ssl_verify()) assert r.status_code == 403 -@attr(resource='object') -@attr(method='post') -@attr(operation='authenticated browser based upload via POST request') -@attr(assertion='fails with invalid expiration error') def test_post_object_invalid_date_format(): bucket_name = get_new_bucket() client = get_client() @@ -2871,10 +2330,6 @@ def test_post_object_invalid_date_format(): r = requests.post(url, files=payload, verify=get_config_ssl_verify()) assert r.status_code == 400 -@attr(resource='object') -@attr(method='post') -@attr(operation='authenticated browser based upload via POST request') -@attr(assertion='fails with missing key error') def test_post_object_no_key_specified(): bucket_name = get_new_bucket() client = get_client() @@ -2907,10 +2362,6 @@ def test_post_object_no_key_specified(): r = requests.post(url, files=payload, verify=get_config_ssl_verify()) assert r.status_code == 400 -@attr(resource='object') -@attr(method='post') -@attr(operation='authenticated browser based upload via POST request') -@attr(assertion='fails with missing signature error') def test_post_object_missing_signature(): bucket_name = get_new_bucket() client = get_client() @@ -2944,10 +2395,6 @@ def test_post_object_missing_signature(): r = requests.post(url, files=payload, verify=get_config_ssl_verify()) assert r.status_code == 400 -@attr(resource='object') -@attr(method='post') -@attr(operation='authenticated browser based upload via POST request') -@attr(assertion='fails with extra input fields policy error') def test_post_object_missing_policy_condition(): bucket_name = get_new_bucket() client = get_client() @@ -2980,10 +2427,6 @@ def test_post_object_missing_policy_condition(): r = 
requests.post(url, files=payload, verify=get_config_ssl_verify()) assert r.status_code == 403 -@attr(resource='object') -@attr(method='post') -@attr(operation='authenticated browser based upload via POST request') -@attr(assertion='succeeds using starts-with restriction on metadata header') def test_post_object_user_specified_header(): bucket_name = get_new_bucket() client = get_client() @@ -3020,10 +2463,6 @@ def test_post_object_user_specified_header(): response = client.get_object(Bucket=bucket_name, Key='foo.txt') assert response['Metadata']['foo'] == 'barclamp' -@attr(resource='object') -@attr(method='post') -@attr(operation='authenticated browser based upload via POST request') -@attr(assertion='fails with policy condition failed error due to missing field in POST request') def test_post_object_request_missing_policy_specified_field(): bucket_name = get_new_bucket() client = get_client() @@ -3058,10 +2497,6 @@ def test_post_object_request_missing_policy_specified_field(): r = requests.post(url, files=payload, verify=get_config_ssl_verify()) assert r.status_code == 403 -@attr(resource='object') -@attr(method='post') -@attr(operation='authenticated browser based upload via POST request') -@attr(assertion='fails with conditions must be list error') def test_post_object_condition_is_case_sensitive(): bucket_name = get_new_bucket() client = get_client() @@ -3095,10 +2530,6 @@ def test_post_object_condition_is_case_sensitive(): r = requests.post(url, files=payload, verify=get_config_ssl_verify()) assert r.status_code == 400 -@attr(resource='object') -@attr(method='post') -@attr(operation='authenticated browser based upload via POST request') -@attr(assertion='fails with expiration must be string error') def test_post_object_expires_is_case_sensitive(): bucket_name = get_new_bucket() client = get_client() @@ -3132,10 +2563,6 @@ def test_post_object_expires_is_case_sensitive(): r = requests.post(url, files=payload, verify=get_config_ssl_verify()) assert r.status_code 
== 400 -@attr(resource='object') -@attr(method='post') -@attr(operation='authenticated browser based upload via POST request') -@attr(assertion='fails with policy expired error') def test_post_object_expired_policy(): bucket_name = get_new_bucket() client = get_client() @@ -3169,10 +2596,6 @@ def test_post_object_expired_policy(): r = requests.post(url, files=payload, verify=get_config_ssl_verify()) assert r.status_code == 403 -@attr(resource='object') -@attr(method='post') -@attr(operation='authenticated browser based upload via POST request') -@attr(assertion='fails using equality restriction on metadata header') def test_post_object_invalid_request_field_value(): bucket_name = get_new_bucket() client = get_client() @@ -3206,10 +2629,6 @@ def test_post_object_invalid_request_field_value(): r = requests.post(url, files=payload, verify=get_config_ssl_verify()) assert r.status_code == 403 -@attr(resource='object') -@attr(method='post') -@attr(operation='authenticated browser based upload via POST request') -@attr(assertion='fails with policy missing expiration error') def test_post_object_missing_expires_condition(): bucket_name = get_new_bucket() client = get_client() @@ -3243,10 +2662,6 @@ def test_post_object_missing_expires_condition(): r = requests.post(url, files=payload, verify=get_config_ssl_verify()) assert r.status_code == 400 -@attr(resource='object') -@attr(method='post') -@attr(operation='authenticated browser based upload via POST request') -@attr(assertion='fails with policy missing conditions error') def test_post_object_missing_conditions_list(): bucket_name = get_new_bucket() client = get_client() @@ -3272,10 +2687,6 @@ def test_post_object_missing_conditions_list(): r = requests.post(url, files=payload, verify=get_config_ssl_verify()) assert r.status_code == 400 -@attr(resource='object') -@attr(method='post') -@attr(operation='authenticated browser based upload via POST request') -@attr(assertion='fails with allowable upload size exceeded error') 
def test_post_object_upload_size_limit_exceeded(): bucket_name = get_new_bucket() client = get_client() @@ -3309,10 +2720,6 @@ def test_post_object_upload_size_limit_exceeded(): r = requests.post(url, files=payload, verify=get_config_ssl_verify()) assert r.status_code == 400 -@attr(resource='object') -@attr(method='post') -@attr(operation='authenticated browser based upload via POST request') -@attr(assertion='fails with invalid content length error') def test_post_object_missing_content_length_argument(): bucket_name = get_new_bucket() client = get_client() @@ -3346,10 +2753,6 @@ def test_post_object_missing_content_length_argument(): r = requests.post(url, files=payload, verify=get_config_ssl_verify()) assert r.status_code == 400 -@attr(resource='object') -@attr(method='post') -@attr(operation='authenticated browser based upload via POST request') -@attr(assertion='fails with invalid JSON error') def test_post_object_invalid_content_length_argument(): bucket_name = get_new_bucket() client = get_client() @@ -3383,10 +2786,6 @@ def test_post_object_invalid_content_length_argument(): r = requests.post(url, files=payload, verify=get_config_ssl_verify()) assert r.status_code == 400 -@attr(resource='object') -@attr(method='post') -@attr(operation='authenticated browser based upload via POST request') -@attr(assertion='fails with upload size less than minimum allowable error') def test_post_object_upload_size_below_minimum(): bucket_name = get_new_bucket() client = get_client() @@ -3420,10 +2819,6 @@ def test_post_object_upload_size_below_minimum(): r = requests.post(url, files=payload, verify=get_config_ssl_verify()) assert r.status_code == 400 -@attr(resource='object') -@attr(method='post') -@attr(operation='authenticated browser based upload via POST request') -@attr(assertion='empty conditions return appropriate error response') def test_post_object_empty_conditions(): bucket_name = get_new_bucket() client = get_client() @@ -3453,10 +2848,6 @@ def 
test_post_object_empty_conditions(): r = requests.post(url, files=payload, verify=get_config_ssl_verify()) assert r.status_code == 400 -@attr(resource='object') -@attr(method='get') -@attr(operation='get w/ If-Match: the latest ETag') -@attr(assertion='succeeds') def test_get_object_ifmatch_good(): bucket_name = get_new_bucket() client = get_client() @@ -3467,10 +2858,6 @@ def test_get_object_ifmatch_good(): body = _get_body(response) assert body == 'bar' -@attr(resource='object') -@attr(method='get') -@attr(operation='get w/ If-Match: bogus ETag') -@attr(assertion='fails 412') def test_get_object_ifmatch_failed(): bucket_name = get_new_bucket() client = get_client() @@ -3481,10 +2868,6 @@ def test_get_object_ifmatch_failed(): assert status == 412 assert error_code == 'PreconditionFailed' -@attr(resource='object') -@attr(method='get') -@attr(operation='get w/ If-None-Match: the latest ETag') -@attr(assertion='fails 304') def test_get_object_ifnonematch_good(): bucket_name = get_new_bucket() client = get_client() @@ -3496,10 +2879,6 @@ def test_get_object_ifnonematch_good(): assert status == 304 assert e.response['Error']['Message'] == 'Not Modified' -@attr(resource='object') -@attr(method='get') -@attr(operation='get w/ If-None-Match: bogus ETag') -@attr(assertion='succeeds') def test_get_object_ifnonematch_failed(): bucket_name = get_new_bucket() client = get_client() @@ -3509,10 +2888,6 @@ def test_get_object_ifnonematch_failed(): body = _get_body(response) assert body == 'bar' -@attr(resource='object') -@attr(method='get') -@attr(operation='get w/ If-Modified-Since: before') -@attr(assertion='succeeds') def test_get_object_ifmodifiedsince_good(): bucket_name = get_new_bucket() client = get_client() @@ -3522,11 +2897,6 @@ def test_get_object_ifmodifiedsince_good(): body = _get_body(response) assert body == 'bar' -@attr(resource='object') -@attr(method='get') -@attr(operation='get w/ If-Modified-Since: after') -@attr(assertion='fails 304') 
-@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_get_object_ifmodifiedsince_failed(): bucket_name = get_new_bucket() @@ -3548,11 +2918,6 @@ def test_get_object_ifmodifiedsince_failed(): assert status == 304 assert e.response['Error']['Message'] == 'Not Modified' -@attr(resource='object') -@attr(method='get') -@attr(operation='get w/ If-Unmodified-Since: before') -@attr(assertion='fails 412') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_get_object_ifunmodifiedsince_good(): bucket_name = get_new_bucket() @@ -3564,10 +2929,6 @@ def test_get_object_ifunmodifiedsince_good(): assert status == 412 assert error_code == 'PreconditionFailed' -@attr(resource='object') -@attr(method='get') -@attr(operation='get w/ If-Unmodified-Since: after') -@attr(assertion='succeeds') def test_get_object_ifunmodifiedsince_failed(): bucket_name = get_new_bucket() client = get_client() @@ -3578,11 +2939,6 @@ def test_get_object_ifunmodifiedsince_failed(): assert body == 'bar' -@attr(resource='object') -@attr(method='put') -@attr(operation='data re-write w/ If-Match: the latest ETag') -@attr(assertion='replaces previous data and metadata') -@attr('fails_on_aws') @pytest.mark.fails_on_aws def test_put_object_ifmatch_good(): bucket_name = get_new_bucket() @@ -3604,11 +2960,6 @@ def test_put_object_ifmatch_good(): body = _get_body(response) assert body == 'zar' -@attr(resource='object') -@attr(method='get') -@attr(operation='get w/ If-Match: bogus ETag') -@attr(assertion='fails 412') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_put_object_ifmatch_failed(): bucket_name = get_new_bucket() @@ -3631,11 +2982,6 @@ def test_put_object_ifmatch_failed(): body = _get_body(response) assert body == 'bar' -@attr(resource='object') -@attr(method='put') -@attr(operation='overwrite existing object w/ If-Match: *') -@attr(assertion='replaces previous data and metadata') -@attr('fails_on_aws') @pytest.mark.fails_on_aws def 
test_put_object_ifmatch_overwrite_existed_good(): bucket_name = get_new_bucket() @@ -3653,13 +2999,7 @@ def test_put_object_ifmatch_overwrite_existed_good(): body = _get_body(response) assert body == 'zar' -@attr(resource='object') -@attr(method='put') -@attr(operation='overwrite non-existing object w/ If-Match: *') -@attr(assertion='fails 412') -@attr('fails_on_aws') @pytest.mark.fails_on_aws -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_put_object_ifmatch_nonexisted_failed(): bucket_name = get_new_bucket() @@ -3677,11 +3017,6 @@ def test_put_object_ifmatch_nonexisted_failed(): assert status == 404 assert error_code == 'NoSuchKey' -@attr(resource='object') -@attr(method='put') -@attr(operation='overwrite existing object w/ If-None-Match: outdated ETag') -@attr(assertion='replaces previous data and metadata') -@attr('fails_on_aws') @pytest.mark.fails_on_aws def test_put_object_ifnonmatch_good(): bucket_name = get_new_bucket() @@ -3699,13 +3034,7 @@ def test_put_object_ifnonmatch_good(): body = _get_body(response) assert body == 'zar' -@attr(resource='object') -@attr(method='put') -@attr(operation='overwrite existing object w/ If-None-Match: the latest ETag') -@attr(assertion='fails 412') -@attr('fails_on_aws') @pytest.mark.fails_on_aws -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_put_object_ifnonmatch_failed(): bucket_name = get_new_bucket() @@ -3730,11 +3059,6 @@ def test_put_object_ifnonmatch_failed(): body = _get_body(response) assert body == 'bar' -@attr(resource='object') -@attr(method='put') -@attr(operation='overwrite non-existing object w/ If-None-Match: *') -@attr(assertion='succeeds') -@attr('fails_on_aws') @pytest.mark.fails_on_aws def test_put_object_ifnonmatch_nonexisted_good(): bucket_name = get_new_bucket() @@ -3748,13 +3072,7 @@ def test_put_object_ifnonmatch_nonexisted_good(): body = _get_body(response) assert body == 'bar' -@attr(resource='object') -@attr(method='put') -@attr(operation='overwrite existing 
object w/ If-None-Match: *') -@attr(assertion='fails 412') -@attr('fails_on_aws') @pytest.mark.fails_on_aws -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_put_object_ifnonmatch_overwrite_existed_failed(): bucket_name = get_new_bucket() @@ -3799,10 +3117,6 @@ def _setup_bucket_acl(bucket_acl=None): return bucket_name -@attr(resource='object') -@attr(method='get') -@attr(operation='publically readable bucket') -@attr(assertion='bucket is readable') def test_object_raw_get(): bucket_name = _setup_bucket_object_acl('public-read', 'public-read') @@ -3810,10 +3124,6 @@ def test_object_raw_get(): response = unauthenticated_client.get_object(Bucket=bucket_name, Key='foo') assert response['ResponseMetadata']['HTTPStatusCode'] == 200 -@attr(resource='object') -@attr(method='get') -@attr(operation='deleted object and bucket') -@attr(assertion='fails 404') def test_object_raw_get_bucket_gone(): bucket_name = _setup_bucket_object_acl('public-read', 'public-read') client = get_client() @@ -3828,10 +3138,6 @@ def test_object_raw_get_bucket_gone(): assert status == 404 assert error_code == 'NoSuchBucket' -@attr(resource='object') -@attr(method='get') -@attr(operation='deleted object and bucket') -@attr(assertion='fails 404') def test_object_delete_key_bucket_gone(): bucket_name = _setup_bucket_object_acl('public-read', 'public-read') client = get_client() @@ -3846,10 +3152,6 @@ def test_object_delete_key_bucket_gone(): assert status == 404 assert error_code == 'NoSuchBucket' -@attr(resource='object') -@attr(method='get') -@attr(operation='deleted object') -@attr(assertion='fails 404') def test_object_raw_get_object_gone(): bucket_name = _setup_bucket_object_acl('public-read', 'public-read') client = get_client() @@ -3863,10 +3165,6 @@ def test_object_raw_get_object_gone(): assert status == 404 assert error_code == 'NoSuchKey' -@attr(resource='bucket') -@attr(method='head') -@attr(operation='head bucket') -@attr(assertion='succeeds') def test_bucket_head(): 
bucket_name = get_new_bucket() client = get_client() @@ -3874,10 +3172,6 @@ def test_bucket_head(): response = client.head_bucket(Bucket=bucket_name) assert response['ResponseMetadata']['HTTPStatusCode'] == 200 -@attr(resource='bucket') -@attr(method='head') -@attr(operation='non-existant bucket') -@attr(assertion='fails 404') def test_bucket_head_notexist(): bucket_name = get_new_bucket_name() client = get_client() @@ -3891,13 +3185,7 @@ def test_bucket_head_notexist(): # https://docs.aws.amazon.com/AmazonS3/latest/API/API_HeadBucket.html #assert error_code == 'NoSuchKey' -@attr('fails_on_aws') @pytest.mark.fails_on_aws -@attr(resource='bucket') -@attr(method='head') -@attr(operation='read bucket extended information') -@attr(assertion='extended information is getting updated') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_bucket_head_extended(): bucket_name = get_new_bucket() @@ -3913,10 +3201,6 @@ def test_bucket_head_extended(): assert int(response['ResponseMetadata']['HTTPHeaders']['x-rgw-object-count']) == 3 assert int(response['ResponseMetadata']['HTTPHeaders']['x-rgw-bytes-used']) == 9 -@attr(resource='bucket.acl') -@attr(method='get') -@attr(operation='unauthenticated on private bucket') -@attr(assertion='succeeds') def test_object_raw_get_bucket_acl(): bucket_name = _setup_bucket_object_acl('private', 'public-read') @@ -3924,10 +3208,6 @@ def test_object_raw_get_bucket_acl(): response = unauthenticated_client.get_object(Bucket=bucket_name, Key='foo') assert response['ResponseMetadata']['HTTPStatusCode'] == 200 -@attr(resource='object.acl') -@attr(method='get') -@attr(operation='unauthenticated on private object') -@attr(assertion='fails 403') def test_object_raw_get_object_acl(): bucket_name = _setup_bucket_object_acl('public-read', 'private') @@ -3937,10 +3217,6 @@ def test_object_raw_get_object_acl(): assert status == 403 assert error_code == 'AccessDenied' -@attr(resource='object') -@attr(method='ACLs') 
-@attr(operation='authenticated on public bucket/object') -@attr(assertion='succeeds') def test_object_raw_authenticated(): bucket_name = _setup_bucket_object_acl('public-read', 'public-read') @@ -3948,10 +3224,6 @@ def test_object_raw_authenticated(): response = client.get_object(Bucket=bucket_name, Key='foo') assert response['ResponseMetadata']['HTTPStatusCode'] == 200 -@attr(resource='object') -@attr(method='get') -@attr(operation='authenticated on private bucket/private object with modified response headers') -@attr(assertion='succeeds') def test_object_raw_response_headers(): bucket_name = _setup_bucket_object_acl('private', 'private') @@ -3965,10 +3237,6 @@ def test_object_raw_response_headers(): assert response['ResponseMetadata']['HTTPHeaders']['content-encoding'] == 'aaa' assert response['ResponseMetadata']['HTTPHeaders']['cache-control'] == 'no-cache' -@attr(resource='object') -@attr(method='ACLs') -@attr(operation='authenticated on private bucket/public object') -@attr(assertion='succeeds') def test_object_raw_authenticated_bucket_acl(): bucket_name = _setup_bucket_object_acl('private', 'public-read') @@ -3976,10 +3244,6 @@ def test_object_raw_authenticated_bucket_acl(): response = client.get_object(Bucket=bucket_name, Key='foo') assert response['ResponseMetadata']['HTTPStatusCode'] == 200 -@attr(resource='object') -@attr(method='ACLs') -@attr(operation='authenticated on public bucket/private object') -@attr(assertion='succeeds') def test_object_raw_authenticated_object_acl(): bucket_name = _setup_bucket_object_acl('public-read', 'private') @@ -3987,10 +3251,6 @@ def test_object_raw_authenticated_object_acl(): response = client.get_object(Bucket=bucket_name, Key='foo') assert response['ResponseMetadata']['HTTPStatusCode'] == 200 -@attr(resource='object') -@attr(method='get') -@attr(operation='authenticated on deleted object and bucket') -@attr(assertion='fails 404') def test_object_raw_authenticated_bucket_gone(): bucket_name = 
_setup_bucket_object_acl('public-read', 'public-read') client = get_client() @@ -4003,10 +3263,6 @@ def test_object_raw_authenticated_bucket_gone(): assert status == 404 assert error_code == 'NoSuchBucket' -@attr(resource='object') -@attr(method='get') -@attr(operation='authenticated on deleted object') -@attr(assertion='fails 404') def test_object_raw_authenticated_object_gone(): bucket_name = _setup_bucket_object_acl('public-read', 'public-read') client = get_client() @@ -4018,10 +3274,6 @@ def test_object_raw_authenticated_object_gone(): assert status == 404 assert error_code == 'NoSuchKey' -@attr(resource='object') -@attr(method='get') -@attr(operation='x-amz-expires check not expired') -@attr(assertion='succeeds') def test_object_raw_get_x_amz_expires_not_expired(): bucket_name = _setup_bucket_object_acl('public-read', 'public-read') client = get_client() @@ -4032,10 +3284,6 @@ def test_object_raw_get_x_amz_expires_not_expired(): res = requests.get(url, verify=get_config_ssl_verify()).__dict__ assert res['status_code'] == 200 -@attr(resource='object') -@attr(method='get') -@attr(operation='check x-amz-expires value out of range zero') -@attr(assertion='fails 403') def test_object_raw_get_x_amz_expires_out_range_zero(): bucket_name = _setup_bucket_object_acl('public-read', 'public-read') client = get_client() @@ -4046,10 +3294,6 @@ def test_object_raw_get_x_amz_expires_out_range_zero(): res = requests.get(url, verify=get_config_ssl_verify()).__dict__ assert res['status_code'] == 403 -@attr(resource='object') -@attr(method='get') -@attr(operation='check x-amz-expires value out of max range') -@attr(assertion='fails 403') def test_object_raw_get_x_amz_expires_out_max_range(): bucket_name = _setup_bucket_object_acl('public-read', 'public-read') client = get_client() @@ -4060,10 +3304,6 @@ def test_object_raw_get_x_amz_expires_out_max_range(): res = requests.get(url, verify=get_config_ssl_verify()).__dict__ assert res['status_code'] == 403 -@attr(resource='object') 
-@attr(method='get') -@attr(operation='check x-amz-expires value out of positive range') -@attr(assertion='succeeds') def test_object_raw_get_x_amz_expires_out_positive_range(): bucket_name = _setup_bucket_object_acl('public-read', 'public-read') client = get_client() @@ -4075,10 +3315,6 @@ def test_object_raw_get_x_amz_expires_out_positive_range(): assert res['status_code'] == 403 -@attr(resource='object') -@attr(method='put') -@attr(operation='unauthenticated, no object acls') -@attr(assertion='fails 403') def test_object_anon_put(): bucket_name = get_new_bucket() client = get_client() @@ -4092,10 +3328,6 @@ def test_object_anon_put(): assert status == 403 assert error_code == 'AccessDenied' -@attr(resource='object') -@attr(method='put') -@attr(operation='unauthenticated, publically writable object') -@attr(assertion='succeeds') def test_object_anon_put_write_access(): bucket_name = _setup_bucket_acl('public-read-write') client = get_client() @@ -4106,10 +3338,6 @@ def test_object_anon_put_write_access(): response = unauthenticated_client.put_object(Bucket=bucket_name, Key='foo', Body='foo') assert response['ResponseMetadata']['HTTPStatusCode'] == 200 -@attr(resource='object') -@attr(method='put') -@attr(operation='authenticated, no object acls') -@attr(assertion='succeeds') def test_object_put_authenticated(): bucket_name = get_new_bucket() client = get_client() @@ -4117,10 +3345,6 @@ def test_object_put_authenticated(): response = client.put_object(Bucket=bucket_name, Key='foo', Body='foo') assert response['ResponseMetadata']['HTTPStatusCode'] == 200 -@attr(resource='object') -@attr(method='put') -@attr(operation='authenticated, no object acls') -@attr(assertion='succeeds') def test_object_raw_put_authenticated_expired(): bucket_name = get_new_bucket() client = get_client() @@ -4147,14 +3371,8 @@ def check_bad_bucket_name(bucket_name): # AWS does not enforce all documented bucket restrictions. 
# http://docs.amazonwebservices.com/AmazonS3/2006-03-01/dev/index.html?BucketRestrictions.html -@attr('fails_on_aws') @pytest.mark.fails_on_aws # Breaks DNS with SubdomainCallingFormat -@attr('fails_with_subdomain') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='name begins with underscore') -@attr(assertion='fails with subdomain: 400') def test_bucket_create_naming_bad_starts_nonalpha(): bucket_name = get_new_bucket_name() check_bad_bucket_name('_' + bucket_name) @@ -4177,17 +3395,9 @@ def check_invalid_bucketname(invalid_name): status, error_code = _get_status_and_error_code(e.response) return (status, error_code) -@attr(resource='bucket') -@attr(method='put') -@attr(operation='short (one character) name') -@attr(assertion='fails 400') def test_bucket_create_naming_bad_short_one(): check_bad_bucket_name('a') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='short (two character) name') -@attr(assertion='fails 400') def test_bucket_create_naming_bad_short_two(): check_bad_bucket_name('aa') @@ -4238,36 +3448,18 @@ def _test_bucket_create_naming_good_long(length): assert response['ResponseMetadata']['HTTPStatusCode'] == 200 # Breaks DNS with SubdomainCallingFormat -@attr('fails_with_subdomain') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/60 byte name') -@attr(assertion='fails with subdomain') -@attr('fails_on_aws') # InvalidBucketNameThe specified bucket is not valid.... @pytest.mark.fails_on_aws # Should now pass on AWS even though it has 'fails_on_aws' attr. def test_bucket_create_naming_good_long_60(): _test_bucket_create_naming_good_long(60) # Breaks DNS with SubdomainCallingFormat -@attr('fails_with_subdomain') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/61 byte name') -@attr(assertion='fails with subdomain') -@attr('fails_on_aws') # InvalidBucketNameThe specified bucket is not valid.... 
@pytest.mark.fails_on_aws # Should now pass on AWS even though it has 'fails_on_aws' attr. def test_bucket_create_naming_good_long_61(): _test_bucket_create_naming_good_long(61) # Breaks DNS with SubdomainCallingFormat -@attr('fails_with_subdomain') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/62 byte name') -@attr(assertion='fails with subdomain') -@attr('fails_on_aws') # InvalidBucketNameThe specified bucket is not valid.... @pytest.mark.fails_on_aws # Should now pass on AWS even though it has 'fails_on_aws' attr. def test_bucket_create_naming_good_long_62(): @@ -4275,22 +3467,11 @@ def test_bucket_create_naming_good_long_62(): # Breaks DNS with SubdomainCallingFormat -@attr('fails_with_subdomain') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/63 byte name') -@attr(assertion='fails with subdomain') def test_bucket_create_naming_good_long_63(): _test_bucket_create_naming_good_long(63) # Breaks DNS with SubdomainCallingFormat -@attr('fails_with_subdomain') -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list w/61 byte name') -@attr(assertion='fails with subdomain') -@attr('fails_on_aws') # InvalidBucketNameThe specified bucket is not valid.... @pytest.mark.fails_on_aws # Should now pass on AWS even though it has 'fails_on_aws' attr. def test_bucket_list_long_name(): @@ -4309,21 +3490,11 @@ def test_bucket_list_long_name(): # AWS does not enforce all documented bucket restrictions. 
# http://docs.amazonwebservices.com/AmazonS3/2006-03-01/dev/index.html?BucketRestrictions.html -@attr('fails_on_aws') @pytest.mark.fails_on_aws -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/ip address for name') -@attr(assertion='fails on aws') def test_bucket_create_naming_bad_ip(): check_bad_bucket_name('192.168.5.123') # test_bucket_create_naming_dns_* are valid but not recommended -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/underscore in name') -@attr(assertion='fails') -@attr('fails_on_aws') # InvalidBucketNameThe specified bucket is not valid.... @pytest.mark.fails_on_aws # Should now pass on AWS even though it has 'fails_on_aws' attr. def test_bucket_create_naming_dns_underscore(): @@ -4333,12 +3504,6 @@ def test_bucket_create_naming_dns_underscore(): assert error_code == 'InvalidBucketName' # Breaks DNS with SubdomainCallingFormat -@attr('fails_with_subdomain') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/100 byte name') -@attr(assertion='fails with subdomain') -@attr('fails_on_aws') # InvalidBucketNameThe specified bucket is not valid.... @pytest.mark.fails_on_aws def test_bucket_create_naming_dns_long(): prefix = get_prefix() @@ -4347,12 +3512,6 @@ def test_bucket_create_naming_dns_long(): check_good_bucket_name(num * 'a') # Breaks DNS with SubdomainCallingFormat -@attr('fails_with_subdomain') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/dash at end of name') -@attr(assertion='fails') -@attr('fails_on_aws') # InvalidBucketNameThe specified bucket is not valid.... @pytest.mark.fails_on_aws # Should now pass on AWS even though it has 'fails_on_aws' attr. def test_bucket_create_naming_dns_dash_at_end(): @@ -4363,12 +3522,6 @@ def test_bucket_create_naming_dns_dash_at_end(): # Breaks DNS with SubdomainCallingFormat -@attr('fails_with_subdomain') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/.. 
in name') -@attr(assertion='fails') -@attr('fails_on_aws') # InvalidBucketNameThe specified bucket is not valid.... @pytest.mark.fails_on_aws # Should now pass on AWS even though it has 'fails_on_aws' attr. def test_bucket_create_naming_dns_dot_dot(): @@ -4379,12 +3532,6 @@ def test_bucket_create_naming_dns_dot_dot(): # Breaks DNS with SubdomainCallingFormat -@attr('fails_with_subdomain') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/.- in name') -@attr(assertion='fails') -@attr('fails_on_aws') # InvalidBucketNameThe specified bucket is not valid.... @pytest.mark.fails_on_aws # Should now pass on AWS even though it has 'fails_on_aws' attr. def test_bucket_create_naming_dns_dot_dash(): @@ -4395,12 +3542,6 @@ def test_bucket_create_naming_dns_dot_dash(): # Breaks DNS with SubdomainCallingFormat -@attr('fails_with_subdomain') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create w/-. in name') -@attr(assertion='fails') -@attr('fails_on_aws') # InvalidBucketNameThe specified bucket is not valid.... @pytest.mark.fails_on_aws # Should now pass on AWS even though it has 'fails_on_aws' attr. def test_bucket_create_naming_dns_dash_dot(): @@ -4409,9 +3550,6 @@ def test_bucket_create_naming_dns_dash_dot(): assert status == 400 assert error_code == 'InvalidBucketName' -@attr(resource='bucket') -@attr(method='put') -@attr(operation='re-create') def test_bucket_create_exists(): # aws-s3 default region allows recreation of buckets # but all other regions fail with BucketAlreadyOwnedByYou. 
@@ -4426,10 +3564,6 @@ def test_bucket_create_exists(): assert e.status == 409 assert e.error_code == 'BucketAlreadyOwnedByYou' -@attr(resource='bucket') -@attr(method='get') -@attr(operation='get location') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_bucket_get_location(): location_constraint = get_main_api_name() @@ -4445,11 +3579,6 @@ def test_bucket_get_location(): location_constraint = None assert response['LocationConstraint'] == location_constraint -@attr(resource='bucket') -@attr(method='put') -@attr(operation='re-create by non-owner') -@attr(assertion='fails 409') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_bucket_create_exists_nonowner(): # Names are shared across a global namespace. As such, no two @@ -4465,11 +3594,6 @@ def test_bucket_create_exists_nonowner(): assert status == 409 assert error_code == 'BucketAlreadyExists' -@attr(resource='bucket') -@attr(method='put') -@attr(operation='re-create with existing acl') -@attr(assertion='fails 409') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_bucket_recreate_overwrite_acl(): bucket_name = get_new_bucket_name() @@ -4481,11 +3605,6 @@ def test_bucket_recreate_overwrite_acl(): assert status == 409 assert error_code == 'BucketAlreadyExists' -@attr(resource='bucket') -@attr(method='put') -@attr(operation='re-create with new acl') -@attr(assertion='fails 409') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_bucket_recreate_new_acl(): bucket_name = get_new_bucket_name() @@ -4527,10 +3646,6 @@ def check_grants(got, want): assert g == {'Grantee': {}} -@attr(resource='bucket') -@attr(method='get') -@attr(operation='default acl') -@attr(assertion='read back expected defaults') def test_bucket_acl_default(): bucket_name = get_new_bucket() client = get_client() @@ -4558,11 +3673,6 @@ def test_bucket_acl_default(): ], ) -@attr(resource='bucket') -@attr(method='get') -@attr(operation='public-read acl') -@attr(assertion='read back expected 
defaults') -@attr('fails_on_aws') # IllegalLocationConstraintExceptionThe unspecified location constraint is incompatible for the region specific endpoint this request was sent to. @pytest.mark.fails_on_aws def test_bucket_acl_canned_during_create(): bucket_name = get_new_bucket_name() @@ -4596,10 +3706,6 @@ def test_bucket_acl_canned_during_create(): ], ) -@attr(resource='bucket') -@attr(method='put') -@attr(operation='acl: public-read,private') -@attr(assertion='read back expected values') def test_bucket_acl_canned(): bucket_name = get_new_bucket_name() client = get_client() @@ -4650,10 +3756,6 @@ def test_bucket_acl_canned(): ], ) -@attr(resource='bucket.acls') -@attr(method='put') -@attr(operation='acl: public-read-write') -@attr(assertion='read back expected values') def test_bucket_acl_canned_publicreadwrite(): bucket_name = get_new_bucket_name() client = get_client() @@ -4693,10 +3795,6 @@ def test_bucket_acl_canned_publicreadwrite(): ], ) -@attr(resource='bucket') -@attr(method='put') -@attr(operation='acl: authenticated-read') -@attr(assertion='read back expected values') def test_bucket_acl_canned_authenticatedread(): bucket_name = get_new_bucket_name() client = get_client() @@ -4729,10 +3827,6 @@ def test_bucket_acl_canned_authenticatedread(): ], ) -@attr(resource='object.acls') -@attr(method='get') -@attr(operation='default acl') -@attr(assertion='read back expected defaults') def test_object_acl_default(): bucket_name = get_new_bucket() client = get_client() @@ -4759,10 +3853,6 @@ def test_object_acl_default(): ], ) -@attr(resource='object.acls') -@attr(method='put') -@attr(operation='acl public-read') -@attr(assertion='read back expected values') def test_object_acl_canned_during_create(): bucket_name = get_new_bucket() client = get_client() @@ -4797,10 +3887,6 @@ def test_object_acl_canned_during_create(): ], ) -@attr(resource='object.acls') -@attr(method='put') -@attr(operation='acl public-read,private') -@attr(assertion='read back expected 
values') def test_object_acl_canned(): bucket_name = get_new_bucket() client = get_client() @@ -4854,10 +3940,6 @@ def test_object_acl_canned(): ], ) -@attr(resource='object') -@attr(method='put') -@attr(operation='acl public-read-write') -@attr(assertion='read back expected values') def test_object_acl_canned_publicreadwrite(): bucket_name = get_new_bucket() client = get_client() @@ -4899,10 +3981,6 @@ def test_object_acl_canned_publicreadwrite(): ], ) -@attr(resource='object.acls') -@attr(method='put') -@attr(operation='acl authenticated-read') -@attr(assertion='read back expected values') def test_object_acl_canned_authenticatedread(): bucket_name = get_new_bucket() client = get_client() @@ -4936,10 +4014,6 @@ def test_object_acl_canned_authenticatedread(): ], ) -@attr(resource='object.acls') -@attr(method='put') -@attr(operation='acl bucket-owner-read') -@attr(assertion='read back expected values') def test_object_acl_canned_bucketownerread(): bucket_name = get_new_bucket_name() main_client = get_client() @@ -4982,10 +4056,6 @@ def test_object_acl_canned_bucketownerread(): ], ) -@attr(resource='object.acls') -@attr(method='put') -@attr(operation='acl bucket-owner-read') -@attr(assertion='read back expected values') def test_object_acl_canned_bucketownerfullcontrol(): bucket_name = get_new_bucket_name() main_client = get_client() @@ -5028,11 +4098,6 @@ def test_object_acl_canned_bucketownerfullcontrol(): ], ) -@attr(resource='object.acls') -@attr(method='put') -@attr(operation='set write-acp') -@attr(assertion='does not modify owner') -@attr('fails_on_aws') # InvalidArgumentInvalid idCanonicalUser/ID${ALTUSER} @pytest.mark.fails_on_aws def test_object_acl_full_control_verify_owner(): bucket_name = get_new_bucket_name() @@ -5082,10 +4147,6 @@ def add_obj_user_grant(bucket_name, key, grant): return grant -@attr(resource='object.acls') -@attr(method='put') -@attr(operation='set write-acp') -@attr(assertion='does not modify other attributes') def 
test_object_acl_full_control_verify_attributes(): bucket_name = get_new_bucket_name() main_client = get_client() @@ -5116,10 +4177,6 @@ def test_object_acl_full_control_verify_attributes(): assert content_type == response['ContentType'] assert etag == response['ETag'] -@attr(resource='bucket') -@attr(method='ACLs') -@attr(operation='set acl private') -@attr(assertion='a private object can be set to private') def test_bucket_acl_canned_private_to_private(): bucket_name = get_new_bucket() client = get_client() @@ -5188,49 +4245,24 @@ def _check_object_acl(permission): ) -@attr(resource='object') -@attr(method='ACLs') -@attr(operation='set acl FULL_CONTRO') -@attr(assertion='reads back correctly') -@attr('fails_on_aws') # InvalidArgumentInvalid idCanonicalUser/ID${USER} @pytest.mark.fails_on_aws def test_object_acl(): _check_object_acl('FULL_CONTROL') -@attr(resource='object') -@attr(method='ACLs') -@attr(operation='set acl WRITE') -@attr(assertion='reads back correctly') -@attr('fails_on_aws') # InvalidArgumentInvalid idCanonicalUser/ID${USER} @pytest.mark.fails_on_aws def test_object_acl_write(): _check_object_acl('WRITE') -@attr(resource='object') -@attr(method='ACLs') -@attr(operation='set acl WRITE_ACP') -@attr(assertion='reads back correctly') -@attr('fails_on_aws') # InvalidArgumentInvalid idCanonicalUser/ID${USER} @pytest.mark.fails_on_aws def test_object_acl_writeacp(): _check_object_acl('WRITE_ACP') -@attr(resource='object') -@attr(method='ACLs') -@attr(operation='set acl READ') -@attr(assertion='reads back correctly') -@attr('fails_on_aws') # InvalidArgumentInvalid idCanonicalUser/ID${USER} @pytest.mark.fails_on_aws def test_object_acl_read(): _check_object_acl('READ') -@attr(resource='object') -@attr(method='ACLs') -@attr(operation='set acl READ_ACP') -@attr(assertion='reads back correctly') -@attr('fails_on_aws') # InvalidArgumentInvalid idCanonicalUser/ID${USER} @pytest.mark.fails_on_aws def test_object_acl_readacp(): _check_object_acl('READ_ACP') @@ 
-5340,11 +4372,6 @@ def _check_bucket_acl_grant_cant_writeacp(bucket_name): alt_client = get_alt_client() check_access_denied(alt_client.put_bucket_acl,Bucket=bucket_name, ACL='public-read') -@attr(resource='bucket') -@attr(method='ACLs') -@attr(operation='set acl w/userid FULL_CONTROL') -@attr(assertion='can read/write data/acls') -@attr('fails_on_aws') # InvalidArgumentInvalid idCanonicalUser/ID${USER} @pytest.mark.fails_on_aws def test_bucket_acl_grant_userid_fullcontrol(): bucket_name = _bucket_acl_grant_userid('FULL_CONTROL') @@ -5370,11 +4397,6 @@ def test_bucket_acl_grant_userid_fullcontrol(): assert owner_id == main_user_id assert owner_display_name == main_display_name -@attr(resource='bucket') -@attr(method='ACLs') -@attr(operation='set acl w/userid READ') -@attr(assertion='can read data, no other r/w') -@attr('fails_on_aws') # InvalidArgumentInvalid idCanonicalUser/ID${ALTUSER} @pytest.mark.fails_on_aws def test_bucket_acl_grant_userid_read(): bucket_name = _bucket_acl_grant_userid('READ') @@ -5388,11 +4410,6 @@ def test_bucket_acl_grant_userid_read(): # can't write acl _check_bucket_acl_grant_cant_writeacp(bucket_name) -@attr(resource='bucket') -@attr(method='ACLs') -@attr(operation='set acl w/userid READ_ACP') -@attr(assertion='can read acl, no other r/w') -@attr('fails_on_aws') # InvalidArgumentInvalid idCanonicalUser/ID${ALTUSER} @pytest.mark.fails_on_aws def test_bucket_acl_grant_userid_readacp(): bucket_name = _bucket_acl_grant_userid('READ_ACP') @@ -5407,11 +4424,6 @@ def test_bucket_acl_grant_userid_readacp(): #_check_bucket_acl_grant_cant_writeacp_can_readacp(bucket) _check_bucket_acl_grant_cant_writeacp(bucket_name) -@attr(resource='bucket') -@attr(method='ACLs') -@attr(operation='set acl w/userid WRITE') -@attr(assertion='can write data, no other r/w') -@attr('fails_on_aws') # InvalidArgumentInvalid idCanonicalUser/ID${ALTUSER} @pytest.mark.fails_on_aws def test_bucket_acl_grant_userid_write(): bucket_name = _bucket_acl_grant_userid('WRITE') 
@@ -5425,11 +4437,6 @@ def test_bucket_acl_grant_userid_write(): # can't write acl _check_bucket_acl_grant_cant_writeacp(bucket_name) -@attr(resource='bucket') -@attr(method='ACLs') -@attr(operation='set acl w/userid WRITE_ACP') -@attr(assertion='can write acls, no other r/w') -@attr('fails_on_aws') # InvalidArgumentInvalid idCanonicalUser/ID${ALTUSER} @pytest.mark.fails_on_aws def test_bucket_acl_grant_userid_writeacp(): bucket_name = _bucket_acl_grant_userid('WRITE_ACP') @@ -5443,10 +4450,6 @@ def test_bucket_acl_grant_userid_writeacp(): # can write acl _check_bucket_acl_grant_can_writeacp(bucket_name) -@attr(resource='bucket') -@attr(method='ACLs') -@attr(operation='set acl w/invalid userid') -@attr(assertion='fails 400') def test_bucket_acl_grant_nonexist_user(): bucket_name = get_new_bucket() client = get_client() @@ -5463,10 +4466,6 @@ def test_bucket_acl_grant_nonexist_user(): assert status == 400 assert error_code == 'InvalidArgument' -@attr(resource='bucket') -@attr(method='ACLs') -@attr(operation='revoke all ACLs') -@attr(assertion='can: read obj, get/set bucket acl, cannot write objs') def test_bucket_acl_no_grants(): bucket_name = get_new_bucket() client = get_client() @@ -5520,13 +4519,7 @@ def _get_acl_header(user_id=None, perms=None): return headers -@attr(resource='object') -@attr(method='PUT') -@attr(operation='add all grants to user through headers') -@attr(assertion='adds all grants individually to second user') -@attr('fails_on_dho') @pytest.mark.fails_on_dho -@attr('fails_on_aws') # InvalidArgumentInvalid idCanonicalUser/ID${ALTUSER} @pytest.mark.fails_on_aws def test_object_header_acl_grants(): bucket_name = get_new_bucket() @@ -5594,13 +4587,7 @@ def test_object_header_acl_grants(): ], ) -@attr(resource='bucket') -@attr(method='PUT') -@attr(operation='add all grants to user through headers') -@attr(assertion='adds all grants individually to second user') -@attr('fails_on_dho') @pytest.mark.fails_on_dho -@attr('fails_on_aws') # 
InvalidArgumentInvalid idCanonicalUser/ID${ALTUSER} @pytest.mark.fails_on_aws def test_bucket_header_acl_grants(): headers = _get_acl_header() @@ -5680,11 +4667,6 @@ def test_bucket_header_acl_grants(): # This test will fail on DH Objects. DHO allows multiple users with one account, which # would violate the uniqueness requirement of a user's email. As such, DHO users are # created without an email. -@attr(resource='bucket') -@attr(method='ACLs') -@attr(operation='add second FULL_CONTROL user') -@attr(assertion='works for S3, fails for DHO') -@attr('fails_on_aws') # AmbiguousGrantByEmailAddressThe e-mail address you provided is associated with more than one account. Please retry your request using a different identification method or after resolving the ambiguity. @pytest.mark.fails_on_aws def test_bucket_acl_grant_email(): bucket_name = get_new_bucket() @@ -5728,10 +4710,6 @@ def test_bucket_acl_grant_email(): ] ) -@attr(resource='bucket') -@attr(method='ACLs') -@attr(operation='add acl for nonexistent user') -@attr(assertion='fail 400') def test_bucket_acl_grant_email_not_exist(): # behavior not documented by amazon bucket_name = get_new_bucket() @@ -5751,10 +4729,6 @@ def test_bucket_acl_grant_email_not_exist(): assert status == 400 assert error_code == 'UnresolvableGrantByEmailAddress' -@attr(resource='bucket') -@attr(method='ACLs') -@attr(operation='revoke all ACLs') -@attr(assertion='acls read back as empty') def test_bucket_acl_revoke_all(): # revoke all access, including the owner's access bucket_name = get_new_bucket() @@ -5781,11 +4755,6 @@ def test_bucket_acl_revoke_all(): # TODO rgw log_bucket.set_as_logging_target() gives 403 Forbidden # http://tracker.newdream.net/issues/984 -@attr(resource='bucket.log') -@attr(method='put') -@attr(operation='set/enable/disable logging target') -@attr(assertion='operations succeed') -@attr('fails_on_rgw') @pytest.mark.fails_on_rgw def test_logging_toggle(): bucket_name = get_new_bucket() @@ -5844,10 +4813,6 @@ def 
list_bucket_versions(client, bucket_name): return result -@attr(resource='object') -@attr(method='ACLs') -@attr(operation='set bucket/object acls: private/private') -@attr(assertion='public has no access to bucket or objects') def test_access_bucket_private_object_private(): # all the test_access_* tests follow this template bucket_name, key1, key2, newkey = _setup_access(bucket_acl='private', object_acl='private') @@ -5875,11 +4840,6 @@ def test_access_bucket_private_object_private(): alt_client3 = get_alt_client() check_access_denied(alt_client3.put_object, Bucket=bucket_name, Key=newkey, Body='newcontent') -@attr(resource='object') -@attr(method='ACLs') -@attr(operation='set bucket/object acls: private/private with list-objects-v2') -@attr(assertion='public has no access to bucket or objects') -@attr('list-objects-v2') @pytest.mark.list_objects_v2 def test_access_bucket_private_objectv2_private(): # all the test_access_* tests follow this template @@ -5908,10 +4868,6 @@ def test_access_bucket_private_objectv2_private(): alt_client3 = get_alt_client() check_access_denied(alt_client3.put_object, Bucket=bucket_name, Key=newkey, Body='newcontent') -@attr(resource='object') -@attr(method='ACLs') -@attr(operation='set bucket/object acls: private/public-read') -@attr(assertion='public can only read readable object') def test_access_bucket_private_object_publicread(): bucket_name, key1, key2, newkey = _setup_access(bucket_acl='private', object_acl='public-read') @@ -5932,11 +4888,6 @@ def test_access_bucket_private_object_publicread(): check_access_denied(alt_client3.list_objects, Bucket=bucket_name) check_access_denied(alt_client3.put_object, Bucket=bucket_name, Key=newkey, Body='newcontent') -@attr(resource='object') -@attr(method='ACLs') -@attr(operation='set bucket/object acls: private/public-read with list-objects-v2') -@attr(assertion='public can only read readable object') -@attr('list-objects-v2') @pytest.mark.list_objects_v2 def 
test_access_bucket_private_objectv2_publicread(): @@ -5958,10 +4909,6 @@ def test_access_bucket_private_objectv2_publicread(): check_access_denied(alt_client3.list_objects_v2, Bucket=bucket_name) check_access_denied(alt_client3.put_object, Bucket=bucket_name, Key=newkey, Body='newcontent') -@attr(resource='object') -@attr(method='ACLs') -@attr(operation='set bucket/object acls: private/public-read/write') -@attr(assertion='public can only read the readable object') def test_access_bucket_private_object_publicreadwrite(): bucket_name, key1, key2, newkey = _setup_access(bucket_acl='private', object_acl='public-read-write') alt_client = get_alt_client() @@ -5982,11 +4929,6 @@ def test_access_bucket_private_object_publicreadwrite(): check_access_denied(alt_client3.list_objects, Bucket=bucket_name) check_access_denied(alt_client3.put_object, Bucket=bucket_name, Key=newkey, Body='newcontent') -@attr(resource='object') -@attr(method='ACLs') -@attr(operation='set bucket/object acls: private/public-read/write with list-objects-v2') -@attr(assertion='public can only read the readable object') -@attr('list-objects-v2') @pytest.mark.list_objects_v2 def test_access_bucket_private_objectv2_publicreadwrite(): bucket_name, key1, key2, newkey = _setup_access(bucket_acl='private', object_acl='public-read-write') @@ -6008,10 +4950,6 @@ def test_access_bucket_private_objectv2_publicreadwrite(): check_access_denied(alt_client3.list_objects_v2, Bucket=bucket_name) check_access_denied(alt_client3.put_object, Bucket=bucket_name, Key=newkey, Body='newcontent') -@attr(resource='object') -@attr(method='ACLs') -@attr(operation='set bucket/object acls: public-read/private') -@attr(assertion='public can only list the bucket') def test_access_bucket_publicread_object_private(): bucket_name, key1, key2, newkey = _setup_access(bucket_acl='public-read', object_acl='private') alt_client = get_alt_client() @@ -6031,10 +4969,6 @@ def test_access_bucket_publicread_object_private(): assert objs == 
['bar', 'foo'] check_access_denied(alt_client3.put_object, Bucket=bucket_name, Key=newkey, Body='newcontent') -@attr(resource='object') -@attr(method='ACLs') -@attr(operation='set bucket/object acls: public-read/public-read') -@attr(assertion='public can read readable objects and list bucket') def test_access_bucket_publicread_object_publicread(): bucket_name, key1, key2, newkey = _setup_access(bucket_acl='public-read', object_acl='public-read') alt_client = get_alt_client() @@ -6059,10 +4993,6 @@ def test_access_bucket_publicread_object_publicread(): check_access_denied(alt_client3.put_object, Bucket=bucket_name, Key=newkey, Body='newcontent') -@attr(resource='object') -@attr(method='ACLs') -@attr(operation='set bucket/object acls: public-read/public-read-write') -@attr(assertion='public can read readable objects and list bucket') def test_access_bucket_publicread_object_publicreadwrite(): bucket_name, key1, key2, newkey = _setup_access(bucket_acl='public-read', object_acl='public-read-write') alt_client = get_alt_client() @@ -6089,10 +5019,6 @@ def test_access_bucket_publicread_object_publicreadwrite(): check_access_denied(alt_client3.put_object, Bucket=bucket_name, Key=newkey, Body='newcontent') -@attr(resource='object') -@attr(method='ACLs') -@attr(operation='set bucket/object acls: public-read-write/private') -@attr(assertion='private objects cannot be read, but can be overwritten') def test_access_bucket_publicreadwrite_object_private(): bucket_name, key1, key2, newkey = _setup_access(bucket_acl='public-read-write', object_acl='private') alt_client = get_alt_client() @@ -6108,10 +5034,6 @@ def test_access_bucket_publicreadwrite_object_private(): assert objs == ['bar', 'foo'] alt_client.put_object(Bucket=bucket_name, Key=newkey, Body='newcontent') -@attr(resource='object') -@attr(method='ACLs') -@attr(operation='set bucket/object acls: public-read-write/public-read') -@attr(assertion='private objects cannot be read, but can be overwritten') def 
test_access_bucket_publicreadwrite_object_publicread(): bucket_name, key1, key2, newkey = _setup_access(bucket_acl='public-read-write', object_acl='public-read') alt_client = get_alt_client() @@ -6130,10 +5052,6 @@ def test_access_bucket_publicreadwrite_object_publicread(): assert objs == ['bar', 'foo'] alt_client.put_object(Bucket=bucket_name, Key=newkey, Body='newcontent') -@attr(resource='object') -@attr(method='ACLs') -@attr(operation='set bucket/object acls: public-read-write/public-read-write') -@attr(assertion='private objects cannot be read, but can be overwritten') def test_access_bucket_publicreadwrite_object_publicreadwrite(): bucket_name, key1, key2, newkey = _setup_access(bucket_acl='public-read-write', object_acl='public-read-write') alt_client = get_alt_client() @@ -6149,10 +5067,6 @@ def test_access_bucket_publicreadwrite_object_publicreadwrite(): assert objs == ['bar', 'foo'] alt_client.put_object(Bucket=bucket_name, Key=newkey, Body='newcontent') -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list all buckets') -@attr(assertion='returns all expected buckets') def test_buckets_create_then_list(): client = get_client() bucket_names = [] @@ -6173,10 +5087,6 @@ def test_buckets_create_then_list(): if name not in buckets_list: raise RuntimeError("S3 implementation's GET on Service did not return bucket we created: %r", bucket.name) -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list all buckets') -@attr(assertion='all buckets have a sane creation time') def test_buckets_list_ctime(): # check that creation times are within a day before = datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta(days=1) @@ -6190,11 +5100,6 @@ def test_buckets_list_ctime(): ctime = bucket['CreationDate'] assert before <= ctime, '%r > %r' % (before, ctime) -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list all buckets (anonymous)') -@attr(assertion='succeeds') -@attr('fails_on_aws') @pytest.mark.fails_on_aws 
def test_list_buckets_anonymous(): # Get a connection with bad authorization, then change it to be our new Anonymous auth mechanism, @@ -6206,10 +5111,6 @@ def test_list_buckets_anonymous(): response = unauthenticated_client.list_buckets() assert len(response['Buckets']) == 0 -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list all buckets (bad auth)') -@attr(assertion='fails 403') def test_list_buckets_invalid_auth(): bad_auth_client = get_bad_auth_client() e = assert_raises(ClientError, bad_auth_client.list_buckets) @@ -6217,10 +5118,6 @@ def test_list_buckets_invalid_auth(): assert status == 403 assert error_code == 'InvalidAccessKeyId' -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list all buckets (bad auth)') -@attr(assertion='fails 403') def test_list_buckets_bad_auth(): main_access_key = get_main_aws_access_key() bad_auth_client = get_bad_auth_client(aws_access_key_id=main_access_key) @@ -6235,10 +5132,6 @@ def override_prefix_a(): yield nuke_prefixed_buckets(prefix='a'+get_prefix()) -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create bucket') -@attr(assertion='name starts with alphabetic works') # this test goes outside the user-configure prefix because it needs to # control the initial character of the bucket name def test_bucket_create_naming_good_starts_alpha(override_prefix_a): @@ -6250,33 +5143,17 @@ def override_prefix_0(): yield nuke_prefixed_buckets(prefix='0'+get_prefix()) -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create bucket') -@attr(assertion='name starts with numeric works') # this test goes outside the user-configure prefix because it needs to # control the initial character of the bucket name def test_bucket_create_naming_good_starts_digit(override_prefix_0): check_good_bucket_name('foo', _prefix='0'+get_prefix()) -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create bucket') -@attr(assertion='name containing dot works') def 
test_bucket_create_naming_good_contains_period(): check_good_bucket_name('aaa.111') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create bucket') -@attr(assertion='name containing hyphen works') def test_bucket_create_naming_good_contains_hyphen(): check_good_bucket_name('aaa-111') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='create bucket with objects and recreate it') -@attr(assertion='bucket recreation not overriding index') def test_bucket_recreate_not_overriding(): key_names = ['mykey1', 'mykey2'] bucket_name = _create_objects(keys=key_names) @@ -6290,11 +5167,6 @@ def test_bucket_recreate_not_overriding(): objs_list = get_objects_list(bucket_name) assert key_names == objs_list -@attr(resource='object') -@attr(method='put') -@attr(operation='create and list objects with special names') -@attr(assertion='special names work') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_bucket_create_special_key_names(): key_names = [ @@ -6326,10 +5198,6 @@ def test_bucket_create_special_key_names(): assert name == body client.put_object_acl(Bucket=bucket_name, Key=name, ACL='private') -@attr(resource='bucket') -@attr(method='get') -@attr(operation='create and list objects with underscore as prefix, list using prefix') -@attr(assertion='listing works correctly') def test_bucket_list_special_prefix(): key_names = ['_bla/1', '_bla/2', '_bla/3', '_bla/4', 'abcd'] bucket_name = _create_objects(keys=key_names) @@ -6341,11 +5209,6 @@ def test_bucket_list_special_prefix(): objs_list = get_objects_list(bucket_name, prefix='_bla/') assert len(objs_list) == 4 -@attr(resource='object') -@attr(method='put') -@attr(operation='copy zero sized object in same bucket') -@attr(assertion='works') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_copy_zero_size(): key = 'foo123bar' @@ -6360,11 +5223,6 @@ def test_object_copy_zero_size(): response = client.get_object(Bucket=bucket_name, Key='bar321foo') assert 
response['ContentLength'] == 0 -@attr(resource='object') -@attr(method='put') -@attr(operation='copy 16mb object in same bucket') -@attr(assertion='works') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_copy_16m(): bucket_name = get_new_bucket() @@ -6378,11 +5236,6 @@ def test_object_copy_16m(): response = client.get_object(Bucket=bucket_name, Key=key2) assert response['ContentLength'] == 16*1024*1024 -@attr(resource='object') -@attr(method='put') -@attr(operation='copy object in same bucket') -@attr(assertion='works') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_copy_same_bucket(): bucket_name = get_new_bucket() @@ -6397,11 +5250,6 @@ def test_object_copy_same_bucket(): body = _get_body(response) assert 'foo' == body -@attr(resource='object') -@attr(method='put') -@attr(operation='copy object with content-type') -@attr(assertion='works') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_copy_verify_contenttype(): bucket_name = get_new_bucket() @@ -6420,10 +5268,6 @@ def test_object_copy_verify_contenttype(): response_content_type = response['ContentType'] assert response_content_type == content_type -@attr(resource='object') -@attr(method='put') -@attr(operation='copy object to itself') -@attr(assertion='fails') def test_object_copy_to_itself(): bucket_name = get_new_bucket() client = get_client() @@ -6436,11 +5280,6 @@ def test_object_copy_to_itself(): assert status == 400 assert error_code == 'InvalidRequest' -@attr(resource='object') -@attr(method='put') -@attr(operation='modify object metadata by copying') -@attr(assertion='fails') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_copy_to_itself_with_metadata(): bucket_name = get_new_bucket() @@ -6453,11 +5292,6 @@ def test_object_copy_to_itself_with_metadata(): response = client.get_object(Bucket=bucket_name, Key='foo123bar') assert response['Metadata'] == metadata -@attr(resource='object') -@attr(method='put') 
-@attr(operation='copy object from different bucket') -@attr(assertion='works') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_copy_diff_bucket(): bucket_name1 = get_new_bucket() @@ -6474,10 +5308,6 @@ def test_object_copy_diff_bucket(): body = _get_body(response) assert 'foo' == body -@attr(resource='object') -@attr(method='put') -@attr(operation='copy to an inaccessible bucket') -@attr(assertion='fails w/AttributeError') def test_object_copy_not_owned_bucket(): client = get_client() alt_client = get_alt_client() @@ -6494,10 +5324,6 @@ def test_object_copy_not_owned_bucket(): status, error_code = _get_status_and_error_code(e.response) assert status == 403 -@attr(resource='object') -@attr(method='put') -@attr(operation='copy a non-owned object in a non-owned bucket, but with perms') -@attr(assertion='works') def test_object_copy_not_owned_object_bucket(): client = get_client() alt_client = get_alt_client() @@ -6519,11 +5345,6 @@ def test_object_copy_not_owned_object_bucket(): copy_source = {'Bucket': bucket_name, 'Key': 'foo123bar'} alt_client.copy(copy_source, bucket_name, 'bar321foo') -@attr(resource='object') -@attr(method='put') -@attr(operation='copy object and change acl') -@attr(assertion='works') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_copy_canned_acl(): bucket_name = get_new_bucket() @@ -6544,10 +5365,6 @@ def test_object_copy_canned_acl(): # check ACL is applied by doing GET from another user alt_client.get_object(Bucket=bucket_name, Key='foo123bar') -@attr(resource='object') -@attr(method='put') -@attr(operation='copy object and retain metadata') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_copy_retaining_metadata(): for size in [3, 1024 * 1024]: @@ -6567,10 +5384,6 @@ def test_object_copy_retaining_metadata(): body = _get_body(response) assert size == response['ContentLength'] -@attr(resource='object') -@attr(method='put') -@attr(operation='copy object and replace 
metadata') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_copy_replacing_metadata(): for size in [3, 1024 * 1024]: @@ -6592,9 +5405,6 @@ def test_object_copy_replacing_metadata(): assert metadata == response['Metadata'] assert size == response['ContentLength'] -@attr(resource='object') -@attr(method='put') -@attr(operation='copy from non-existent bucket') def test_object_copy_bucket_not_found(): bucket_name = get_new_bucket() client = get_client() @@ -6604,9 +5414,6 @@ def test_object_copy_bucket_not_found(): status = _get_status(e.response) assert status == 404 -@attr(resource='object') -@attr(method='put') -@attr(operation='copy from non-existent object') def test_object_copy_key_not_found(): bucket_name = get_new_bucket() client = get_client() @@ -6616,12 +5423,6 @@ def test_object_copy_key_not_found(): status = _get_status(e.response) assert status == 404 -@attr(resource='object') -@attr(method='put') -@attr(operation='copy object to/from versioned bucket') -@attr(assertion='works') -@attr('versioning') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_copy_versioned_bucket(): bucket_name = get_new_bucket() @@ -6686,12 +5487,6 @@ def test_object_copy_versioned_bucket(): assert data_str == body assert size == response['ContentLength'] -@attr(resource='object') -@attr(method='put') -@attr(operation='copy object to/from versioned bucket with url-encoded name') -@attr(assertion='works') -@attr('versioning') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_copy_versioned_url_encoding(): bucket = get_new_bucket_resource() @@ -6755,12 +5550,6 @@ def _multipart_upload(bucket_name, key, size, part_size=5*1024*1024, client=None return (upload_id, s, parts) -@attr(resource='object') -@attr(method='put') -@attr(operation='test copy object of a multipart upload') -@attr(assertion='successful') -@attr('versioning') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def 
test_object_copy_versioning_multipart_upload(): bucket_name = get_new_bucket() @@ -6838,9 +5627,6 @@ def test_object_copy_versioning_multipart_upload(): assert key1_metadata == response['Metadata'] assert content_type == response['ContentType'] -@attr(resource='object') -@attr(method='put') -@attr(operation='check multipart upload without parts') def test_multipart_upload_empty(): bucket_name = get_new_bucket() client = get_client() @@ -6853,10 +5639,6 @@ def test_multipart_upload_empty(): assert status == 400 assert error_code == 'MalformedXML' -@attr(resource='object') -@attr(method='put') -@attr(operation='check multipart uploads with single small part') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_multipart_upload_small(): bucket_name = get_new_bucket() @@ -6932,10 +5714,6 @@ def _check_key_content(src_key, src_bucket_name, dest_key, dest_bucket_name, ver src_data = _get_body(response) assert src_data == dest_data -@attr(resource='object') -@attr(method='put') -@attr(operation='check multipart copies with single small part') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_multipart_copy_small(): src_key = 'foo' @@ -6953,9 +5731,6 @@ def test_multipart_copy_small(): assert size == response['ContentLength'] _check_key_content(src_key, src_bucket_name, dest_key, dest_bucket_name) -@attr(resource='object') -@attr(method='put') -@attr(operation='check multipart copies with an invalid range') def test_multipart_copy_invalid_range(): client = get_client() src_key = 'source' @@ -6975,11 +5750,7 @@ def test_multipart_copy_invalid_range(): assert error_code == 'InvalidRange' -@attr(resource='object') -@attr(method='put') -@attr(operation='check multipart copy with an improperly formatted range') # TODO: remove fails_on_rgw when https://tracker.ceph.com/issues/40795 is resolved -@attr('fails_on_rgw') @pytest.mark.fails_on_rgw def test_multipart_copy_improper_range(): client = get_client() @@ -7010,9 +5781,6 @@ def 
test_multipart_copy_improper_range(): assert error_code == 'InvalidArgument' -@attr(resource='object') -@attr(method='put') -@attr(operation='check multipart copies without x-amz-copy-source-range') def test_multipart_copy_without_range(): client = get_client() src_key = 'source' @@ -7038,10 +5806,6 @@ def test_multipart_copy_without_range(): assert response['ContentLength'] == 10 _check_key_content(src_key, src_bucket_name, dest_key, dest_bucket_name) -@attr(resource='object') -@attr(method='put') -@attr(operation='check multipart copies with single small part') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_multipart_copy_special_names(): src_bucket_name = get_new_bucket() @@ -7076,13 +5840,7 @@ def _check_content_using_range(key, bucket_name, data, step): body = _get_body(response) assert body == data[ofs:end+1] -@attr(resource='object') -@attr(method='put') -@attr(operation='complete multi-part upload') -@attr(assertion='successful') -@attr('fails_on_aws') @pytest.mark.fails_on_aws -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_multipart_upload(): bucket_name = get_new_bucket() @@ -7144,11 +5902,6 @@ def check_configure_versioning_retry(bucket_name, status, expected_string): assert expected_string == read_status -@attr(resource='object') -@attr(method='put') -@attr(operation='check multipart copies of versioned objects') -@attr('versioning') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_multipart_copy_versioned(): src_bucket_name = get_new_bucket() @@ -7195,14 +5948,6 @@ def _check_upload_multipart_resend(bucket_name, key, objlen, resend_parts): _check_content_using_range(key, bucket_name, data, 1000000) _check_content_using_range(key, bucket_name, data, 10000000) -@attr(resource='object') -@attr(method='put') -@attr(operation='complete multiple multi-part upload with different sizes') -@attr(resource='object') -@attr(method='put') -@attr(operation='complete multi-part upload') 
-@attr(assertion='successful') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_multipart_upload_resend_part(): bucket_name = get_new_bucket() @@ -7215,7 +5960,6 @@ def test_multipart_upload_resend_part(): _check_upload_multipart_resend(bucket_name, key, objlen, [1,2]) _check_upload_multipart_resend(bucket_name, key, objlen, [0,1,2,3,4,5]) -@attr(assertion='successful') def test_multipart_upload_multiple_sizes(): bucket_name = get_new_bucket() key="mymultipart" @@ -7245,8 +5989,6 @@ def test_multipart_upload_multiple_sizes(): (upload_id, data, parts) = _multipart_upload(bucket_name=bucket_name, key=key, size=objlen) client.complete_multipart_upload(Bucket=bucket_name, Key=key, UploadId=upload_id, MultipartUpload={'Parts': parts}) -@attr(assertion='successful') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_multipart_copy_multiple_sizes(): src_key = 'foo' @@ -7286,10 +6028,6 @@ def test_multipart_copy_multiple_sizes(): client.complete_multipart_upload(Bucket=dest_bucket_name, Key=dest_key, UploadId=upload_id, MultipartUpload={'Parts': parts}) _check_key_content(src_key, src_bucket_name, dest_key, dest_bucket_name) -@attr(resource='object') -@attr(method='put') -@attr(operation='check failure on multiple multi-part upload with size too small') -@attr(assertion='fails 400') def test_multipart_upload_size_too_small(): bucket_name = get_new_bucket() key="mymultipart" @@ -7335,20 +6073,11 @@ def _do_test_multipart_upload_contents(bucket_name, key, num_parts): return all_payload -@attr(resource='object') -@attr(method='put') -@attr(operation='check contents of multi-part upload') -@attr(assertion='successful') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_multipart_upload_contents(): bucket_name = get_new_bucket() _do_test_multipart_upload_contents(bucket_name, 'mymultipart', 3) -@attr(resource='object') -@attr(method='put') -@attr(operation=' multi-part upload overwrites existing key') -@attr(assertion='successful') 
def test_multipart_upload_overwrite_existing_object(): bucket_name = get_new_bucket() client = get_client() @@ -7374,10 +6103,6 @@ def test_multipart_upload_overwrite_existing_object(): assert test_string == payload*num_parts -@attr(resource='object') -@attr(method='put') -@attr(operation='abort multi-part upload') -@attr(assertion='successful') def test_abort_multipart_upload(): bucket_name = get_new_bucket() key="mymultipart" @@ -7394,10 +6119,6 @@ def test_abort_multipart_upload(): rgw_object_count = int(response['ResponseMetadata']['HTTPHeaders'].get('x-rgw-object-count', 0)) assert rgw_object_count == 0 -@attr(resource='object') -@attr(method='put') -@attr(operation='abort non-existent multi-part upload') -@attr(assertion='fails 404') def test_abort_multipart_upload_not_found(): bucket_name = get_new_bucket() client = get_client() @@ -7409,11 +6130,6 @@ def test_abort_multipart_upload_not_found(): assert status == 404 assert error_code == 'NoSuchUpload' -@attr(resource='object') -@attr(method='put') -@attr(operation='concurrent multi-part uploads') -@attr(assertion='successful') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_list_multipart_upload(): bucket_name = get_new_bucket() @@ -7445,11 +6161,6 @@ def test_list_multipart_upload(): client.abort_multipart_upload(Bucket=bucket_name, Key=key, UploadId=upload_id2) client.abort_multipart_upload(Bucket=bucket_name, Key=key2, UploadId=upload_id3) -@attr(resource='bucket') -@attr(method='get') -@attr(operation='list multipart uploads with different owners') -@attr(assertion='successful') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_list_multipart_upload_owner(): bucket_name = get_new_bucket() @@ -7496,9 +6207,6 @@ def test_list_multipart_upload_owner(): finally: client1.abort_multipart_upload(Bucket=bucket_name, Key=key1, UploadId=upload1) -@attr(resource='object') -@attr(method='put') -@attr(operation='multi-part upload with missing part') def 
test_multipart_upload_missing_part(): bucket_name = get_new_bucket() client = get_client() @@ -7518,9 +6226,6 @@ def test_multipart_upload_missing_part(): assert status == 400 assert error_code == 'InvalidPart' -@attr(resource='object') -@attr(method='put') -@attr(operation='multi-part upload with incorrect ETag') def test_multipart_upload_incorrect_etag(): bucket_name = get_new_bucket() client = get_client() @@ -7574,12 +6279,6 @@ def _simple_http_req_100_cont(host, port, is_secure, method, resource): return l[1] -@attr(resource='object') -@attr(method='put') -@attr(operation='w/expect continue') -@attr(assertion='succeeds if object is public-read-write') -@attr('100_continue') -@attr('fails_on_mod_proxy_fcgi') def test_100_continue(): bucket_name = get_new_bucket_name() client = get_client() @@ -7600,11 +6299,6 @@ def test_100_continue(): status = _simple_http_req_100_cont(host, port, is_secure, 'PUT', resource) assert status == '100' -@attr(resource='bucket') -@attr(method='put') -@attr(operation='set cors') -@attr(assertion='succeeds') -@attr('cors') def test_set_cors(): bucket_name = get_new_bucket() client = get_client() @@ -7640,11 +6334,6 @@ def _cors_request_and_check(func, url, headers, expect_status, expect_allow_orig assert r.headers.get('access-control-allow-origin', None) == expect_allow_origin assert r.headers.get('access-control-allow-methods', None) == expect_allow_methods -@attr(resource='bucket') -@attr(method='get') -@attr(operation='check cors response when origin header set') -@attr(assertion='returning cors header') -@attr('cors') def test_cors_origin_response(): bucket_name = _setup_bucket_acl(bucket_acl='public-read') client = get_client() @@ -7720,11 +6409,6 @@ def test_cors_origin_response(): _cors_request_and_check(requests.options, url, {'Origin': 'foo.put', 'Access-Control-Request-Method': 'GET'}, 403, None, None) _cors_request_and_check(requests.options, url, {'Origin': 'foo.put', 'Access-Control-Request-Method': 'PUT'}, 200, 
'foo.put', 'PUT') -@attr(resource='bucket') -@attr(method='get') -@attr(operation='check cors response when origin is set to wildcard') -@attr(assertion='returning cors header') -@attr('cors') def test_cors_origin_wildcard(): bucket_name = _setup_bucket_acl(bucket_acl='public-read') client = get_client() @@ -7750,11 +6434,6 @@ def test_cors_origin_wildcard(): _cors_request_and_check(requests.get, url, None, 200, None, None) _cors_request_and_check(requests.get, url, {'Origin': 'example.origin'}, 200, '*', 'GET') -@attr(resource='bucket') -@attr(method='get') -@attr(operation='check cors response when Access-Control-Request-Headers is set in option request') -@attr(assertion='returning cors header') -@attr('cors') def test_cors_header_option(): bucket_name = _setup_bucket_acl(bucket_acl='public-read') client = get_client() @@ -7781,11 +6460,6 @@ def test_cors_header_option(): _cors_request_and_check(requests.options, obj_url, {'Origin': 'example.origin','Access-Control-Request-Headers':'x-amz-meta-header2','Access-Control-Request-Method':'GET'}, 403, None, None) -@attr(resource='bucket') -@attr(method='put') -@attr(operation='put tags') -@attr(assertion='succeeds') -@attr('tagging') @pytest.mark.tagging def test_set_bucket_tagging(): bucket_name = get_new_bucket() @@ -7935,24 +6609,12 @@ def _test_atomic_read(file_size): _verify_atomic_key_data(bucket_name, 'testobj', file_size, 'B') -@attr(resource='object') -@attr(method='put') -@attr(operation='read atomicity') -@attr(assertion='1MB successful') def test_atomic_read_1mb(): _test_atomic_read(1024*1024) -@attr(resource='object') -@attr(method='put') -@attr(operation='read atomicity') -@attr(assertion='4MB successful') def test_atomic_read_4mb(): _test_atomic_read(1024*1024*4) -@attr(resource='object') -@attr(method='put') -@attr(operation='read atomicity') -@attr(assertion='8MB successful') def test_atomic_read_8mb(): _test_atomic_read(1024*1024*8) @@ -7988,24 +6650,12 @@ def _test_atomic_write(file_size): # verify 
B's _verify_atomic_key_data(bucket_name, objname, file_size, 'B') -@attr(resource='object') -@attr(method='put') -@attr(operation='write atomicity') -@attr(assertion='1MB successful') def test_atomic_write_1mb(): _test_atomic_write(1024*1024) -@attr(resource='object') -@attr(method='put') -@attr(operation='write atomicity') -@attr(assertion='4MB successful') def test_atomic_write_4mb(): _test_atomic_write(1024*1024*4) -@attr(resource='object') -@attr(method='put') -@attr(operation='write atomicity') -@attr(assertion='8MB successful') def test_atomic_write_8mb(): _test_atomic_write(1024*1024*8) @@ -8033,24 +6683,12 @@ def _test_atomic_dual_write(file_size): # verify the file _verify_atomic_key_data(bucket_name, objname, file_size, 'B') -@attr(resource='object') -@attr(method='put') -@attr(operation='write one or the other') -@attr(assertion='1MB successful') def test_atomic_dual_write_1mb(): _test_atomic_dual_write(1024*1024) -@attr(resource='object') -@attr(method='put') -@attr(operation='write one or the other') -@attr(assertion='4MB successful') def test_atomic_dual_write_4mb(): _test_atomic_dual_write(1024*1024*4) -@attr(resource='object') -@attr(method='put') -@attr(operation='write one or the other') -@attr(assertion='8MB successful') def test_atomic_dual_write_8mb(): _test_atomic_dual_write(1024*1024*8) @@ -8083,11 +6721,6 @@ def _test_atomic_conditional_write(file_size): # verify B's _verify_atomic_key_data(bucket_name, objname, file_size, 'B') -@attr(resource='object') -@attr(method='put') -@attr(operation='write atomicity') -@attr(assertion='1MB successful') -@attr('fails_on_aws') @pytest.mark.fails_on_aws def test_atomic_conditional_write_1mb(): _test_atomic_conditional_write(1024*1024) @@ -8125,26 +6758,14 @@ def _test_atomic_dual_conditional_write(file_size): # verify the file _verify_atomic_key_data(bucket_name, objname, file_size, 'B') -@attr(resource='object') -@attr(method='put') -@attr(operation='write one or the other') -@attr(assertion='1MB 
successful') -@attr('fails_on_aws') @pytest.mark.fails_on_aws # TODO: test not passing with SSL, fix this -@attr('fails_on_rgw') @pytest.mark.fails_on_rgw def test_atomic_dual_conditional_write_1mb(): _test_atomic_dual_conditional_write(1024*1024) -@attr(resource='object') -@attr(method='put') -@attr(operation='write file in deleted bucket') -@attr(assertion='fail 404') -@attr('fails_on_aws') @pytest.mark.fails_on_aws # TODO: test not passing with SSL, fix this -@attr('fails_on_rgw') @pytest.mark.fails_on_rgw def test_atomic_write_bucket_gone(): bucket_name = get_new_bucket() @@ -8161,10 +6782,6 @@ def test_atomic_write_bucket_gone(): assert status == 404 assert error_code == 'NoSuchBucket' -@attr(resource='object') -@attr(method='put') -@attr(operation='begin to overwrite file with multipart upload then abort') -@attr(assertion='read back original key contents') def test_atomic_multipart_upload_write(): bucket_name = get_new_bucket() client = get_client() @@ -8203,10 +6820,6 @@ class ActionOnCount: if self.count == self.trigger_count: self.result = self.action() -@attr(resource='object') -@attr(method='put') -@attr(operation='multipart check for two writes of the same part, first write finishes last') -@attr(assertion='object contains correct content') def test_multipart_resend_first_finishes_last(): bucket_name = get_new_bucket() client = get_client() @@ -8263,11 +6876,6 @@ def test_multipart_resend_first_finishes_last(): _verify_atomic_key_data(bucket_name, key_name, file_size, 'A') -@attr(resource='object') -@attr(method='get') -@attr(operation='range') -@attr(assertion='returns correct data, 206') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_ranged_request_response_code(): content = 'testcontent' @@ -8286,11 +6894,6 @@ def test_ranged_request_response_code(): def _generate_random_string(size): return ''.join(random.choice(string.ascii_letters + string.digits) for _ in range(size)) -@attr(resource='object') -@attr(method='get') 
-@attr(operation='range') -@attr(assertion='returns correct data, 206') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_ranged_big_request_response_code(): content = _generate_random_string(8*1024*1024) @@ -8306,11 +6909,6 @@ def test_ranged_big_request_response_code(): assert response['ResponseMetadata']['HTTPHeaders']['content-range'] == 'bytes 3145728-5242880/8388608' assert response['ResponseMetadata']['HTTPStatusCode'] == 206 -@attr(resource='object') -@attr(method='get') -@attr(operation='range') -@attr(assertion='returns correct data, 206') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_ranged_request_skip_leading_bytes_response_code(): content = 'testcontent' @@ -8326,11 +6924,6 @@ def test_ranged_request_skip_leading_bytes_response_code(): assert response['ResponseMetadata']['HTTPHeaders']['content-range'] == 'bytes 4-10/11' assert response['ResponseMetadata']['HTTPStatusCode'] == 206 -@attr(resource='object') -@attr(method='get') -@attr(operation='range') -@attr(assertion='returns correct data, 206') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_ranged_request_return_trailing_bytes_response_code(): content = 'testcontent' @@ -8346,10 +6939,6 @@ def test_ranged_request_return_trailing_bytes_response_code(): assert response['ResponseMetadata']['HTTPHeaders']['content-range'] == 'bytes 4-10/11' assert response['ResponseMetadata']['HTTPStatusCode'] == 206 -@attr(resource='object') -@attr(method='get') -@attr(operation='range') -@attr(assertion='returns invalid range, 416') def test_ranged_request_invalid_range(): content = 'testcontent' @@ -8364,10 +6953,6 @@ def test_ranged_request_invalid_range(): assert status == 416 assert error_code == 'InvalidRange' -@attr(resource='object') -@attr(method='get') -@attr(operation='range') -@attr(assertion='returns invalid range, 416') def test_ranged_request_empty_object(): content = '' @@ -8382,10 +6967,6 @@ def test_ranged_request_empty_object(): assert status == 
416 assert error_code == 'InvalidRange' -@attr(resource='bucket') -@attr(method='create') -@attr(operation='create versioned bucket') -@attr(assertion='can create and suspend bucket versioning') def test_versioning_bucket_create_suspend(): bucket_name = get_new_bucket() check_versioning(bucket_name, None) @@ -8469,11 +7050,6 @@ def _do_test_create_remove_versions(client, bucket_name, key, num_versions, remo print(response['Versions']) -@attr(resource='object') -@attr(method='create') -@attr(operation='create and remove versioned object') -@attr(assertion='can create access and remove appropriate versions') -@attr('versioning') def test_versioning_obj_create_read_remove(): bucket_name = get_new_bucket() client = get_client() @@ -8488,11 +7064,6 @@ def test_versioning_obj_create_read_remove(): _do_test_create_remove_versions(client, bucket_name, key, num_versions, 4, -1) _do_test_create_remove_versions(client, bucket_name, key, num_versions, 3, 3) -@attr(resource='object') -@attr(method='create') -@attr(operation='create and remove versioned object and head') -@attr(assertion='can create access and remove appropriate versions') -@attr('versioning') def test_versioning_obj_create_read_remove_head(): bucket_name = get_new_bucket() @@ -8527,11 +7098,6 @@ def test_versioning_obj_create_read_remove_head(): clean_up_bucket(client, bucket_name, key, version_ids) -@attr(resource='object') -@attr(method='create') -@attr(operation='create object, then switch to versioning') -@attr(assertion='behaves correctly') -@attr('versioning') def test_versioning_obj_plain_null_version_removal(): bucket_name = get_new_bucket() check_versioning(bucket_name, None) @@ -8552,11 +7118,6 @@ def test_versioning_obj_plain_null_version_removal(): response = client.list_object_versions(Bucket=bucket_name) assert not 'Versions' in response -@attr(resource='object') -@attr(method='create') -@attr(operation='create object, then switch to versioning') -@attr(assertion='behaves correctly') 
-@attr('versioning') def test_versioning_obj_plain_null_version_overwrite(): bucket_name = get_new_bucket() check_versioning(bucket_name, None) @@ -8590,11 +7151,6 @@ def test_versioning_obj_plain_null_version_overwrite(): response = client.list_object_versions(Bucket=bucket_name) assert not 'Versions' in response -@attr(resource='object') -@attr(method='create') -@attr(operation='create object, then switch to versioning') -@attr(assertion='behaves correctly') -@attr('versioning') def test_versioning_obj_plain_null_version_overwrite_suspended(): bucket_name = get_new_bucket() check_versioning(bucket_name, None) @@ -8660,11 +7216,6 @@ def overwrite_suspended_versioning_obj(client, bucket_name, key, version_ids, co return (version_ids, contents) -@attr(resource='object') -@attr(method='create') -@attr(operation='suspend versioned bucket') -@attr(assertion='suspended versioning behaves correctly') -@attr('versioning') def test_versioning_obj_suspend_versions(): bucket_name = get_new_bucket() client = get_client() @@ -8697,11 +7248,6 @@ def test_versioning_obj_suspend_versions(): assert len(version_ids) == 0 assert len(version_ids) == len(contents) -@attr(resource='object') -@attr(method='remove') -@attr(operation='create and remove versions') -@attr(assertion='everything works') -@attr('versioning') def test_versioning_obj_create_versions_remove_all(): bucket_name = get_new_bucket() client = get_client() @@ -8718,11 +7264,6 @@ def test_versioning_obj_create_versions_remove_all(): assert len(version_ids) == 0 assert len(version_ids) == len(contents) -@attr(resource='object') -@attr(method='remove') -@attr(operation='create and remove versions') -@attr(assertion='everything works') -@attr('versioning') def test_versioning_obj_create_versions_remove_special_names(): bucket_name = get_new_bucket() client = get_client() @@ -8740,12 +7281,6 @@ def test_versioning_obj_create_versions_remove_special_names(): assert len(version_ids) == 0 assert len(version_ids) == 
len(contents) -@attr(resource='object') -@attr(method='multipart') -@attr(operation='create and test multipart object') -@attr(assertion='everything works') -@attr('versioning') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_versioning_obj_create_overwrite_multipart(): bucket_name = get_new_bucket() @@ -8775,11 +7310,6 @@ def test_versioning_obj_create_overwrite_multipart(): assert len(version_ids) == 0 assert len(version_ids) == len(contents) -@attr(resource='object') -@attr(method='multipart') -@attr(operation='list versioned objects') -@attr(assertion='everything works') -@attr('versioning') def test_versioning_obj_list_marker(): bucket_name = get_new_bucket() client = get_client() @@ -8835,12 +7365,6 @@ def test_versioning_obj_list_marker(): check_obj_content(client, bucket_name, key, version['VersionId'], contents[j]) i += 1 -@attr(resource='object') -@attr(method='multipart') -@attr(operation='create and test versioned object copying') -@attr(assertion='everything works') -@attr('versioning') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_versioning_copy_obj_version(): bucket_name = get_new_bucket() @@ -8879,11 +7403,6 @@ def test_versioning_copy_obj_version(): body = _get_body(response) assert body == contents[-1] -@attr(resource='object') -@attr(method='delete') -@attr(operation='delete multiple versions') -@attr(assertion='deletes multiple versions of an object with a single call') -@attr('versioning') def test_versioning_multi_object_delete(): bucket_name = get_new_bucket() client = get_client() @@ -8912,11 +7431,6 @@ def test_versioning_multi_object_delete(): response = client.list_object_versions(Bucket=bucket_name) assert not 'Versions' in response -@attr(resource='object') -@attr(method='delete') -@attr(operation='delete multiple versions') -@attr(assertion='deletes multiple versions of an object and delete marker with a single call') -@attr('versioning') def test_versioning_multi_object_delete_with_marker(): 
bucket_name = get_new_bucket() client = get_client() @@ -8958,12 +7472,6 @@ def test_versioning_multi_object_delete_with_marker(): assert not 'Versions' in response assert not 'DeleteMarkers' in response -@attr(resource='object') -@attr(method='delete') -@attr(operation='multi delete create marker') -@attr(assertion='returns correct marker version id') -@attr('versioning') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_versioning_multi_object_delete_with_marker_create(): bucket_name = get_new_bucket() @@ -8983,11 +7491,6 @@ def test_versioning_multi_object_delete_with_marker_create(): assert delete_marker_version_id == delete_markers[0]['VersionId'] assert key == delete_markers[0]['Key'] -@attr(resource='object') -@attr(method='put') -@attr(operation='change acl on an object version changes specific version') -@attr(assertion='works') -@attr('versioning') def test_versioned_object_acl(): bucket_name = get_new_bucket() client = get_client() @@ -9055,12 +7558,6 @@ def test_versioned_object_acl(): grants = response['Grants'] check_grants(grants, default_policy) -@attr(resource='object') -@attr(method='put') -@attr(operation='change acl on an object with no version specified changes latest version') -@attr(assertion='works') -@attr('versioning') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_versioned_object_acl_no_version_specified(): bucket_name = get_new_bucket() @@ -9148,14 +7645,8 @@ def _do_clear_versioned_bucket_concurrent(client, bucket_name): t.append(thr) return t -@attr(resource='object') -@attr(method='put') -@attr(operation='concurrent creation of objects, concurrent removal') -@attr(assertion='works') # TODO: remove fails_on_rgw when https://tracker.ceph.com/issues/39142 is resolved -@attr('fails_on_rgw') @pytest.mark.fails_on_rgw -@attr('versioning') def test_versioned_concurrent_object_create_concurrent_remove(): bucket_name = get_new_bucket() client = get_client() @@ -9180,11 +7671,6 @@ def 
test_versioned_concurrent_object_create_concurrent_remove(): response = client.list_object_versions(Bucket=bucket_name) assert not 'Versions' in response -@attr(resource='object') -@attr(method='put') -@attr(operation='concurrent creation and removal of objects') -@attr(assertion='works') -@attr('versioning') def test_versioned_concurrent_object_create_and_remove(): bucket_name = get_new_bucket() client = get_client() @@ -9213,10 +7699,6 @@ def test_versioned_concurrent_object_create_and_remove(): response = client.list_object_versions(Bucket=bucket_name) assert not 'Versions' in response -@attr(resource='bucket') -@attr(method='put') -@attr(operation='set lifecycle config') -@attr('lifecycle') @pytest.mark.lifecycle def test_lifecycle_set(): bucket_name = get_new_bucket() @@ -9227,10 +7709,6 @@ def test_lifecycle_set(): response = client.put_bucket_lifecycle_configuration(Bucket=bucket_name, LifecycleConfiguration=lifecycle) assert response['ResponseMetadata']['HTTPStatusCode'] == 200 -@attr(resource='bucket') -@attr(method='get') -@attr(operation='get lifecycle config') -@attr('lifecycle') @pytest.mark.lifecycle def test_lifecycle_get(): bucket_name = get_new_bucket() @@ -9242,10 +7720,6 @@ def test_lifecycle_get(): response = client.get_bucket_lifecycle_configuration(Bucket=bucket_name) assert response['Rules'] == rules -@attr(resource='bucket') -@attr(method='get') -@attr(operation='get lifecycle config no id') -@attr('lifecycle') @pytest.mark.lifecycle def test_lifecycle_get_no_id(): bucket_name = get_new_bucket() @@ -9277,16 +7751,9 @@ def test_lifecycle_get_no_id(): assert False # The test harness for lifecycle is configured to treat days as 10 second intervals. 
-@attr(resource='bucket') -@attr(method='put') -@attr(operation='test lifecycle expiration') -@attr('lifecycle') @pytest.mark.lifecycle -@attr('lifecycle_expiration') @pytest.mark.lifecycle_expiration -@attr('fails_on_aws') @pytest.mark.fails_on_aws -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_lifecycle_expiration(): bucket_name = _create_objects(keys=['expire1/foo', 'expire1/bar', 'keep2/foo', @@ -9318,18 +7785,10 @@ def test_lifecycle_expiration(): assert len(keep2_objects) == 4 assert len(expire3_objects) == 2 -@attr(resource='bucket') -@attr(method='put') -@attr(operation='test lifecycle expiration with list-objects-v2') -@attr('lifecycle') @pytest.mark.lifecycle -@attr('lifecycle_expiration') @pytest.mark.lifecycle_expiration -@attr('fails_on_aws') @pytest.mark.fails_on_aws -@attr('list-objects-v2') @pytest.mark.list_objects_v2 -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_lifecyclev2_expiration(): bucket_name = _create_objects(keys=['expire1/foo', 'expire1/bar', 'keep2/foo', @@ -9361,14 +7820,8 @@ def test_lifecyclev2_expiration(): assert len(keep2_objects) == 4 assert len(expire3_objects) == 2 -@attr(resource='bucket') -@attr(method='put') -@attr(operation='test lifecycle expiration on versioning enabled bucket') -@attr('lifecycle') @pytest.mark.lifecycle -@attr('lifecycle_expiration') @pytest.mark.lifecycle_expiration -@attr('fails_on_aws') @pytest.mark.fails_on_aws def test_lifecycle_expiration_versioning_enabled(): bucket_name = get_new_bucket() @@ -9391,14 +7844,8 @@ def test_lifecycle_expiration_versioning_enabled(): assert len(versions) == 1 assert len(delete_markers) == 1 -@attr(resource='bucket') -@attr(method='put') -@attr(operation='test lifecycle expiration with 1 tag') -@attr('lifecycle') @pytest.mark.lifecycle -@attr('lifecycle_expiration') @pytest.mark.lifecycle_expiration -@attr('fails_on_aws') @pytest.mark.fails_on_aws def test_lifecycle_expiration_tags1(): bucket_name = get_new_bucket() @@ -9505,16 
+7952,9 @@ def setup_lifecycle_tags2(client, bucket_name): assert response['ResponseMetadata']['HTTPStatusCode'] == 200 return response -@attr(resource='bucket') -@attr(method='put') -@attr(operation='test lifecycle expiration with 2 tags') -@attr('lifecycle') @pytest.mark.lifecycle -@attr('lifecycle_expiration') @pytest.mark.lifecycle_expiration -@attr('fails_on_aws') @pytest.mark.fails_on_aws -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_lifecycle_expiration_tags2(): bucket_name = get_new_bucket() @@ -9530,16 +7970,9 @@ def test_lifecycle_expiration_tags2(): assert len(expire1_objects) == 1 -@attr(resource='bucket') -@attr(method='put') -@attr(operation='test lifecycle expiration with versioning and 2 tags') -@attr('lifecycle') @pytest.mark.lifecycle -@attr('lifecycle_expiration') @pytest.mark.lifecycle_expiration -@attr('fails_on_aws') @pytest.mark.fails_on_aws -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_lifecycle_expiration_versioned_tags2(): bucket_name = get_new_bucket() @@ -9607,16 +8040,9 @@ def verify_lifecycle_expiration_noncur_tags(client, bucket_name, secs): objs_list = [] return len(objs_list) -@attr(resource='bucket') -@attr(method='put') -@attr(operation='test lifecycle noncurrent expiration with 1 tag filter') -@attr('lifecycle') @pytest.mark.lifecycle -@attr('lifecycle_expiration') @pytest.mark.lifecycle_expiration -@attr('fails_on_aws') @pytest.mark.fails_on_aws -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_lifecycle_expiration_noncur_tags1(): bucket_name = get_new_bucket() @@ -9642,12 +8068,7 @@ def test_lifecycle_expiration_noncur_tags1(): # at T+60, only the current object version should exist assert num_objs == 1 -@attr(resource='bucket') -@attr(method='put') -@attr(operation='id too long in lifecycle rule') -@attr('lifecycle') @pytest.mark.lifecycle -@attr(assertion='fails 400') def test_lifecycle_id_too_long(): bucket_name = get_new_bucket() client = get_client() @@ -9659,12 
+8080,7 @@ def test_lifecycle_id_too_long(): assert status == 400 assert error_code == 'InvalidArgument' -@attr(resource='bucket') -@attr(method='put') -@attr(operation='same id') -@attr('lifecycle') @pytest.mark.lifecycle -@attr(assertion='fails 400') def test_lifecycle_same_id(): bucket_name = get_new_bucket() client = get_client() @@ -9677,12 +8093,7 @@ def test_lifecycle_same_id(): assert status == 400 assert error_code == 'InvalidArgument' -@attr(resource='bucket') -@attr(method='put') -@attr(operation='invalid status in lifecycle rule') -@attr('lifecycle') @pytest.mark.lifecycle -@attr(assertion='fails 400') def test_lifecycle_invalid_status(): bucket_name = get_new_bucket() client = get_client() @@ -9710,10 +8121,6 @@ def test_lifecycle_invalid_status(): assert status == 400 assert error_code == 'MalformedXML' -@attr(resource='bucket') -@attr(method='put') -@attr(operation='set lifecycle config with expiration date') -@attr('lifecycle') @pytest.mark.lifecycle def test_lifecycle_set_date(): bucket_name = get_new_bucket() @@ -9724,12 +8131,7 @@ def test_lifecycle_set_date(): response = client.put_bucket_lifecycle_configuration(Bucket=bucket_name, LifecycleConfiguration=lifecycle) assert response['ResponseMetadata']['HTTPStatusCode'] == 200 -@attr(resource='bucket') -@attr(method='put') -@attr(operation='set lifecycle config with not iso8601 date') -@attr('lifecycle') @pytest.mark.lifecycle -@attr(assertion='fails 400') def test_lifecycle_set_invalid_date(): bucket_name = get_new_bucket() client = get_client() @@ -9740,16 +8142,9 @@ def test_lifecycle_set_invalid_date(): status, error_code = _get_status_and_error_code(e.response) assert status == 400 -@attr(resource='bucket') -@attr(method='put') -@attr(operation='test lifecycle expiration with date') -@attr('lifecycle') @pytest.mark.lifecycle -@attr('lifecycle_expiration') @pytest.mark.lifecycle_expiration -@attr('fails_on_aws') @pytest.mark.fails_on_aws -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore 
def test_lifecycle_expiration_date(): bucket_name = _create_objects(keys=['past/foo', 'future/bar']) @@ -9771,12 +8166,7 @@ def test_lifecycle_expiration_date(): assert len(init_objects) == 2 assert len(expire_objects) == 1 -@attr(resource='bucket') -@attr(method='put') -@attr(operation='test lifecycle expiration days 0') -@attr('lifecycle') @pytest.mark.lifecycle -@attr('lifecycle_expiration') @pytest.mark.lifecycle_expiration def test_lifecycle_expiration_days0(): bucket_name = _create_objects(keys=['days0/foo', 'days0/bar']) @@ -9827,12 +8217,7 @@ def check_lifecycle_expiration_header(response, start_time, rule_id, return days_to_expire and rule_eq_id -@attr(resource='bucket') -@attr(method='put') -@attr(operation='test lifecycle expiration header put') -@attr('lifecycle') @pytest.mark.lifecycle -@attr('lifecycle_expiration') @pytest.mark.lifecycle_expiration def test_lifecycle_expiration_header_put(): bucket_name = get_new_bucket() @@ -9843,14 +8228,8 @@ def test_lifecycle_expiration_header_put(): client, bucket_name, 'rule1', 1, 'days1/') assert check_lifecycle_expiration_header(response, now, 'rule1', 1) -@attr(resource='bucket') -@attr(method='head') -@attr(operation='test lifecycle expiration header head') -@attr('lifecycle') @pytest.mark.lifecycle -@attr('lifecycle_expiration') @pytest.mark.lifecycle_expiration -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_lifecycle_expiration_header_head(): bucket_name = get_new_bucket() @@ -9867,14 +8246,8 @@ def test_lifecycle_expiration_header_head(): assert response['ResponseMetadata']['HTTPStatusCode'] == 200 assert check_lifecycle_expiration_header(response, now, 'rule1', 1) -@attr(resource='bucket') -@attr(method='head') -@attr(operation='test lifecycle expiration header head with tags') -@attr('lifecycle') @pytest.mark.lifecycle -@attr('lifecycle_expiration') @pytest.mark.lifecycle_expiration -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def 
test_lifecycle_expiration_header_tags_head(): bucket_name = get_new_bucket() @@ -9929,12 +8302,7 @@ def test_lifecycle_expiration_header_tags_head(): assert response['ResponseMetadata']['HTTPStatusCode'] == 200 assert check_lifecycle_expiration_header(response, datetime.datetime.now(None), 'rule1', 1) -@attr(resource='bucket') -@attr(method='head') -@attr(operation='test lifecycle expiration header head with tags and And') -@attr('lifecycle') @pytest.mark.lifecycle -@attr('lifecycle_expiration') @pytest.mark.lifecycle_expiration def test_lifecycle_expiration_header_and_tags_head(): now = datetime.datetime.now(None) @@ -9979,10 +8347,6 @@ def test_lifecycle_expiration_header_and_tags_head(): assert response['ResponseMetadata']['HTTPStatusCode'] == 200 assert check_lifecycle_expiration_header(response, datetime.datetime.now(None), 'rule1', 1) -@attr(resource='bucket') -@attr(method='put') -@attr(operation='set lifecycle config with noncurrent version expiration') -@attr('lifecycle') @pytest.mark.lifecycle def test_lifecycle_set_noncurrent(): bucket_name = _create_objects(keys=['past/foo', 'future/bar']) @@ -9993,16 +8357,9 @@ def test_lifecycle_set_noncurrent(): response = client.put_bucket_lifecycle_configuration(Bucket=bucket_name, LifecycleConfiguration=lifecycle) assert response['ResponseMetadata']['HTTPStatusCode'] == 200 -@attr(resource='bucket') -@attr(method='put') -@attr(operation='test lifecycle non-current version expiration') -@attr('lifecycle') @pytest.mark.lifecycle -@attr('lifecycle_expiration') @pytest.mark.lifecycle_expiration -@attr('fails_on_aws') @pytest.mark.fails_on_aws -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_lifecycle_noncur_expiration(): bucket_name = get_new_bucket() @@ -10029,10 +8386,6 @@ def test_lifecycle_noncur_expiration(): assert len(init_versions) == 6 assert len(expire_versions) == 4 -@attr(resource='bucket') -@attr(method='put') -@attr(operation='set lifecycle config with delete marker expiration') 
-@attr('lifecycle') @pytest.mark.lifecycle def test_lifecycle_set_deletemarker(): bucket_name = get_new_bucket() @@ -10042,10 +8395,6 @@ def test_lifecycle_set_deletemarker(): response = client.put_bucket_lifecycle_configuration(Bucket=bucket_name, LifecycleConfiguration=lifecycle) assert response['ResponseMetadata']['HTTPStatusCode'] == 200 -@attr(resource='bucket') -@attr(method='put') -@attr(operation='set lifecycle config with Filter') -@attr('lifecycle') @pytest.mark.lifecycle def test_lifecycle_set_filter(): bucket_name = get_new_bucket() @@ -10055,10 +8404,6 @@ def test_lifecycle_set_filter(): response = client.put_bucket_lifecycle_configuration(Bucket=bucket_name, LifecycleConfiguration=lifecycle) assert response['ResponseMetadata']['HTTPStatusCode'] == 200 -@attr(resource='bucket') -@attr(method='put') -@attr(operation='set lifecycle config with empty Filter') -@attr('lifecycle') @pytest.mark.lifecycle def test_lifecycle_set_empty_filter(): bucket_name = get_new_bucket() @@ -10068,16 +8413,9 @@ def test_lifecycle_set_empty_filter(): response = client.put_bucket_lifecycle_configuration(Bucket=bucket_name, LifecycleConfiguration=lifecycle) assert response['ResponseMetadata']['HTTPStatusCode'] == 200 -@attr(resource='bucket') -@attr(method='put') -@attr(operation='test lifecycle delete marker expiration') -@attr('lifecycle') @pytest.mark.lifecycle -@attr('lifecycle_expiration') @pytest.mark.lifecycle_expiration -@attr('fails_on_aws') @pytest.mark.fails_on_aws -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_lifecycle_deletemarker_expiration(): bucket_name = get_new_bucket() @@ -10110,10 +8448,6 @@ def test_lifecycle_deletemarker_expiration(): assert len(total_init_versions) == 4 assert len(total_expire_versions) == 2 -@attr(resource='bucket') -@attr(method='put') -@attr(operation='set lifecycle config with multipart expiration') -@attr('lifecycle') @pytest.mark.lifecycle def test_lifecycle_set_multipart(): bucket_name = get_new_bucket() @@ 
-10128,16 +8462,9 @@ def test_lifecycle_set_multipart(): response = client.put_bucket_lifecycle_configuration(Bucket=bucket_name, LifecycleConfiguration=lifecycle) assert response['ResponseMetadata']['HTTPStatusCode'] == 200 -@attr(resource='bucket') -@attr(method='put') -@attr(operation='test lifecycle multipart expiration') -@attr('lifecycle') @pytest.mark.lifecycle -@attr('lifecycle_expiration') @pytest.mark.lifecycle_expiration -@attr('fails_on_aws') @pytest.mark.fails_on_aws -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_lifecycle_multipart_expiration(): bucket_name = get_new_bucket() @@ -10170,12 +8497,7 @@ def test_lifecycle_multipart_expiration(): assert len(init_uploads) == 2 assert len(expired_uploads) == 1 -@attr(resource='bucket') -@attr(method='put') -@attr(operation='set lifecycle config transition with not iso8601 date') -@attr('lifecycle') @pytest.mark.lifecycle -@attr(assertion='fails 400') def test_lifecycle_transition_set_invalid_date(): bucket_name = get_new_bucket() client = get_client() @@ -10212,14 +8534,8 @@ def _test_encryption_sse_customer_write(file_size): assert body == data # The test harness for lifecycle is configured to treat days as 10 second intervals. -@attr(resource='bucket') -@attr(method='put') -@attr(operation='test lifecycle transition') -@attr('lifecycle') @pytest.mark.lifecycle -@attr('lifecycle_transition') @pytest.mark.lifecycle_transition -@attr('fails_on_aws') @pytest.mark.fails_on_aws def test_lifecycle_transition(): sc = configured_storage_classes() @@ -10263,14 +8579,8 @@ def test_lifecycle_transition(): assert len(expire3_keys[sc[2]]) == 2 # The test harness for lifecycle is configured to treat days as 10 second intervals. 
-@attr(resource='bucket') -@attr(method='put') -@attr(operation='test lifecycle expiration') -@attr('lifecycle') @pytest.mark.lifecycle -@attr('lifecycle_transition') @pytest.mark.lifecycle_transition -@attr('fails_on_aws') @pytest.mark.fails_on_aws def test_lifecycle_transition_single_rule_multi_trans(): sc = configured_storage_classes() @@ -10312,12 +8622,7 @@ def test_lifecycle_transition_single_rule_multi_trans(): assert len(expire3_keys[sc[1]]) == 0 assert len(expire3_keys[sc[2]]) == 2 -@attr(resource='bucket') -@attr(method='put') -@attr(operation='set lifecycle config with noncurrent version expiration') -@attr('lifecycle') @pytest.mark.lifecycle -@attr('lifecycle_transition') @pytest.mark.lifecycle_transition def test_lifecycle_set_noncurrent_transition(): sc = configured_storage_classes() @@ -10353,16 +8658,9 @@ def test_lifecycle_set_noncurrent_transition(): assert response['ResponseMetadata']['HTTPStatusCode'] == 200 -@attr(resource='bucket') -@attr(method='put') -@attr(operation='test lifecycle non-current version expiration') -@attr('lifecycle') @pytest.mark.lifecycle -@attr('lifecycle_expiration') @pytest.mark.lifecycle_expiration -@attr('lifecycle_transition') @pytest.mark.lifecycle_transition -@attr('fails_on_aws') @pytest.mark.fails_on_aws def test_lifecycle_noncur_transition(): sc = configured_storage_classes() @@ -10438,18 +8736,10 @@ def verify_object(client, bucket, key, content=None, sc=None): assert body == content # The test harness for lifecycle is configured to treat days as 10 second intervals. 
-@attr(resource='bucket') -@attr(method='put') -@attr(operation='test lifecycle transition for cloud') -@attr('lifecycle') @pytest.mark.lifecycle -@attr('lifecycle_transition') @pytest.mark.lifecycle_transition -@attr('cloud_transition') @pytest.mark.cloud_transition -@attr('fails_on_aws') @pytest.mark.fails_on_aws -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_lifecycle_cloud_transition(): cloud_sc = get_cloud_storage_class() @@ -10528,18 +8818,10 @@ def test_lifecycle_cloud_transition(): assert error_code == 'NoSuchKey' # Similar to 'test_lifecycle_transition' but for cloud transition -@attr(resource='bucket') -@attr(method='put') -@attr(operation='test lifecycle transition for cloud') -@attr('lifecycle') @pytest.mark.lifecycle -@attr('lifecycle_transition') @pytest.mark.lifecycle_transition -@attr('cloud_transition') @pytest.mark.cloud_transition -@attr('fails_on_aws') @pytest.mark.fails_on_aws -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_lifecycle_cloud_multiple_transition(): cloud_sc = get_cloud_storage_class() @@ -10599,20 +8881,11 @@ def test_lifecycle_cloud_multiple_transition(): assert len(expire3_keys[sc[2]]) == 0 # Noncurrent objects for cloud transition -@attr(resource='bucket') -@attr(method='put') -@attr(operation='test lifecycle non-current version expiration on cloud transition') -@attr('lifecycle') @pytest.mark.lifecycle -@attr('lifecycle_expiration') @pytest.mark.lifecycle_expiration -@attr('lifecycle_transition') @pytest.mark.lifecycle_transition -@attr('cloud_transition') @pytest.mark.cloud_transition -@attr('fails_on_aws') @pytest.mark.fails_on_aws -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_lifecycle_noncur_cloud_transition(): cloud_sc = get_cloud_storage_class() @@ -10697,18 +8970,10 @@ def test_lifecycle_noncur_cloud_transition(): verify_object(cloud_client, target_path, expire1_key1_str, None, target_sc) # The test harness for lifecycle is configured to treat days as 10 second 
intervals. -@attr(resource='bucket') -@attr(method='put') -@attr(operation='test lifecycle transition for cloud') -@attr('lifecycle') @pytest.mark.lifecycle -@attr('lifecycle_transition') @pytest.mark.lifecycle_transition -@attr('cloud_transition') @pytest.mark.cloud_transition -@attr('fails_on_aws') @pytest.mark.fails_on_aws -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_lifecycle_cloud_transition_large_obj(): cloud_sc = get_cloud_storage_class() @@ -10759,56 +9024,30 @@ def test_lifecycle_cloud_transition_large_obj(): expire1_key1_str = prefix + keys[1] verify_object(cloud_client, target_path, expire1_key1_str, data, target_sc) -@attr(resource='object') -@attr(method='put') -@attr(operation='Test SSE-C encrypted transfer 1 byte') -@attr(assertion='success') -@attr('encryption') @pytest.mark.encryption -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_encrypted_transfer_1b(): _test_encryption_sse_customer_write(1) -@attr(resource='object') -@attr(method='put') -@attr(operation='Test SSE-C encrypted transfer 1KB') -@attr(assertion='success') -@attr('encryption') @pytest.mark.encryption -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_encrypted_transfer_1kb(): _test_encryption_sse_customer_write(1024) -@attr(resource='object') -@attr(method='put') -@attr(operation='Test SSE-C encrypted transfer 1MB') -@attr(assertion='success') -@attr('encryption') @pytest.mark.encryption -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_encrypted_transfer_1MB(): _test_encryption_sse_customer_write(1024*1024) -@attr(resource='object') -@attr(method='put') -@attr(operation='Test SSE-C encrypted transfer 13 bytes') -@attr(assertion='success') -@attr('encryption') @pytest.mark.encryption -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_encrypted_transfer_13b(): _test_encryption_sse_customer_write(13) -@attr(assertion='success') -@attr('encryption') @pytest.mark.encryption def 
test_encryption_sse_c_method_head(): bucket_name = get_new_bucket() @@ -10834,11 +9073,6 @@ def test_encryption_sse_c_method_head(): response = client.head_object(Bucket=bucket_name, Key=key) assert response['ResponseMetadata']['HTTPStatusCode'] == 200 -@attr(resource='object') -@attr(method='put') -@attr(operation='write encrypted with SSE-C and read without SSE-C') -@attr(assertion='operation fails') -@attr('encryption') @pytest.mark.encryption def test_encryption_sse_c_present(): bucket_name = get_new_bucket() @@ -10859,11 +9093,6 @@ def test_encryption_sse_c_present(): status, error_code = _get_status_and_error_code(e.response) assert status == 400 -@attr(resource='object') -@attr(method='put') -@attr(operation='write encrypted with SSE-C but read with other key') -@attr(assertion='operation fails') -@attr('encryption') @pytest.mark.encryption def test_encryption_sse_c_other_key(): bucket_name = get_new_bucket() @@ -10891,11 +9120,6 @@ def test_encryption_sse_c_other_key(): status, error_code = _get_status_and_error_code(e.response) assert status == 400 -@attr(resource='object') -@attr(method='put') -@attr(operation='write encrypted with SSE-C, but md5 is bad') -@attr(assertion='operation fails') -@attr('encryption') @pytest.mark.encryption def test_encryption_sse_c_invalid_md5(): bucket_name = get_new_bucket() @@ -10914,11 +9138,6 @@ def test_encryption_sse_c_invalid_md5(): status, error_code = _get_status_and_error_code(e.response) assert status == 400 -@attr(resource='object') -@attr(method='put') -@attr(operation='write encrypted with SSE-C, but dont provide MD5') -@attr(assertion='operation fails') -@attr('encryption') @pytest.mark.encryption def test_encryption_sse_c_no_md5(): bucket_name = get_new_bucket() @@ -10934,11 +9153,6 @@ def test_encryption_sse_c_no_md5(): client.meta.events.register('before-call.s3.PutObject', lf) e = assert_raises(ClientError, client.put_object, Bucket=bucket_name, Key=key, Body=data) -@attr(resource='object') 
-@attr(method='put') -@attr(operation='declare SSE-C but do not provide key') -@attr(assertion='operation fails') -@attr('encryption') @pytest.mark.encryption def test_encryption_sse_c_no_key(): bucket_name = get_new_bucket() @@ -10953,11 +9167,6 @@ def test_encryption_sse_c_no_key(): client.meta.events.register('before-call.s3.PutObject', lf) e = assert_raises(ClientError, client.put_object, Bucket=bucket_name, Key=key, Body=data) -@attr(resource='object') -@attr(method='put') -@attr(operation='Do not declare SSE-C but provide key and MD5') -@attr(assertion='operation successfull, no encryption') -@attr('encryption') @pytest.mark.encryption def test_encryption_key_no_sse_c(): bucket_name = get_new_bucket() @@ -11026,15 +9235,8 @@ def _check_content_using_range_enc(client, bucket_name, key, data, step, enc_hea assert read_range == toread assert body == data[ofs:end+1] -@attr(resource='object') -@attr(method='put') -@attr(operation='complete multi-part upload') -@attr(assertion='successful') -@attr('encryption') @pytest.mark.encryption -@attr('fails_on_aws') # allow-unordered is a non-standard extension @pytest.mark.fails_on_aws -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_encryption_sse_c_multipart_upload(): bucket_name = get_new_bucket() @@ -11079,14 +9281,8 @@ def test_encryption_sse_c_multipart_upload(): _check_content_using_range_enc(client, bucket_name, key, data, 1000000, enc_headers=enc_headers) _check_content_using_range_enc(client, bucket_name, key, data, 10000000, enc_headers=enc_headers) -@attr(resource='object') -@attr(method='put') -@attr(operation='multipart upload with bad key for uploading chunks') -@attr(assertion='successful') -@attr('encryption') @pytest.mark.encryption # TODO: remove this fails_on_rgw when I fix it -@attr('fails_on_rgw') @pytest.mark.fails_on_rgw def test_encryption_sse_c_multipart_invalid_chunks_1(): bucket_name = get_new_bucket() @@ -11113,14 +9309,8 @@ def 
test_encryption_sse_c_multipart_invalid_chunks_1(): status, error_code = _get_status_and_error_code(e.response) assert status == 400 -@attr(resource='object') -@attr(method='put') -@attr(operation='multipart upload with bad md5 for chunks') -@attr(assertion='successful') -@attr('encryption') @pytest.mark.encryption # TODO: remove this fails_on_rgw when I fix it -@attr('fails_on_rgw') @pytest.mark.fails_on_rgw def test_encryption_sse_c_multipart_invalid_chunks_2(): bucket_name = get_new_bucket() @@ -11147,13 +9337,7 @@ def test_encryption_sse_c_multipart_invalid_chunks_2(): status, error_code = _get_status_and_error_code(e.response) assert status == 400 -@attr(resource='object') -@attr(method='put') -@attr(operation='complete multi-part upload and download with bad key') -@attr(assertion='successful') -@attr('encryption') @pytest.mark.encryption -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_encryption_sse_c_multipart_bad_download(): bucket_name = get_new_bucket() @@ -11202,13 +9386,7 @@ def test_encryption_sse_c_multipart_bad_download(): assert status == 400 -@attr(resource='object') -@attr(method='post') -@attr(operation='authenticated browser based upload via POST request') -@attr(assertion='succeeds and returns written data') -@attr('encryption') @pytest.mark.encryption -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_encryption_sse_c_post_object_authenticated_request(): bucket_name = get_new_bucket() @@ -11262,10 +9440,7 @@ def test_encryption_sse_c_post_object_authenticated_request(): body = _get_body(response) assert body == 'bar' -@attr(assertion='success') -@attr('encryption') @pytest.mark.encryption -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def _test_sse_kms_customer_write(file_size, key_id = 'testkey-1'): """ @@ -11294,13 +9469,7 @@ def _test_sse_kms_customer_write(file_size, key_id = 'testkey-1'): -@attr(resource='object') -@attr(method='head') -@attr(operation='Test SSE-KMS encrypted does perform head 
properly') -@attr(assertion='success') -@attr('encryption') @pytest.mark.encryption -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_sse_kms_method_head(): kms_keyid = get_main_kms_keyid() @@ -11327,13 +9496,7 @@ def test_sse_kms_method_head(): status, error_code = _get_status_and_error_code(e.response) assert status == 400 -@attr(resource='object') -@attr(method='put') -@attr(operation='write encrypted with SSE-KMS and read without SSE-KMS') -@attr(assertion='operation success') -@attr('encryption') @pytest.mark.encryption -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_sse_kms_present(): kms_keyid = get_main_kms_keyid() @@ -11354,11 +9517,6 @@ def test_sse_kms_present(): body = _get_body(response) assert body == data -@attr(resource='object') -@attr(method='put') -@attr(operation='declare SSE-KMS but do not provide key_id') -@attr(assertion='operation fails') -@attr('encryption') @pytest.mark.encryption def test_sse_kms_no_key(): bucket_name = get_new_bucket() @@ -11375,11 +9533,6 @@ def test_sse_kms_no_key(): e = assert_raises(ClientError, client.put_object, Bucket=bucket_name, Key=key, Body=data) -@attr(resource='object') -@attr(method='put') -@attr(operation='Do not declare SSE-KMS but provide key_id') -@attr(assertion='operation successfull, no encryption') -@attr('encryption') @pytest.mark.encryption def test_sse_kms_not_declared(): bucket_name = get_new_bucket() @@ -11397,13 +9550,7 @@ def test_sse_kms_not_declared(): status, error_code = _get_status_and_error_code(e.response) assert status == 400 -@attr(resource='object') -@attr(method='put') -@attr(operation='complete KMS multi-part upload') -@attr(assertion='successful') -@attr('encryption') @pytest.mark.encryption -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_sse_kms_multipart_upload(): kms_keyid = get_main_kms_keyid() @@ -11450,13 +9597,7 @@ def test_sse_kms_multipart_upload(): _check_content_using_range(key, bucket_name, data, 10000000) 
-@attr(resource='object') -@attr(method='put') -@attr(operation='multipart KMS upload with bad key_id for uploading chunks') -@attr(assertion='successful') -@attr('encryption') @pytest.mark.encryption -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_sse_kms_multipart_invalid_chunks_1(): kms_keyid = get_main_kms_keyid() @@ -11483,13 +9624,7 @@ def test_sse_kms_multipart_invalid_chunks_1(): resend_parts=resend_parts) -@attr(resource='object') -@attr(method='put') -@attr(operation='multipart KMS upload with unexistent key_id for chunks') -@attr(assertion='successful') -@attr('encryption') @pytest.mark.encryption -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_sse_kms_multipart_invalid_chunks_2(): kms_keyid = get_main_kms_keyid() @@ -11515,13 +9650,7 @@ def test_sse_kms_multipart_invalid_chunks_2(): resend_parts=resend_parts) -@attr(resource='object') -@attr(method='post') -@attr(operation='authenticated KMS browser based upload via POST request') -@attr(assertion='succeeds and returns written data') -@attr('encryption') @pytest.mark.encryption -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_sse_kms_post_object_authenticated_request(): kms_keyid = get_main_kms_keyid() @@ -11567,13 +9696,7 @@ def test_sse_kms_post_object_authenticated_request(): body = _get_body(response) assert body == 'bar' -@attr(resource='object') -@attr(method='put') -@attr(operation='Test SSE-KMS encrypted transfer 1 byte') -@attr(assertion='success') -@attr('encryption') @pytest.mark.encryption -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_sse_kms_transfer_1b(): kms_keyid = get_main_kms_keyid() @@ -11582,13 +9705,7 @@ def test_sse_kms_transfer_1b(): _test_sse_kms_customer_write(1, key_id = kms_keyid) -@attr(resource='object') -@attr(method='put') -@attr(operation='Test SSE-KMS encrypted transfer 1KB') -@attr(assertion='success') -@attr('encryption') @pytest.mark.encryption -@attr('fails_on_dbstore') 
@pytest.mark.fails_on_dbstore def test_sse_kms_transfer_1kb(): kms_keyid = get_main_kms_keyid() @@ -11597,13 +9714,7 @@ def test_sse_kms_transfer_1kb(): _test_sse_kms_customer_write(1024, key_id = kms_keyid) -@attr(resource='object') -@attr(method='put') -@attr(operation='Test SSE-KMS encrypted transfer 1MB') -@attr(assertion='success') -@attr('encryption') @pytest.mark.encryption -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_sse_kms_transfer_1MB(): kms_keyid = get_main_kms_keyid() @@ -11612,13 +9723,7 @@ def test_sse_kms_transfer_1MB(): _test_sse_kms_customer_write(1024*1024, key_id = kms_keyid) -@attr(resource='object') -@attr(method='put') -@attr(operation='Test SSE-KMS encrypted transfer 13 bytes') -@attr(assertion='success') -@attr('encryption') @pytest.mark.encryption -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_sse_kms_transfer_13b(): kms_keyid = get_main_kms_keyid() @@ -11627,11 +9732,6 @@ def test_sse_kms_transfer_13b(): _test_sse_kms_customer_write(13, key_id = kms_keyid) -@attr(resource='object') -@attr(method='get') -@attr(operation='write encrypted with SSE-KMS and read with SSE-KMS') -@attr(assertion='operation fails') -@attr('encryption') @pytest.mark.encryption def test_sse_kms_read_declare(): bucket_name = get_new_bucket() @@ -11651,11 +9751,6 @@ def test_sse_kms_read_declare(): status, error_code = _get_status_and_error_code(e.response) assert status == 400 -@attr(resource='bucket') -@attr(method='get') -@attr(operation='Test Bucket Policy') -@attr(assertion='succeeds') -@attr('bucket-policy') @pytest.mark.bucket_policy def test_bucket_policy(): bucket_name = get_new_bucket() @@ -11685,9 +9780,7 @@ def test_bucket_policy(): response = alt_client.list_objects(Bucket=bucket_name) assert len(response['Contents']) == 1 -@attr('bucket-policy') @pytest.mark.bucket_policy -@attr('list-objects-v2') @pytest.mark.list_objects_v2 def test_bucketv2_policy(): bucket_name = get_new_bucket() @@ -11717,11 +9810,6 @@ def 
test_bucketv2_policy(): response = alt_client.list_objects_v2(Bucket=bucket_name) assert len(response['Contents']) == 1 -@attr(resource='bucket') -@attr(method='get') -@attr(operation='Test Bucket Policy and ACL') -@attr(assertion='fails') -@attr('bucket-policy') @pytest.mark.bucket_policy def test_bucket_policy_acl(): bucket_name = get_new_bucket() @@ -11757,13 +9845,7 @@ def test_bucket_policy_acl(): client.delete_bucket_policy(Bucket=bucket_name) client.put_bucket_acl(Bucket=bucket_name, ACL='public-read') -@attr(resource='bucket') -@attr(method='get') -@attr(operation='Test Bucket Policy and ACL with list-objects-v2') -@attr(assertion='fails') -@attr('bucket-policy') @pytest.mark.bucket_policy -@attr('list-objects-v2') @pytest.mark.list_objects_v2 def test_bucketv2_policy_acl(): bucket_name = get_new_bucket() @@ -11799,14 +9881,8 @@ def test_bucketv2_policy_acl(): client.delete_bucket_policy(Bucket=bucket_name) client.put_bucket_acl(Bucket=bucket_name, ACL='public-read') -@attr(resource='bucket') -@attr(method='get') -@attr(operation='Test Bucket Policy for a user belonging to a different tenant') -@attr(assertion='succeeds') -@attr('bucket-policy') @pytest.mark.bucket_policy # TODO: remove this fails_on_rgw when I fix it -@attr('fails_on_rgw') @pytest.mark.fails_on_rgw def test_bucket_policy_different_tenant(): bucket_name = get_new_bucket() @@ -11849,16 +9925,9 @@ def test_bucket_policy_different_tenant(): assert len(response['Contents']) == 1 -@attr(resource='bucket') -@attr(method='get') -@attr(operation='Test Bucket Policy for a user belonging to a different tenant') -@attr(assertion='succeeds') -@attr('bucket-policy') @pytest.mark.bucket_policy # TODO: remove this fails_on_rgw when I fix it -@attr('fails_on_rgw') @pytest.mark.fails_on_rgw -@attr('list-objects-v2') @pytest.mark.list_objects_v2 def test_bucketv2_policy_different_tenant(): bucket_name = get_new_bucket() @@ -11901,11 +9970,6 @@ def test_bucketv2_policy_different_tenant(): assert 
len(response['Contents']) == 1 -@attr(resource='bucket') -@attr(method='get') -@attr(operation='Test Bucket Policy on another bucket') -@attr(assertion='succeeds') -@attr('bucket-policy') @pytest.mark.bucket_policy def test_bucket_policy_another_bucket(): bucket_name = get_new_bucket() @@ -11943,13 +10007,7 @@ def test_bucket_policy_another_bucket(): response = alt_client.list_objects(Bucket=bucket_name2) assert len(response['Contents']) == 1 -@attr(resource='bucket') -@attr(method='get') -@attr(operation='Test Bucket Policy on another bucket with list-objects-v2') -@attr(assertion='succeeds') -@attr('bucket-policy') @pytest.mark.bucket_policy -@attr('list-objects-v2') @pytest.mark.list_objects_v2 def test_bucketv2_policy_another_bucket(): bucket_name = get_new_bucket() @@ -11987,13 +10045,8 @@ def test_bucketv2_policy_another_bucket(): response = alt_client.list_objects_v2(Bucket=bucket_name2) assert len(response['Contents']) == 1 -@attr(resource='bucket') -@attr(method='put') -@attr(operation='Test put condition operator end with ifExists') -@attr('bucket-policy') @pytest.mark.bucket_policy # TODO: remove this fails_on_rgw when I fix it -@attr('fails_on_rgw') @pytest.mark.fails_on_rgw def test_bucket_policy_set_condition_operator_end_with_IfExists(): bucket_name = get_new_bucket() @@ -12063,13 +10116,7 @@ def _make_random_string(size): return ''.join(random.choice(string.ascii_letters) for _ in range(size)) -@attr(resource='object') -@attr(method='get') -@attr(operation='Test Get/PutObjTagging output') -@attr(assertion='success') -@attr('tagging') @pytest.mark.tagging -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_get_obj_tagging(): key = 'testputtags' @@ -12084,11 +10131,6 @@ def test_get_obj_tagging(): assert response['TagSet'] == input_tagset['TagSet'] -@attr(resource='object') -@attr(method='get') -@attr(operation='Test HEAD obj tagging output') -@attr(assertion='success') -@attr('tagging') @pytest.mark.tagging def 
test_get_obj_head_tagging(): key = 'testputtags' @@ -12104,13 +10146,7 @@ def test_get_obj_head_tagging(): assert response['ResponseMetadata']['HTTPStatusCode'] == 200 assert response['ResponseMetadata']['HTTPHeaders']['x-amz-tagging-count'] == str(count) -@attr(resource='object') -@attr(method='get') -@attr(operation='Test Put max allowed tags') -@attr(assertion='success') -@attr('tagging') @pytest.mark.tagging -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_put_max_tags(): key = 'testputmaxtags' @@ -12124,11 +10160,6 @@ def test_put_max_tags(): response = client.get_object_tagging(Bucket=bucket_name, Key=key) assert response['TagSet'] == input_tagset['TagSet'] -@attr(resource='object') -@attr(method='get') -@attr(operation='Test Put max allowed tags') -@attr(assertion='fails') -@attr('tagging') @pytest.mark.tagging def test_put_excess_tags(): key = 'testputmaxtags' @@ -12144,11 +10175,6 @@ def test_put_excess_tags(): response = client.get_object_tagging(Bucket=bucket_name, Key=key) assert len(response['TagSet']) == 0 -@attr(resource='object') -@attr(method='get') -@attr(operation='Test Put max allowed k-v size') -@attr(assertion='success') -@attr('tagging') @pytest.mark.tagging def test_put_max_kvsize_tags(): key = 'testputmaxkeysize' @@ -12170,11 +10196,6 @@ def test_put_max_kvsize_tags(): for kv_pair in response['TagSet']: assert kv_pair in input_tagset['TagSet'] -@attr(resource='object') -@attr(method='get') -@attr(operation='Test exceed key size') -@attr(assertion='success') -@attr('tagging') @pytest.mark.tagging def test_put_excess_key_tags(): key = 'testputexcesskeytags' @@ -12197,11 +10218,6 @@ def test_put_excess_key_tags(): response = client.get_object_tagging(Bucket=bucket_name, Key=key) assert len(response['TagSet']) == 0 -@attr(resource='object') -@attr(method='get') -@attr(operation='Test exceed val size') -@attr(assertion='success') -@attr('tagging') @pytest.mark.tagging def test_put_excess_val_tags(): key = 'testputexcesskeytags' 
@@ -12224,13 +10240,7 @@ def test_put_excess_val_tags(): response = client.get_object_tagging(Bucket=bucket_name, Key=key) assert len(response['TagSet']) == 0 -@attr(resource='object') -@attr(method='get') -@attr(operation='Test PUT modifies existing tags') -@attr(assertion='success') -@attr('tagging') @pytest.mark.tagging -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_put_modify_tags(): key = 'testputmodifytags' @@ -12260,13 +10270,7 @@ def test_put_modify_tags(): response = client.get_object_tagging(Bucket=bucket_name, Key=key) assert response['TagSet'] == input_tagset2['TagSet'] -@attr(resource='object') -@attr(method='get') -@attr(operation='Test Delete tags') -@attr(assertion='success') -@attr('tagging') @pytest.mark.tagging -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_put_delete_tags(): key = 'testputmodifytags' @@ -12286,13 +10290,7 @@ def test_put_delete_tags(): response = client.get_object_tagging(Bucket=bucket_name, Key=key) assert len(response['TagSet']) == 0 -@attr(resource='object') -@attr(method='post') -@attr(operation='anonymous browser based upload via POST request') -@attr('tagging') @pytest.mark.tagging -@attr(assertion='succeeds and returns written data') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_post_object_tags_anonymous_request(): bucket_name = get_new_bucket_name() @@ -12324,12 +10322,7 @@ def test_post_object_tags_anonymous_request(): response = client.get_object_tagging(Bucket=bucket_name, Key=key_name) assert response['TagSet'] == input_tagset['TagSet'] -@attr(resource='object') -@attr(method='post') -@attr(operation='authenticated browser based upload via POST request') -@attr('tagging') @pytest.mark.tagging -@attr(assertion='succeeds and returns written data') def test_post_object_tags_authenticated_request(): bucket_name = get_new_bucket() client = get_client() @@ -12375,13 +10368,7 @@ def test_post_object_tags_authenticated_request(): assert body == 'bar' 
-@attr(resource='object') -@attr(method='put') -@attr(operation='Test PutObj with tagging headers') -@attr(assertion='success') -@attr('tagging') @pytest.mark.tagging -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_put_obj_with_tags(): bucket_name = get_new_bucket() @@ -12413,15 +10400,8 @@ def test_put_obj_with_tags(): def _make_arn_resource(path="*"): return "arn:aws:s3:::{}".format(path) -@attr(resource='object') -@attr(method='get') -@attr(operation='Test GetObjTagging public read') -@attr(assertion='success') -@attr('tagging') @pytest.mark.tagging -@attr('bucket-policy') @pytest.mark.bucket_policy -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_get_tags_acl_public(): key = 'testputtagsacl' @@ -12443,15 +10423,8 @@ def test_get_tags_acl_public(): response = alt_client.get_object_tagging(Bucket=bucket_name, Key=key) assert response['TagSet'] == input_tagset['TagSet'] -@attr(resource='object') -@attr(method='get') -@attr(operation='Test PutObjTagging public wrote') -@attr(assertion='success') -@attr('tagging') @pytest.mark.tagging -@attr('bucket-policy') @pytest.mark.bucket_policy -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_put_tags_acl_public(): key = 'testputtagsacl' @@ -12472,13 +10445,7 @@ def test_put_tags_acl_public(): response = client.get_object_tagging(Bucket=bucket_name, Key=key) assert response['TagSet'] == input_tagset['TagSet'] -@attr(resource='object') -@attr(method='get') -@attr(operation='test deleteobjtagging public') -@attr(assertion='success') -@attr('tagging') @pytest.mark.tagging -@attr('bucket-policy') @pytest.mark.bucket_policy def test_delete_tags_obj_public(): key = 'testputtagsacl' @@ -12503,11 +10470,6 @@ def test_delete_tags_obj_public(): response = client.get_object_tagging(Bucket=bucket_name, Key=key) assert len(response['TagSet']) == 0 -@attr(resource='object') -@attr(method='put') -@attr(operation='test whether a correct version-id returned') -@attr(assertion='version-id is 
same as bucket list') -@attr('versioning') def test_versioning_bucket_atomic_upload_return_version_id(): bucket_name = get_new_bucket() client = get_client() @@ -12537,11 +10499,6 @@ def test_versioning_bucket_atomic_upload_return_version_id(): response = client.put_object(Bucket=bucket_name, Key=key) assert not 'VersionId' in response -@attr(resource='object') -@attr(method='put') -@attr(operation='test whether a correct version-id returned') -@attr(assertion='version-id is same as bucket list') -@attr('versioning') def test_versioning_bucket_multipart_upload_return_version_id(): content_type='text/bla' objlen = 30 * 1024 * 1024 @@ -12583,15 +10540,8 @@ def test_versioning_bucket_multipart_upload_return_version_id(): response = client.complete_multipart_upload(Bucket=bucket_name, Key=key, UploadId=upload_id, MultipartUpload={'Parts': parts}) assert not 'VersionId' in response -@attr(resource='object') -@attr(method='get') -@attr(operation='Test ExistingObjectTag conditional on get object') -@attr(assertion='success') -@attr('tagging') @pytest.mark.tagging -@attr('bucket-policy') @pytest.mark.bucket_policy -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_bucket_policy_get_obj_existing_tag(): bucket_name = _create_objects(keys=['publictag', 'privatetag', 'invalidtag']) @@ -12645,15 +10595,8 @@ def test_bucket_policy_get_obj_existing_tag(): status, error_code = _get_status_and_error_code(e.response) assert status == 403 -@attr(resource='object') -@attr(method='get') -@attr(operation='Test ExistingObjectTag conditional on get object tagging') -@attr(assertion='success') -@attr('tagging') @pytest.mark.tagging -@attr('bucket-policy') @pytest.mark.bucket_policy -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_bucket_policy_get_obj_tagging_existing_tag(): bucket_name = _create_objects(keys=['publictag', 'privatetag', 'invalidtag']) @@ -12714,15 +10657,8 @@ def test_bucket_policy_get_obj_tagging_existing_tag(): assert status == 403 
-@attr(resource='object') -@attr(method='get') -@attr(operation='Test ExistingObjectTag conditional on put object tagging') -@attr(assertion='success') -@attr('tagging') @pytest.mark.tagging -@attr('bucket-policy') @pytest.mark.bucket_policy -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_bucket_policy_put_obj_tagging_existing_tag(): bucket_name = _create_objects(keys=['publictag', 'privatetag', 'invalidtag']) @@ -12790,15 +10726,8 @@ def test_bucket_policy_put_obj_tagging_existing_tag(): assert status == 403 -@attr(resource='object') -@attr(method='put') -@attr(operation='Test copy-source conditional on put obj') -@attr(assertion='success') -@attr('tagging') @pytest.mark.tagging -@attr('bucket-policy') @pytest.mark.bucket_policy -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_bucket_policy_put_obj_copy_source(): bucket_name = _create_objects(keys=['public/foo', 'public/bar', 'private/foo']) @@ -12844,15 +10773,8 @@ def test_bucket_policy_put_obj_copy_source(): copy_source = {'Bucket': bucket_name, 'Key': 'private/foo'} check_access_denied(alt_client.copy_object, Bucket=bucket_name2, CopySource=copy_source, Key='new_foo2') -@attr(resource='object') -@attr(method='put') -@attr(operation='Test copy-source conditional on put obj') -@attr(assertion='success') -@attr('tagging') @pytest.mark.tagging -@attr('bucket-policy') @pytest.mark.bucket_policy -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_bucket_policy_put_obj_copy_source_meta(): src_bucket_name = _create_objects(keys=['public/foo', 'public/bar']) @@ -12902,13 +10824,7 @@ def test_bucket_policy_put_obj_copy_source_meta(): check_access_denied(alt_client.copy_object, Bucket=bucket_name, CopySource=copy_source, Key='new_foo2', Metadata={"foo": "bar"}) -@attr(resource='object') -@attr(method='put') -@attr(operation='Test put obj with canned-acl not to be public') -@attr(assertion='success') -@attr('tagging') @pytest.mark.tagging -@attr('bucket-policy') 
@pytest.mark.bucket_policy def test_bucket_policy_put_obj_acl(): bucket_name = get_new_bucket() @@ -12949,11 +10865,6 @@ def test_bucket_policy_put_obj_acl(): assert status == 403 -@attr(resource='object') -@attr(method='put') -@attr(operation='Test put obj with amz-grant back to bucket-owner') -@attr(assertion='success') -@attr('bucket-policy') @pytest.mark.bucket_policy def test_bucket_policy_put_obj_grant(): @@ -13016,11 +10927,6 @@ def test_bucket_policy_put_obj_grant(): assert acl2_response['Grants'][0]['Grantee']['ID'] == alt_user_id -@attr(resource='object') -@attr(method='put') -@attr(operation='Deny put obj specifying both sse-c and sse-s3') -@attr(assertion='success') -@attr('encryption') @pytest.mark.encryption def test_put_obj_enc_conflict_c_s3(): bucket_name = get_new_bucket() @@ -13044,11 +10950,6 @@ def test_put_obj_enc_conflict_c_s3(): assert status == 400 assert error_code == 'InvalidArgument' -@attr(resource='object') -@attr(method='put') -@attr(operation='Deny put obj specifying both sse-c and sse-kms') -@attr(assertion='success') -@attr('encryption') @pytest.mark.encryption def test_put_obj_enc_conflict_c_kms(): kms_keyid = get_main_kms_keyid() @@ -13076,11 +10977,6 @@ def test_put_obj_enc_conflict_c_kms(): assert status == 400 assert error_code == 'InvalidArgument' -@attr(resource='object') -@attr(method='put') -@attr(operation='Deny put obj specifying sse-s3 with kms key id') -@attr(assertion='success') -@attr('encryption') @pytest.mark.encryption def test_put_obj_enc_conflict_s3_kms(): kms_keyid = get_main_kms_keyid() @@ -13105,11 +11001,6 @@ def test_put_obj_enc_conflict_s3_kms(): assert status == 400 assert error_code == 'InvalidArgument' -@attr(resource='object') -@attr(method='put') -@attr(operation='Deny put obj specifying invalid algorithm' ) -@attr(assertion='success') -@attr('encryption') @pytest.mark.encryption def test_put_obj_enc_conflict_bad_enc_kms(): kms_keyid = get_main_kms_keyid() @@ -13133,17 +11024,9 @@ def 
test_put_obj_enc_conflict_bad_enc_kms(): assert status == 400 assert error_code == 'InvalidArgument' -@attr(resource='object') -@attr(method='put') -@attr(operation='Deny put obj requests if not sse-s3: without encryption') -@attr(assertion='success') -@attr('encryption') @pytest.mark.encryption -@attr('bucket-policy') @pytest.mark.bucket_policy -@attr('sse-s3') @pytest.mark.sse_s3 -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_bucket_policy_put_obj_s3_noenc(): bucket_name = get_new_bucket() @@ -13195,15 +11078,8 @@ def test_bucket_policy_put_obj_s3_noenc(): # errors out before it consumes the data...) check_access_denied(client.put_object, Bucket=bucket_name, Key=key1_str, Body=key1_str) -@attr(resource='object') -@attr(method='put') -@attr(operation='Deny put obj requests if not sse-s3: kms') -@attr(assertion='success') -@attr('encryption') @pytest.mark.encryption -@attr('bucket-policy') @pytest.mark.bucket_policy -@attr('sse-s3') @pytest.mark.sse_s3 def test_bucket_policy_put_obj_s3_kms(): kms_keyid = get_main_kms_keyid() @@ -13248,15 +11124,8 @@ def test_bucket_policy_put_obj_s3_kms(): client.meta.events.register('before-call.s3.PutObject', lf) check_access_denied(client.put_object, Bucket=bucket_name, Key=key1_str, Body=key1_str) -@attr(resource='object') -@attr(method='put') -@attr(operation='Deny put obj requests if not sse-kms: without encryption') -@attr(assertion='success') -@attr('encryption') @pytest.mark.encryption -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore -@attr('bucket-policy') @pytest.mark.bucket_policy def test_bucket_policy_put_obj_kms_noenc(): kms_keyid = get_main_kms_keyid() @@ -13302,13 +11171,7 @@ def test_bucket_policy_put_obj_kms_noenc(): check_access_denied(client.put_object, Bucket=bucket_name, Key=key2_str, Body=key2_str) -@attr(resource='object') -@attr(method='put') -@attr(operation='Deny put obj requests if not sse-kms: s3') -@attr(assertion='success') -@attr('encryption') @pytest.mark.encryption 
-@attr('bucket-policy') @pytest.mark.bucket_policy def test_bucket_policy_put_obj_kms_s3(): bucket_name = get_new_bucket() @@ -13349,16 +11212,9 @@ def test_bucket_policy_put_obj_kms_s3(): client.meta.events.register('before-call.s3.PutObject', lf) check_access_denied(client.put_object, Bucket=bucket_name, Key=key1_str, Body=key1_str) -@attr(resource='object') -@attr(method='put') -@attr(operation='put obj with RequestObjectTag') -@attr(assertion='success') -@attr('tagging') @pytest.mark.tagging -@attr('bucket-policy') @pytest.mark.bucket_policy # TODO: remove this fails_on_rgw when I fix it -@attr('fails_on_rgw') @pytest.mark.fails_on_rgw def test_bucket_policy_put_obj_request_obj_tag(): bucket_name = get_new_bucket() @@ -13386,15 +11242,8 @@ def test_bucket_policy_put_obj_request_obj_tag(): #TODO: why is this a 400 and not passing alt_client.put_object(Bucket=bucket_name, Key=key1_str, Body=key1_str) -@attr(resource='object') -@attr(method='get') -@attr(operation='Test ExistingObjectTag conditional on get object acl') -@attr(assertion='success') -@attr('tagging') @pytest.mark.tagging -@attr('bucket-policy') @pytest.mark.bucket_policy -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_bucket_policy_get_obj_acl_existing_tag(): bucket_name = _create_objects(keys=['publictag', 'privatetag', 'invalidtag']) @@ -13454,12 +11303,6 @@ def test_bucket_policy_get_obj_acl_existing_tag(): assert status == 403 -@attr(resource='bucket') -@attr(method='put') -@attr(operation='Test put object lock with defalut retention') -@attr(assertion='success') -@attr('object-lock') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_lock_put_obj_lock(): bucket_name = get_new_bucket_name() @@ -13493,11 +11336,6 @@ def test_object_lock_put_obj_lock(): assert response['Status'] == 'Enabled' -@attr(resource='bucket') -@attr(method='put') -@attr(operation='Test put object lock with bucket object lock not enabled') -@attr(assertion='fails') 
-@attr('object-lock') def test_object_lock_put_obj_lock_invalid_bucket(): bucket_name = get_new_bucket_name() client = get_client() @@ -13515,12 +11353,6 @@ def test_object_lock_put_obj_lock_invalid_bucket(): assert error_code == 'InvalidBucketState' -@attr(resource='bucket') -@attr(method='put') -@attr(operation='Test put object lock with days and years') -@attr(assertion='fails') -@attr('object-lock') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_lock_put_obj_lock_with_days_and_years(): bucket_name = get_new_bucket_name() @@ -13540,12 +11372,6 @@ def test_object_lock_put_obj_lock_with_days_and_years(): assert error_code == 'MalformedXML' -@attr(resource='bucket') -@attr(method='put') -@attr(operation='Test put object lock with invalid days') -@attr(assertion='fails') -@attr('object-lock') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_lock_put_obj_lock_invalid_days(): bucket_name = get_new_bucket_name() @@ -13564,12 +11390,6 @@ def test_object_lock_put_obj_lock_invalid_days(): assert error_code == 'InvalidRetentionPeriod' -@attr(resource='bucket') -@attr(method='put') -@attr(operation='Test put object lock with invalid years') -@attr(assertion='fails') -@attr('object-lock') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_lock_put_obj_lock_invalid_years(): bucket_name = get_new_bucket_name() @@ -13588,12 +11408,6 @@ def test_object_lock_put_obj_lock_invalid_years(): assert error_code == 'InvalidRetentionPeriod' -@attr(resource='bucket') -@attr(method='put') -@attr(operation='Test put object lock with invalid mode') -@attr(assertion='fails') -@attr('object-lock') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_lock_put_obj_lock_invalid_mode(): bucket_name = get_new_bucket_name() @@ -13624,12 +11438,6 @@ def test_object_lock_put_obj_lock_invalid_mode(): assert error_code == 'MalformedXML' -attr(resource='bucket') -@attr(method='put') -@attr(operation='Test put 
object lock with invalid status') -@attr(assertion='fails') -@attr('object-lock') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_lock_put_obj_lock_invalid_status(): bucket_name = get_new_bucket_name() @@ -13648,12 +11456,6 @@ def test_object_lock_put_obj_lock_invalid_status(): assert error_code == 'MalformedXML' -attr(resource='bucket') -@attr(method='put') -@attr(operation='Test suspend versioning when object lock enabled') -@attr(assertion='fails') -@attr('object-lock') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_lock_suspend_versioning(): bucket_name = get_new_bucket_name() @@ -13665,12 +11467,6 @@ def test_object_lock_suspend_versioning(): assert error_code == 'InvalidBucketState' -@attr(resource='bucket') -@attr(method='get') -@attr(operation='Test get object lock') -@attr(assertion='success') -@attr('object-lock') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_lock_get_obj_lock(): bucket_name = get_new_bucket_name() @@ -13690,11 +11486,6 @@ def test_object_lock_get_obj_lock(): assert response['ObjectLockConfiguration'] == conf -@attr(resource='bucket') -@attr(method='get') -@attr(operation='Test get object lock with bucket object lock not enabled') -@attr(assertion='fails') -@attr('object-lock') def test_object_lock_get_obj_lock_invalid_bucket(): bucket_name = get_new_bucket_name() client = get_client() @@ -13705,12 +11496,6 @@ def test_object_lock_get_obj_lock_invalid_bucket(): assert error_code == 'ObjectLockConfigurationNotFoundError' -@attr(resource='bucket') -@attr(method='get') -@attr(operation='Test put object retention') -@attr(assertion='success') -@attr('object-lock') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_lock_put_obj_retention(): bucket_name = get_new_bucket_name() @@ -13726,11 +11511,6 @@ def test_object_lock_put_obj_retention(): -@attr(resource='bucket') -@attr(method='put') -@attr(operation='Test put object retention with bucket 
object lock not enabled') -@attr(assertion='fails') -@attr('object-lock') def test_object_lock_put_obj_retention_invalid_bucket(): bucket_name = get_new_bucket_name() client = get_client() @@ -13744,12 +11524,6 @@ def test_object_lock_put_obj_retention_invalid_bucket(): assert error_code == 'InvalidRequest' -@attr(resource='bucket') -@attr(method='put') -@attr(operation='Test put object retention with invalid mode') -@attr(assertion='fails') -@attr('object-lock') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_lock_put_obj_retention_invalid_mode(): bucket_name = get_new_bucket_name() @@ -13770,12 +11544,6 @@ def test_object_lock_put_obj_retention_invalid_mode(): assert error_code == 'MalformedXML' -@attr(resource='bucket') -@attr(method='get') -@attr(operation='Test get object retention') -@attr(assertion='success') -@attr('object-lock') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_lock_get_obj_retention(): bucket_name = get_new_bucket_name() @@ -13791,12 +11559,6 @@ def test_object_lock_get_obj_retention(): client.delete_object(Bucket=bucket_name, Key=key, VersionId=version_id, BypassGovernanceRetention=True) -@attr(resource='bucket') -@attr(method='get') -@attr(operation='Test object retention date formatting') -@attr(assertion='success') -@attr('object-lock') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_lock_get_obj_retention_iso8601(): bucket_name = get_new_bucket_name() @@ -13815,11 +11577,6 @@ def test_object_lock_get_obj_retention_iso8601(): client.delete_object(Bucket=bucket_name, Key=key, VersionId=version_id, BypassGovernanceRetention=True) -@attr(resource='bucket') -@attr(method='get') -@attr(operation='Test get object retention with invalid bucket') -@attr(assertion='fails') -@attr('object-lock') def test_object_lock_get_obj_retention_invalid_bucket(): bucket_name = get_new_bucket_name() client = get_client() @@ -13832,12 +11589,6 @@ def 
test_object_lock_get_obj_retention_invalid_bucket(): assert error_code == 'InvalidRequest' -@attr(resource='bucket') -@attr(method='put') -@attr(operation='Test put object retention with version id') -@attr(assertion='success') -@attr('object-lock') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_lock_put_obj_retention_versionid(): bucket_name = get_new_bucket_name() @@ -13854,12 +11605,6 @@ def test_object_lock_put_obj_retention_versionid(): client.delete_object(Bucket=bucket_name, Key=key, VersionId=version_id, BypassGovernanceRetention=True) -@attr(resource='bucket') -@attr(method='put') -@attr(operation='Test put object retention to override default retention') -@attr(assertion='success') -@attr('object-lock') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_lock_put_obj_retention_override_default_retention(): bucket_name = get_new_bucket_name() @@ -13885,12 +11630,6 @@ def test_object_lock_put_obj_retention_override_default_retention(): client.delete_object(Bucket=bucket_name, Key=key, VersionId=version_id, BypassGovernanceRetention=True) -@attr(resource='bucket') -@attr(method='put') -@attr(operation='Test put object retention to increase retention period') -@attr(assertion='success') -@attr('object-lock') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_lock_put_obj_retention_increase_period(): bucket_name = get_new_bucket_name() @@ -13908,12 +11647,6 @@ def test_object_lock_put_obj_retention_increase_period(): client.delete_object(Bucket=bucket_name, Key=key, VersionId=version_id, BypassGovernanceRetention=True) -@attr(resource='bucket') -@attr(method='put') -@attr(operation='Test put object retention to shorten period') -@attr(assertion='fails') -@attr('object-lock') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_lock_put_obj_retention_shorten_period(): bucket_name = get_new_bucket_name() @@ -13932,12 +11665,6 @@ def 
test_object_lock_put_obj_retention_shorten_period(): client.delete_object(Bucket=bucket_name, Key=key, VersionId=version_id, BypassGovernanceRetention=True) -@attr(resource='bucket') -@attr(method='put') -@attr(operation='Test put object retention to shorten period with bypass header') -@attr(assertion='success') -@attr('object-lock') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_lock_put_obj_retention_shorten_period_bypass(): bucket_name = get_new_bucket_name() @@ -13955,12 +11682,6 @@ def test_object_lock_put_obj_retention_shorten_period_bypass(): client.delete_object(Bucket=bucket_name, Key=key, VersionId=version_id, BypassGovernanceRetention=True) -@attr(resource='bucket') -@attr(method='delete') -@attr(operation='Test delete object with retention') -@attr(assertion='retention period make effects') -@attr('object-lock') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_lock_delete_object_with_retention(): bucket_name = get_new_bucket_name() @@ -13979,12 +11700,6 @@ def test_object_lock_delete_object_with_retention(): response = client.delete_object(Bucket=bucket_name, Key=key, VersionId=response['VersionId'], BypassGovernanceRetention=True) assert response['ResponseMetadata']['HTTPStatusCode'] == 204 -@attr(resource='bucket') -@attr(method='delete') -@attr(operation='Test delete object with retention and delete marker') -@attr(assertion='retention period make effects') -@attr('object-lock') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_lock_delete_object_with_retention_and_marker(): bucket_name = get_new_bucket_name() @@ -14010,12 +11725,6 @@ def test_object_lock_delete_object_with_retention_and_marker(): response = client.delete_object(Bucket=bucket_name, Key=key, VersionId=response['VersionId'], BypassGovernanceRetention=True) assert response['ResponseMetadata']['HTTPStatusCode'] == 204 -@attr(resource='object') -@attr(method='delete') -@attr(operation='Test multi-delete object with 
retention') -@attr(assertion='retention period make effects') -@attr('object-lock') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_lock_multi_delete_object_with_retention(): bucket_name = get_new_bucket_name() @@ -14083,12 +11792,6 @@ def test_object_lock_multi_delete_object_with_retention(): -@attr(resource='bucket') -@attr(method='put') -@attr(operation='Test put legal hold') -@attr(assertion='success') -@attr('object-lock') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_lock_put_legal_hold(): bucket_name = get_new_bucket_name() @@ -14103,11 +11806,6 @@ def test_object_lock_put_legal_hold(): assert response['ResponseMetadata']['HTTPStatusCode'] == 200 -@attr(resource='bucket') -@attr(method='put') -@attr(operation='Test put legal hold with invalid bucket') -@attr(assertion='fails') -@attr('object-lock') def test_object_lock_put_legal_hold_invalid_bucket(): bucket_name = get_new_bucket_name() client = get_client() @@ -14121,12 +11819,6 @@ def test_object_lock_put_legal_hold_invalid_bucket(): assert error_code == 'InvalidRequest' -@attr(resource='bucket') -@attr(method='put') -@attr(operation='Test put legal hold with invalid status') -@attr(assertion='fails') -@attr('object-lock') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_lock_put_legal_hold_invalid_status(): bucket_name = get_new_bucket_name() @@ -14141,12 +11833,6 @@ def test_object_lock_put_legal_hold_invalid_status(): assert error_code == 'MalformedXML' -@attr(resource='bucket') -@attr(method='get') -@attr(operation='Test get legal hold') -@attr(assertion='success') -@attr('object-lock') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_lock_get_legal_hold(): bucket_name = get_new_bucket_name() @@ -14164,11 +11850,6 @@ def test_object_lock_get_legal_hold(): assert response['LegalHold'] == legal_hold_off -@attr(resource='bucket') -@attr(method='get') -@attr(operation='Test get legal hold with invalid 
bucket') -@attr(assertion='fails') -@attr('object-lock') def test_object_lock_get_legal_hold_invalid_bucket(): bucket_name = get_new_bucket_name() client = get_client() @@ -14181,12 +11862,6 @@ def test_object_lock_get_legal_hold_invalid_bucket(): assert error_code == 'InvalidRequest' -@attr(resource='bucket') -@attr(method='delete') -@attr(operation='Test delete object with legal hold on') -@attr(assertion='fails') -@attr('object-lock') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_lock_delete_object_with_legal_hold_on(): bucket_name = get_new_bucket_name() @@ -14202,12 +11877,6 @@ def test_object_lock_delete_object_with_legal_hold_on(): client.put_object_legal_hold(Bucket=bucket_name, Key=key, LegalHold={'Status':'OFF'}) -@attr(resource='bucket') -@attr(method='delete') -@attr(operation='Test delete object with legal hold off') -@attr(assertion='fails') -@attr('object-lock') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_lock_delete_object_with_legal_hold_off(): bucket_name = get_new_bucket_name() @@ -14220,12 +11889,6 @@ def test_object_lock_delete_object_with_legal_hold_off(): assert response['ResponseMetadata']['HTTPStatusCode'] == 204 -@attr(resource='bucket') -@attr(method='get') -@attr(operation='Test get object metadata') -@attr(assertion='success') -@attr('object-lock') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_lock_get_obj_metadata(): bucket_name = get_new_bucket_name() @@ -14246,12 +11909,6 @@ def test_object_lock_get_obj_metadata(): client.delete_object(Bucket=bucket_name, Key=key, VersionId=response['VersionId'], BypassGovernanceRetention=True) -@attr(resource='bucket') -@attr(method='put') -@attr(operation='Test put legal hold and retention when uploading object') -@attr(assertion='success') -@attr('object-lock') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_lock_uploading_obj(): bucket_name = get_new_bucket_name() @@ -14268,12 +11925,6 @@ 
def test_object_lock_uploading_obj(): client.put_object_legal_hold(Bucket=bucket_name, Key=key, LegalHold={'Status':'OFF'}) client.delete_object(Bucket=bucket_name, Key=key, VersionId=response['VersionId'], BypassGovernanceRetention=True) -@attr(resource='object') -@attr(method='put') -@attr(operation='Test changing object retention mode from GOVERNANCE to COMPLIANCE with bypass') -@attr(assertion='succeeds') -@attr('object-lock') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_lock_changing_mode_from_governance_with_bypass(): bucket_name = get_new_bucket_name() @@ -14288,12 +11939,6 @@ def test_object_lock_changing_mode_from_governance_with_bypass(): retention = {'Mode':'COMPLIANCE', 'RetainUntilDate':retain_until} client.put_object_retention(Bucket=bucket_name, Key=key, Retention=retention, BypassGovernanceRetention=True) -@attr(resource='object') -@attr(method='put') -@attr(operation='Test changing object retention mode from GOVERNANCE to COMPLIANCE without bypass') -@attr(assertion='fails') -@attr('object-lock') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_lock_changing_mode_from_governance_without_bypass(): bucket_name = get_new_bucket_name() @@ -14311,12 +11956,6 @@ def test_object_lock_changing_mode_from_governance_without_bypass(): assert status == 403 assert error_code == 'AccessDenied' -@attr(resource='object') -@attr(method='put') -@attr(operation='Test changing object retention mode from COMPLIANCE to GOVERNANCE') -@attr(assertion='fails') -@attr('object-lock') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_object_lock_changing_mode_from_compliance(): bucket_name = get_new_bucket_name() @@ -14334,11 +11973,6 @@ def test_object_lock_changing_mode_from_compliance(): assert status == 403 assert error_code == 'AccessDenied' -@attr(resource='object') -@attr(method='copy') -@attr(operation='copy w/ x-amz-copy-source-if-match: the latest ETag') -@attr(assertion='succeeds') 
-@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_copy_object_ifmatch_good(): bucket_name = get_new_bucket() @@ -14350,12 +11984,7 @@ def test_copy_object_ifmatch_good(): body = _get_body(response) assert body == 'bar' -@attr(resource='object') -@attr(method='copy') -@attr(operation='copy w/ x-amz-copy-source-if-match: bogus ETag') -@attr(assertion='fails 412') # TODO: remove fails_on_rgw when https://tracker.ceph.com/issues/40808 is resolved -@attr('fails_on_rgw') @pytest.mark.fails_on_rgw def test_copy_object_ifmatch_failed(): bucket_name = get_new_bucket() @@ -14367,12 +11996,7 @@ def test_copy_object_ifmatch_failed(): assert status == 412 assert error_code == 'PreconditionFailed' -@attr(resource='object') -@attr(method='copy') -@attr(operation='copy w/ x-amz-copy-source-if-none-match: the latest ETag') -@attr(assertion='fails 412') # TODO: remove fails_on_rgw when https://tracker.ceph.com/issues/40808 is resolved -@attr('fails_on_rgw') @pytest.mark.fails_on_rgw def test_copy_object_ifnonematch_good(): bucket_name = get_new_bucket() @@ -14384,11 +12008,6 @@ def test_copy_object_ifnonematch_good(): assert status == 412 assert error_code == 'PreconditionFailed' -@attr(resource='object') -@attr(method='copy') -@attr(operation='copy w/ x-amz-copy-source-if-none-match: bogus ETag') -@attr(assertion='succeeds') -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_copy_object_ifnonematch_failed(): bucket_name = get_new_bucket() @@ -14400,12 +12019,7 @@ def test_copy_object_ifnonematch_failed(): body = _get_body(response) assert body == 'bar' -@attr(resource='object') -@attr(method='get') -@attr(operation='read to invalid key') -@attr(assertion='fails 400') # TODO: results in a 404 instead of 400 on the RGW -@attr('fails_on_rgw') @pytest.mark.fails_on_rgw def test_object_read_unreadable(): bucket_name = get_new_bucket() @@ -14415,11 +12029,6 @@ def test_object_read_unreadable(): assert status == 400 assert e.response['Error']['Message'] == 
'Couldn\'t parse the specified URI.' -@attr(resource='bucket') -@attr(method='get') -@attr(operation='Test User Policy') -@attr(assertion='succeeds') -@attr('user-policy') def test_user_policy(): client = get_tenant_iam_client() @@ -14437,22 +12046,12 @@ def test_user_policy(): ) -@attr(resource='bucket') -@attr(method='get') -@attr(operation='get bucket policy status on a new bucket') -@attr(assertion='succeeds') -@attr('policy_status') def test_get_bucket_policy_status(): bucket_name = get_new_bucket() client = get_client() resp = client.get_bucket_policy_status(Bucket=bucket_name) assert resp['PolicyStatus']['IsPublic'] == False -@attr(resource='bucket') -@attr(method='get') -@attr(operation='get bucket policy status on a public acl bucket') -@attr(assertion='succeeds') -@attr('policy_status') def test_get_public_acl_bucket_policy_status(): bucket_name = get_new_bucket() client = get_client() @@ -14461,11 +12060,6 @@ def test_get_public_acl_bucket_policy_status(): resp = client.get_bucket_policy_status(Bucket=bucket_name) assert resp['PolicyStatus']['IsPublic'] == True -@attr(resource='bucket') -@attr(method='get') -@attr(operation='get bucket policy status on a authenticated acl bucket') -@attr(assertion='succeeds') -@attr('policy_status') def test_get_authpublic_acl_bucket_policy_status(): bucket_name = get_new_bucket() client = get_client() @@ -14475,11 +12069,6 @@ def test_get_authpublic_acl_bucket_policy_status(): assert resp['PolicyStatus']['IsPublic'] == True -@attr(resource='bucket') -@attr(method='get') -@attr(operation='get bucket policy status on a public policy bucket') -@attr(assertion='succeeds') -@attr('policy_status') def test_get_publicpolicy_acl_bucket_policy_status(): bucket_name = get_new_bucket() client = get_client() @@ -14509,11 +12098,6 @@ def test_get_publicpolicy_acl_bucket_policy_status(): assert resp['PolicyStatus']['IsPublic'] == True -@attr(resource='bucket') -@attr(method='get') -@attr(operation='get bucket policy status on a 
public policy bucket') -@attr(assertion='succeeds') -@attr('policy_status') def test_get_nonpublicpolicy_acl_bucket_policy_status(): bucket_name = get_new_bucket() client = get_client() @@ -14547,11 +12131,6 @@ def test_get_nonpublicpolicy_acl_bucket_policy_status(): assert resp['PolicyStatus']['IsPublic'] == False -@attr(resource='bucket') -@attr(method='get') -@attr(operation='get bucket policy status on a public policy bucket') -@attr(assertion='succeeds') -@attr('policy_status') def test_get_nonpublicpolicy_deny_bucket_policy_status(): bucket_name = get_new_bucket() client = get_client() @@ -14579,11 +12158,6 @@ def test_get_nonpublicpolicy_deny_bucket_policy_status(): resp = client.get_bucket_policy_status(Bucket=bucket_name) assert resp['PolicyStatus']['IsPublic'] == True -@attr(resource='bucket') -@attr(method='get') -@attr(operation='get public access block on a bucket') -@attr(assertion='succeeds') -@attr('policy_status') def test_get_default_public_block(): #client = get_svc_client(svc='s3control', client_config=Config(s3={'addressing_style': 'path'})) bucket_name = get_new_bucket() @@ -14595,11 +12169,6 @@ def test_get_default_public_block(): assert resp['PublicAccessBlockConfiguration']['IgnorePublicAcls'] == False assert resp['PublicAccessBlockConfiguration']['RestrictPublicBuckets'] == False -@attr(resource='bucket') -@attr(method='put') -@attr(operation='get public access block on a bucket') -@attr(assertion='succeeds') -@attr('policy_status') def test_put_public_block(): #client = get_svc_client(svc='s3control', client_config=Config(s3={'addressing_style': 'path'})) bucket_name = get_new_bucket() @@ -14619,11 +12188,6 @@ def test_put_public_block(): assert resp['PublicAccessBlockConfiguration']['RestrictPublicBuckets'] == access_conf['RestrictPublicBuckets'] -@attr(resource='bucket') -@attr(method='put') -@attr(operation='get public access block on a bucket') -@attr(assertion='succeeds') -@attr('policy_status') def 
test_block_public_put_bucket_acls(): #client = get_svc_client(svc='s3control', client_config=Config(s3={'addressing_style': 'path'})) bucket_name = get_new_bucket() @@ -14653,11 +12217,6 @@ def test_block_public_put_bucket_acls(): assert status == 403 -@attr(resource='bucket') -@attr(method='put') -@attr(operation='block public acls on canned acls') -@attr(assertion='succeeds') -@attr('policy_status') def test_block_public_object_canned_acls(): bucket_name = get_new_bucket() client = get_client() @@ -14687,11 +12246,6 @@ def test_block_public_object_canned_acls(): assert status == 403 -@attr(resource='bucket') -@attr(method='put') -@attr(operation='block public acls on canned acls') -@attr(assertion='succeeds') -@attr('policy_status') def test_block_public_policy(): bucket_name = get_new_bucket() client = get_client() @@ -14709,11 +12263,6 @@ def test_block_public_policy(): check_access_denied(client.put_bucket_policy, Bucket=bucket_name, Policy=policy_document) -@attr(resource='bucket') -@attr(method='put') -@attr(operation='ignore public acls on canned acls') -@attr(assertion='succeeds') -@attr('policy_status') def test_ignore_public_acls(): bucket_name = get_new_bucket() client = get_client() @@ -14741,11 +12290,6 @@ def test_ignore_public_acls(): check_access_denied(alt_client.get_object, Bucket=bucket_name, Key='key1') -@attr(resource='bucket') -@attr(method='put') -@attr(operation='multipart upload on a bucket with a policy') -@attr(assertion='succeeds') -@attr('policy_status') def test_multipart_upload_on_a_bucket_with_policy(): bucket_name = get_new_bucket() client = get_client() @@ -14808,22 +12352,12 @@ def _put_bucket_encryption_kms(client, bucket_name): assert response['ResponseMetadata']['HTTPStatusCode'] == 200 -@attr(resource='bucket') -@attr(method='put') -@attr(operation='put bucket encryption on bucket - s3') -@attr(assertion='succeeds') -@attr('sse-s3') @pytest.mark.sse_s3 def test_put_bucket_encryption_s3(): bucket_name = get_new_bucket() client 
= get_client() _put_bucket_encryption_s3(client, bucket_name) -@attr(resource='bucket') -@attr(method='put') -@attr(operation='put bucket encryption on bucket - kms') -@attr(assertion='succeeds') -@attr('encryption') @pytest.mark.encryption def test_put_bucket_encryption_kms(): bucket_name = get_new_bucket() @@ -14831,11 +12365,6 @@ def test_put_bucket_encryption_kms(): _put_bucket_encryption_kms(client, bucket_name) -@attr(resource='bucket') -@attr(method='get') -@attr(operation='get bucket encryption on bucket - s3') -@attr(assertion='succeeds') -@attr('sse-s3') @pytest.mark.sse_s3 def test_get_bucket_encryption_s3(): bucket_name = get_new_bucket() @@ -14856,11 +12385,6 @@ def test_get_bucket_encryption_s3(): assert response['ServerSideEncryptionConfiguration']['Rules'][0]['ApplyServerSideEncryptionByDefault']['SSEAlgorithm'] == 'AES256' -@attr(resource='bucket') -@attr(method='get') -@attr(operation='get bucket encryption on bucket - kms') -@attr(assertion='succeeds') -@attr('encryption') @pytest.mark.encryption def test_get_bucket_encryption_kms(): kms_keyid = get_main_kms_keyid() @@ -14885,11 +12409,6 @@ def test_get_bucket_encryption_kms(): assert response['ServerSideEncryptionConfiguration']['Rules'][0]['ApplyServerSideEncryptionByDefault']['KMSMasterKeyID'] == kms_keyid -@attr(resource='bucket') -@attr(method='delete') -@attr(operation='delete bucket encryption on bucket - s3') -@attr(assertion='succeeds') -@attr('sse-s3') @pytest.mark.sse_s3 def test_delete_bucket_encryption_s3(): bucket_name = get_new_bucket() @@ -14912,11 +12431,6 @@ def test_delete_bucket_encryption_s3(): assert response_code == 'ServerSideEncryptionConfigurationNotFoundError' -@attr(resource='bucket') -@attr(method='delete') -@attr(operation='delete bucket encryption on bucket - kms') -@attr(assertion='succeeds') -@attr('encryption') @pytest.mark.encryption def test_delete_bucket_encryption_kms(): bucket_name = get_new_bucket() @@ -14957,62 +12471,30 @@ def 
_test_sse_s3_default_upload(file_size): body = _get_body(response) assert body == data -@attr(resource='object') -@attr(method='put') -@attr(operation='Test 1 byte upload to SSE-S3 default-encrypted bucket') -@attr(assertion='success') -@attr('encryption') @pytest.mark.encryption -@attr('bucket-encryption') @pytest.mark.bucket_encryption -@attr('sse-s3') @pytest.mark.sse_s3 -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_sse_s3_default_upload_1b(): _test_sse_s3_default_upload(1) -@attr(resource='object') -@attr(method='put') -@attr(operation='Test 1KB upload to SSE-S3 default-encrypted bucket') -@attr(assertion='success') -@attr('encryption') @pytest.mark.encryption -@attr('bucket-encryption') @pytest.mark.bucket_encryption -@attr('sse-s3') @pytest.mark.sse_s3 -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_sse_s3_default_upload_1kb(): _test_sse_s3_default_upload(1024) -@attr(resource='object') -@attr(method='put') -@attr(operation='Test 1MB upload to SSE-S3 default-encrypted bucket') -@attr(assertion='success') -@attr('encryption') @pytest.mark.encryption -@attr('bucket-encryption') @pytest.mark.bucket_encryption -@attr('sse-s3') @pytest.mark.sse_s3 -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_sse_s3_default_upload_1mb(): _test_sse_s3_default_upload(1024*1024) -@attr(resource='object') -@attr(method='put') -@attr(operation='Test 8MB upload to SSE-S3 default-encrypted bucket') -@attr(assertion='success') -@attr('encryption') @pytest.mark.encryption -@attr('bucket-encryption') @pytest.mark.bucket_encryption -@attr('sse-s3') @pytest.mark.sse_s3 -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_sse_s3_default_upload_8mb(): _test_sse_s3_default_upload(8*1024*1024) @@ -15041,79 +12523,39 @@ def _test_sse_kms_default_upload(file_size): body = _get_body(response) assert body == data -@attr(resource='object') -@attr(method='put') -@attr(operation='Test 1 byte upload to SSE-KMS default-encrypted 
bucket') -@attr(assertion='success') -@attr('encryption') @pytest.mark.encryption -@attr('bucket-encryption') @pytest.mark.bucket_encryption -@attr('sse-s3') @pytest.mark.sse_s3 -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_sse_kms_default_upload_1b(): _test_sse_kms_default_upload(1) -@attr(resource='object') -@attr(method='put') -@attr(operation='Test 1KB upload to SSE-KMS default-encrypted bucket') -@attr(assertion='success') -@attr('encryption') @pytest.mark.encryption -@attr('bucket-encryption') @pytest.mark.bucket_encryption -@attr('sse-s3') @pytest.mark.sse_s3 -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_sse_kms_default_upload_1kb(): _test_sse_kms_default_upload(1024) -@attr(resource='object') -@attr(method='put') -@attr(operation='Test 1MB upload to SSE-KMS default-encrypted bucket') -@attr(assertion='success') -@attr('encryption') @pytest.mark.encryption -@attr('bucket-encryption') @pytest.mark.bucket_encryption -@attr('sse-s3') @pytest.mark.sse_s3 -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_sse_kms_default_upload_1mb(): _test_sse_kms_default_upload(1024*1024) -@attr(resource='object') -@attr(method='put') -@attr(operation='Test 8MB upload to SSE-KMS default-encrypted bucket') -@attr(assertion='success') -@attr('encryption') @pytest.mark.encryption -@attr('bucket-encryption') @pytest.mark.bucket_encryption -@attr('sse-s3') @pytest.mark.sse_s3 -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_sse_kms_default_upload_8mb(): _test_sse_kms_default_upload(8*1024*1024) -@attr(resource='object') -@attr(method='head') -@attr(operation='Test head operation on SSE-S3 default-encrypted object') -@attr(assertion='success') -@attr('encryption') @pytest.mark.encryption -@attr('bucket-encryption') @pytest.mark.bucket_encryption -@attr('sse-s3') @pytest.mark.sse_s3 -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_sse_s3_default_method_head(): bucket_name = get_new_bucket() @@ 
-15136,17 +12578,9 @@ def test_sse_s3_default_method_head(): status, error_code = _get_status_and_error_code(e.response) assert status == 400 -@attr(resource='object') -@attr(method='put') -@attr(operation='complete SSE-S3 multi-part upload') -@attr(assertion='successful') -@attr('encryption') @pytest.mark.encryption -@attr('bucket-encryption') @pytest.mark.bucket_encryption -@attr('sse-s3') @pytest.mark.sse_s3 -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_sse_s3_default_multipart_upload(): bucket_name = get_new_bucket() @@ -15191,17 +12625,9 @@ def test_sse_s3_default_multipart_upload(): _check_content_using_range(key, bucket_name, data, 1000000) _check_content_using_range(key, bucket_name, data, 10000000) -@attr(resource='object') -@attr(method='post') -@attr(operation='authenticated SSE-S3 browser based upload via POST request') -@attr(assertion='succeeds and returns written data') -@attr('encryption') @pytest.mark.encryption -@attr('bucket-encryption') @pytest.mark.bucket_encryption -@attr('sse-s3') @pytest.mark.sse_s3 -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_sse_s3_default_post_object_authenticated_request(): bucket_name = get_new_bucket() @@ -15245,15 +12671,8 @@ def test_sse_s3_default_post_object_authenticated_request(): body = _get_body(response) assert body == 'bar' -@attr(resource='object') -@attr(method='post') -@attr(operation='authenticated SSE-kMS browser based upload via POST request') -@attr(assertion='succeeds and returns written data') -@attr('encryption') @pytest.mark.encryption -@attr('bucket-encryption') @pytest.mark.bucket_encryption -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_sse_kms_default_post_object_authenticated_request(): kms_keyid = get_main_kms_keyid() @@ -15318,54 +12737,26 @@ def _test_sse_s3_encrypted_upload(file_size): body = _get_body(response) assert body == data -@attr(resource='object') -@attr(method='put') -@attr(operation='Test 1 byte upload with SSE-S3 
encryption') -@attr(assertion='success') -@attr('encryption') @pytest.mark.encryption -@attr('sse-s3') @pytest.mark.sse_s3 -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_sse_s3_encrypted_upload_1b(): _test_sse_s3_encrypted_upload(1) -@attr(resource='object') -@attr(method='put') -@attr(operation='Test 1Kb upload with SSE-S3 encryption') -@attr(assertion='success') -@attr('encryption') @pytest.mark.encryption -@attr('sse-s3') @pytest.mark.sse_s3 -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_sse_s3_encrypted_upload_1kb(): _test_sse_s3_encrypted_upload(1024) -@attr(resource='object') -@attr(method='put') -@attr(operation='Test 1MB upload with SSE-S3 encryption') -@attr(assertion='success') -@attr('encryption') @pytest.mark.encryption -@attr('sse-s3') @pytest.mark.sse_s3 -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_sse_s3_encrypted_upload_1mb(): _test_sse_s3_encrypted_upload(1024*1024) -@attr(resource='object') -@attr(method='put') -@attr(operation='Test 8MB upload with SSE-S3 encryption') -@attr(assertion='success') -@attr('encryption') @pytest.mark.encryption -@attr('sse-s3') @pytest.mark.sse_s3 -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_sse_s3_encrypted_upload_8mb(): _test_sse_s3_encrypted_upload(8*1024*1024) diff --git a/s3tests_boto3/functional/test_s3select.py b/s3tests_boto3/functional/test_s3select.py index 5a98a37..feb6720 100644 --- a/s3tests_boto3/functional/test_s3select.py +++ b/s3tests_boto3/functional/test_s3select.py @@ -1,9 +1,7 @@ -import nose import pytest import random import string import re -from nose.plugins.attrib import attr from botocore.exceptions import ClientError import uuid @@ -78,13 +76,11 @@ def generate_s3select_expression_projection(bucket_name,obj_name): # both results should be close (epsilon) assert( abs(float(res.split("\n")[1]) - eval(e)) < epsilon ) -@attr('s3select') @pytest.mark.s3select def get_random_string(): return 
uuid.uuid4().hex[:6].upper() -@attr('s3select') @pytest.mark.s3select def test_generate_where_clause(): @@ -97,7 +93,6 @@ def test_generate_where_clause(): for _ in range(100): generate_s3select_where_clause(bucket_name,obj_name) -@attr('s3select') @pytest.mark.s3select def test_generate_projection(): @@ -313,7 +308,6 @@ def create_list_of_int(column_pos,obj,field_split=",",row_split="\n"): return list_of_int -@attr('s3select') @pytest.mark.s3select def test_count_operation(): csv_obj_name = get_random_string() @@ -325,7 +319,6 @@ def test_count_operation(): s3select_assert_result( num_of_rows, int( res )) -@attr('s3select') @pytest.mark.s3select def test_column_sum_min_max(): csv_obj = create_random_csv_object(10000,10) @@ -391,7 +384,6 @@ def test_column_sum_min_max(): s3select_assert_result( int(count)*4 , int(sum1)-int(sum2) ) -@attr('s3select') @pytest.mark.s3select def test_nullif_expressions(): @@ -447,7 +439,6 @@ def test_nullif_expressions(): s3select_assert_result( res_s3select_nullif, res_s3select) -@attr('s3select') @pytest.mark.s3select def test_nulliftrue_expressions(): @@ -475,7 +466,6 @@ def test_nulliftrue_expressions(): s3select_assert_result( res_s3select_nullif, res_s3select) -@attr('s3select') @pytest.mark.s3select def test_is_not_null_expressions(): @@ -497,7 +487,6 @@ def test_is_not_null_expressions(): s3select_assert_result( res_s3select_null, res_s3select) -@attr('s3select') @pytest.mark.s3select def test_lowerupper_expressions(): @@ -515,7 +504,6 @@ def test_lowerupper_expressions(): s3select_assert_result( res_s3select, "AB12CD$$") -@attr('s3select') @pytest.mark.s3select def test_in_expressions(): @@ -586,7 +574,6 @@ def test_in_expressions(): s3select_assert_result( res_s3select_in, res_s3select ) -@attr('s3select') @pytest.mark.s3select def test_true_false_in_expressions(): @@ -632,7 +619,6 @@ def test_true_false_in_expressions(): s3select_assert_result( res_s3select_in, res_s3select ) -@attr('s3select') @pytest.mark.s3select def 
test_like_expressions(): @@ -720,7 +706,6 @@ def test_like_expressions(): s3select_assert_result( res_s3select_like, res_s3select ) -@attr('s3select') @pytest.mark.s3select def test_truefalselike_expressions(): @@ -766,7 +751,6 @@ def test_truefalselike_expressions(): s3select_assert_result( res_s3select_like, res_s3select ) -@attr('s3select') @pytest.mark.s3select def test_nullif_expressions(): @@ -794,7 +778,6 @@ def test_nullif_expressions(): assert res_s3select_nullif == res_s3select -@attr('s3select') @pytest.mark.s3select def test_lowerupper_expressions(): @@ -812,7 +795,6 @@ def test_lowerupper_expressions(): assert res_s3select == "AB12CD$$" -@attr('s3select') @pytest.mark.s3select def test_in_expressions(): @@ -853,7 +835,6 @@ def test_in_expressions(): assert res_s3select_in == res_s3select -@attr('s3select') @pytest.mark.s3select def test_like_expressions(): @@ -900,7 +881,6 @@ def test_like_expressions(): assert res_s3select_in == res_s3select -@attr('s3select') @pytest.mark.s3select def test_complex_expressions(): @@ -933,7 +913,6 @@ def test_complex_expressions(): s3select_assert_result( res_s3select_between_numbers, res_s3select_eq_modolu) -@attr('s3select') @pytest.mark.s3select def test_alias(): @@ -955,7 +934,6 @@ def test_alias(): s3select_assert_result( res_s3select_alias, res_s3select_no_alias) -@attr('s3select') @pytest.mark.s3select def test_alias_cyclic_refernce(): @@ -974,7 +952,6 @@ def test_alias_cyclic_refernce(): assert int(find_res) >= 0 -@attr('s3select') @pytest.mark.s3select def test_datetime(): @@ -1006,7 +983,6 @@ def test_datetime(): s3select_assert_result( res_s3select_date_time_to_timestamp, res_s3select_substring) -@attr('s3select') @pytest.mark.s3select def test_true_false_datetime(): @@ -1041,7 +1017,6 @@ def test_true_false_datetime(): s3select_assert_result( res_s3select_date_time_utcnow, res_s3select_count) -@attr('s3select') @pytest.mark.s3select def test_csv_parser(): @@ -1082,7 +1057,6 @@ def test_csv_parser(): 
res_s3select_alias = remove_xml_tags_from_result( run_s3select(bucket_name,csv_obj_name,"select _9 from s3object;") ).replace("\n","") s3select_assert_result( res_s3select_alias, 'null') -@attr('s3select') @pytest.mark.s3select def test_csv_definition(): @@ -1113,7 +1087,6 @@ def test_csv_definition(): s3select_assert_result( res_s3select, __res ) -@attr('s3select') @pytest.mark.s3select def test_schema_definition(): @@ -1149,7 +1122,6 @@ def test_schema_definition(): assert ((res_multiple_defintion.find("multiple definition of column {c4} as schema-column and alias")) >= 0) -@attr('s3select') @pytest.mark.s3select def test_when_then_else_expressions(): @@ -1179,7 +1151,6 @@ def test_when_then_else_expressions(): s3select_assert_result( str(count3) , res2) -@attr('s3select') @pytest.mark.s3select def test_coalesce_expressions(): @@ -1202,7 +1173,6 @@ def test_coalesce_expressions(): s3select_assert_result( res_s3select, res_coalesce) -@attr('s3select') @pytest.mark.s3select def test_cast_expressions(): @@ -1224,7 +1194,6 @@ def test_cast_expressions(): s3select_assert_result( res_s3select, res) -@attr('s3select') @pytest.mark.s3select def test_version(): @@ -1243,7 +1212,6 @@ def test_version(): s3select_assert_result( res_version, "41.a," ) -@attr('s3select') @pytest.mark.s3select def test_trim_expressions(): @@ -1283,7 +1251,6 @@ def test_trim_expressions(): s3select_assert_result( res_s3select_trim, res_s3select ) -@attr('s3select') @pytest.mark.s3select def test_truefalse_trim_expressions(): @@ -1323,7 +1290,6 @@ def test_truefalse_trim_expressions(): s3select_assert_result( res_s3select_trim, res_s3select ) -@attr('s3select') @pytest.mark.s3select def test_escape_expressions(): @@ -1345,7 +1311,6 @@ def test_escape_expressions(): s3select_assert_result( res_s3select_escape, res_s3select ) -@attr('s3select') @pytest.mark.s3select def test_case_value_expressions(): @@ -1361,7 +1326,6 @@ def test_case_value_expressions(): s3select_assert_result( 
res_s3select_case, res_s3select ) -@attr('s3select') @pytest.mark.s3select def test_bool_cast_expressions(): @@ -1377,7 +1341,6 @@ def test_bool_cast_expressions(): s3select_assert_result( res_s3select_cast, res_s3select ) -@attr('s3select') @pytest.mark.s3select def test_progress_expressions(): @@ -1405,7 +1368,6 @@ def test_progress_expressions(): # end response s3select_assert_result({}, res_s3select_response[total_response-1]) -@attr('s3select') @pytest.mark.s3select def test_output_serial_expressions(): return # TODO fix test diff --git a/s3tests_boto3/functional/test_sts.py b/s3tests_boto3/functional/test_sts.py index dc4fafe..0229dbd 100644 --- a/s3tests_boto3/functional/test_sts.py +++ b/s3tests_boto3/functional/test_sts.py @@ -2,7 +2,6 @@ import boto3 import botocore.session from botocore.exceptions import ClientError from botocore.exceptions import ParamValidationError -from nose.plugins.attrib import attr import pytest import isodate import email.utils @@ -19,7 +18,6 @@ import hashlib import xml.etree.ElementTree as ET import time import operator -import nose import os import string import random @@ -150,13 +148,7 @@ def get_s3_resource_using_iam_creds(): return s3_res_iam_creds -@attr(resource='get session token') -@attr(method='get') -@attr(operation='check') -@attr(assertion='s3 ops only accessible by temporary credentials') -@attr('test_of_sts') @pytest.mark.test_of_sts -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_get_session_token(): iam_client=get_iam_client() @@ -186,13 +178,7 @@ def test_get_session_token(): finally: # clean up user policy even if create_bucket/delete_bucket fails iam_client.delete_user_policy(UserName=sts_user_id,PolicyName=policy_name) -@attr(resource='get session token') -@attr(method='get') -@attr(operation='check') -@attr(assertion='s3 ops denied by permanent credentials') -@attr('test_of_sts') @pytest.mark.test_of_sts -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def 
test_get_session_token_permanent_creds_denied(): s3bucket_error=None @@ -225,13 +211,7 @@ def test_get_session_token_permanent_creds_denied(): assert s3bucket_error == 'AccessDenied' iam_client.delete_user_policy(UserName=sts_user_id,PolicyName=policy_name) -@attr(resource='assume role') -@attr(method='get') -@attr(operation='check') -@attr(assertion='role policy allows all s3 ops') -@attr('test_of_sts') @pytest.mark.test_of_sts -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_assume_role_allow(): iam_client=get_iam_client() @@ -264,13 +244,7 @@ def test_assume_role_allow(): bkt = s3_client.delete_bucket(Bucket=bucket_name) assert bkt['ResponseMetadata']['HTTPStatusCode'] == 204 -@attr(resource='assume role') -@attr(method='get') -@attr(operation='check') -@attr(assertion='role policy denies all s3 ops') -@attr('test_of_sts') @pytest.mark.test_of_sts -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_assume_role_deny(): s3bucket_error=None @@ -305,13 +279,7 @@ def test_assume_role_deny(): s3bucket_error = e.response.get("Error", {}).get("Code") assert s3bucket_error == 'AccessDenied' -@attr(resource='assume role') -@attr(method='get') -@attr(operation='check') -@attr(assertion='creds expire so all s3 ops fails') -@attr('test_of_sts') @pytest.mark.test_of_sts -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_assume_role_creds_expiry(): iam_client=get_iam_client() @@ -346,13 +314,7 @@ def test_assume_role_creds_expiry(): s3bucket_error = e.response.get("Error", {}).get("Code") assert s3bucket_error == 'AccessDenied' -@attr(resource='assume role') -@attr(method='head') -@attr(operation='check') -@attr(assertion='HEAD fails with 403 when role policy denies s3:ListBucket') -@attr('test_of_sts') @pytest.mark.test_of_sts -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_assume_role_deny_head_nonexistent(): # create a bucket with the normal s3 client @@ -390,13 +352,7 @@ def 
test_assume_role_deny_head_nonexistent(): status = e.response['ResponseMetadata']['HTTPStatusCode'] assert status == 403 -@attr(resource='assume role') -@attr(method='head') -@attr(operation='check') -@attr(assertion='HEAD fails with 404 when role policy allows s3:ListBucket') -@attr('test_of_sts') @pytest.mark.test_of_sts -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_assume_role_allow_head_nonexistent(): # create a bucket with the normal s3 client @@ -435,15 +391,8 @@ def test_assume_role_allow_head_nonexistent(): assert status == 404 -@attr(resource='assume role with web identity') -@attr(method='get') -@attr(operation='check') -@attr(assertion='assuming role through web token') -@attr('webidentity_test') @pytest.mark.webidentity_test -@attr('token_claims_trust_policy_test') @pytest.mark.token_claims_trust_policy_test -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_assume_role_with_web_identity(): check_webidentity() @@ -492,11 +441,6 @@ def test_assume_role_with_web_identity(): ) ''' -@attr(resource='assume role with web identity') -@attr(method='get') -@attr(operation='check') -@attr(assertion='assume_role_with_web_token creds expire') -@attr('webidentity_test') @pytest.mark.webidentity_test def test_assume_role_with_web_identity_invalid_webtoken(): resp_error=None @@ -543,15 +487,8 @@ def test_assume_role_with_web_identity_invalid_webtoken(): # Session Policy Tests ####################### -@attr(resource='assume role with web identity') -@attr(method='get') -@attr(operation='check') -@attr(assertion='checking session policy working for two different buckets') -@attr('webidentity_test') @pytest.mark.webidentity_test -@attr('session_policy') @pytest.mark.session_policy -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_session_policy_check_on_different_buckets(): check_webidentity() @@ -619,15 +556,8 @@ def test_session_policy_check_on_different_buckets(): ) -@attr(resource='assume role with web identity') 
-@attr(method='put') -@attr(operation='check') -@attr(assertion='checking session policy working for same bucket') -@attr('webidentity_test') @pytest.mark.webidentity_test -@attr('session_policy') @pytest.mark.session_policy -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_session_policy_check_on_same_bucket(): check_webidentity() @@ -683,15 +613,8 @@ def test_session_policy_check_on_same_bucket(): ) -@attr(resource='assume role with web identity') -@attr(method='get') -@attr(operation='check') -@attr(assertion='checking put_obj op denial') -@attr('webidentity_test') @pytest.mark.webidentity_test -@attr('session_policy') @pytest.mark.session_policy -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_session_policy_check_put_obj_denial(): check_webidentity() @@ -752,15 +675,8 @@ def test_session_policy_check_put_obj_denial(): ) -@attr(resource='assume role with web identity') -@attr(method='get') -@attr(operation='check') -@attr(assertion='checking put_obj working by swapping policies') -@attr('webidentity_test') @pytest.mark.webidentity_test -@attr('session_policy') @pytest.mark.session_policy -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_swapping_role_policy_and_session_policy(): check_webidentity() @@ -816,15 +732,8 @@ def test_swapping_role_policy_and_session_policy(): OpenIDConnectProviderArn=oidc_arn ) -@attr(resource='assume role with web identity') -@attr(method='put') -@attr(operation='check') -@attr(assertion='checking put_obj working by setting different permissions to role and session policy') -@attr('webidentity_test') @pytest.mark.webidentity_test -@attr('session_policy') @pytest.mark.session_policy -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_session_policy_check_different_op_permissions(): check_webidentity() @@ -885,15 +794,8 @@ def test_session_policy_check_different_op_permissions(): ) -@attr(resource='assume role with web identity') -@attr(method='put') 
-@attr(operation='check') -@attr(assertion='checking op behaviour with deny effect') -@attr('webidentity_test') @pytest.mark.webidentity_test -@attr('session_policy') @pytest.mark.session_policy -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_session_policy_check_with_deny_effect(): check_webidentity() @@ -953,15 +855,8 @@ def test_session_policy_check_with_deny_effect(): ) -@attr(resource='assume role with web identity') -@attr(method='put') -@attr(operation='check') -@attr(assertion='checking put_obj working with deny and allow on same op') -@attr('webidentity_test') @pytest.mark.webidentity_test -@attr('session_policy') @pytest.mark.session_policy -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_session_policy_check_with_deny_on_same_op(): check_webidentity() @@ -1021,15 +916,8 @@ def test_session_policy_check_with_deny_on_same_op(): OpenIDConnectProviderArn=oidc_arn ) -@attr(resource='assume role with web identity') -@attr(method='put') -@attr(operation='check') -@attr(assertion='checking op when bucket policy has role arn') -@attr('webidentity_test') @pytest.mark.webidentity_test -@attr('session_policy') @pytest.mark.session_policy -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_session_policy_bucket_policy_role_arn(): check_webidentity() @@ -1104,15 +992,8 @@ def test_session_policy_bucket_policy_role_arn(): OpenIDConnectProviderArn=oidc_arn ) -@attr(resource='assume role with web identity') -@attr(method='get') -@attr(operation='check') -@attr(assertion='checking op when bucket policy has session arn') -@attr('webidentity_test') @pytest.mark.webidentity_test -@attr('session_policy') @pytest.mark.session_policy -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_session_policy_bucket_policy_session_arn(): check_webidentity() @@ -1185,15 +1066,8 @@ def test_session_policy_bucket_policy_session_arn(): OpenIDConnectProviderArn=oidc_arn ) -@attr(resource='assume role with web identity') 
-@attr(method='put') -@attr(operation='check') -@attr(assertion='checking copy object op with role, session and bucket policy') -@attr('webidentity_test') @pytest.mark.webidentity_test -@attr('session_policy') @pytest.mark.session_policy -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_session_policy_copy_object(): check_webidentity() @@ -1273,15 +1147,8 @@ def test_session_policy_copy_object(): OpenIDConnectProviderArn=oidc_arn ) -@attr(resource='assume role with web identity') -@attr(method='put') -@attr(operation='check') -@attr(assertion='checking op is denied when no role policy') -@attr('webidentity_test') @pytest.mark.webidentity_test -@attr('session_policy') @pytest.mark.session_policy -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_session_policy_no_bucket_role_policy(): check_webidentity() @@ -1332,15 +1199,8 @@ def test_session_policy_no_bucket_role_policy(): OpenIDConnectProviderArn=oidc_arn ) -@attr(resource='assume role with web identity') -@attr(method='put') -@attr(operation='check') -@attr(assertion='checking op is denied when resource policy denies') -@attr('webidentity_test') @pytest.mark.webidentity_test -@attr('session_policy') @pytest.mark.session_policy -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_session_policy_bucket_policy_deny(): check_webidentity() @@ -1413,15 +1273,8 @@ def test_session_policy_bucket_policy_deny(): OpenIDConnectProviderArn=oidc_arn ) -@attr(resource='assume role with web identity') -@attr(method='get') -@attr(operation='check') -@attr(assertion='assuming role using web token using sub in trust policy') -@attr('webidentity_test') @pytest.mark.webidentity_test -@attr('token_claims_trust_policy_test') @pytest.mark.token_claims_trust_policy_test -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_assume_role_with_web_identity_with_sub(): check_webidentity() @@ -1469,15 +1322,8 @@ def test_assume_role_with_web_identity_with_sub(): 
OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"] ) -@attr(resource='assume role with web identity') -@attr(method='get') -@attr(operation='check') -@attr(assertion='assuming role using web token using azp in trust policy') -@attr('webidentity_test') @pytest.mark.webidentity_test -@attr('token_claims_trust_policy_test') @pytest.mark.token_claims_trust_policy_test -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_assume_role_with_web_identity_with_azp(): check_webidentity() @@ -1525,17 +1371,9 @@ def test_assume_role_with_web_identity_with_azp(): OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"] ) -@attr(resource='assume role with web identity') -@attr(method='get') -@attr(operation='check') -@attr(assertion='assuming role using web token using aws:RequestTag in trust policy') -@attr('webidentity_test') @pytest.mark.webidentity_test -@attr('abac_test') @pytest.mark.abac_test -@attr('token_request_tag_trust_policy_test') @pytest.mark.token_request_tag_trust_policy_test -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_assume_role_with_web_identity_with_request_tag(): check_webidentity() @@ -1582,17 +1420,9 @@ def test_assume_role_with_web_identity_with_request_tag(): OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"] ) -@attr(resource='assume role with web identity') -@attr(method='get') -@attr(operation='check') -@attr(assertion='assuming role using web token with aws:PrincipalTag in role policy') -@attr('webidentity_test') @pytest.mark.webidentity_test -@attr('abac_test') @pytest.mark.abac_test -@attr('token_principal_tag_role_policy_test') @pytest.mark.token_principal_tag_role_policy_test -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_assume_role_with_web_identity_with_principal_tag(): check_webidentity() @@ -1639,17 +1469,9 @@ def test_assume_role_with_web_identity_with_principal_tag(): OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"] ) 
-@attr(resource='assume role with web identity') -@attr(method='get') -@attr(operation='check') -@attr(assertion='assuming role using web token with aws:PrincipalTag in role policy') -@attr('webidentity_test') @pytest.mark.webidentity_test -@attr('abac_test') @pytest.mark.abac_test -@attr('token_principal_tag_role_policy_test') @pytest.mark.token_principal_tag_role_policy_test -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_assume_role_with_web_identity_for_all_values(): check_webidentity() @@ -1696,17 +1518,9 @@ def test_assume_role_with_web_identity_for_all_values(): OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"] ) -@attr(resource='assume role with web identity') -@attr(method='get') -@attr(operation='check') -@attr(assertion='assuming role using web token with aws:PrincipalTag in role policy') -@attr('webidentity_test') @pytest.mark.webidentity_test -@attr('abac_test') @pytest.mark.abac_test -@attr('token_principal_tag_role_policy_test') @pytest.mark.token_principal_tag_role_policy_test -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_assume_role_with_web_identity_for_all_values_deny(): check_webidentity() @@ -1755,17 +1569,9 @@ def test_assume_role_with_web_identity_for_all_values_deny(): OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"] ) -@attr(resource='assume role with web identity') -@attr(method='get') -@attr(operation='check') -@attr(assertion='assuming role using web token with aws:TagKeys in trust policy') -@attr('webidentity_test') @pytest.mark.webidentity_test -@attr('abac_test') @pytest.mark.abac_test -@attr('token_tag_keys_test') @pytest.mark.token_tag_keys_test -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_assume_role_with_web_identity_tag_keys_trust_policy(): check_webidentity() @@ -1812,17 +1618,9 @@ def test_assume_role_with_web_identity_tag_keys_trust_policy(): OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"] ) -@attr(resource='assume 
role with web identity') -@attr(method='get') -@attr(operation='check') -@attr(assertion='assuming role using web token with aws:TagKeys in role permission policy') -@attr('webidentity_test') @pytest.mark.webidentity_test -@attr('abac_test') @pytest.mark.abac_test -@attr('token_tag_keys_test') @pytest.mark.token_tag_keys_test -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_assume_role_with_web_identity_tag_keys_role_policy(): check_webidentity() @@ -1869,17 +1667,9 @@ def test_assume_role_with_web_identity_tag_keys_role_policy(): OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"] ) -@attr(resource='assume role with web identity') -@attr(method='put') -@attr(operation='check') -@attr(assertion='assuming role using web token with s3:ResourceTag in role permission policy') -@attr('webidentity_test') @pytest.mark.webidentity_test -@attr('abac_test') @pytest.mark.abac_test -@attr('token_resource_tags_test') @pytest.mark.token_resource_tags_test -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_assume_role_with_web_identity_resource_tag(): check_webidentity() @@ -1936,17 +1726,9 @@ def test_assume_role_with_web_identity_resource_tag(): OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"] ) -@attr(resource='assume role with web identity') -@attr(method='put') -@attr(operation='check') -@attr(assertion='assuming role using web token with s3:ResourceTag with missing tags on bucket') -@attr('webidentity_test') @pytest.mark.webidentity_test -@attr('abac_test') @pytest.mark.abac_test -@attr('token_resource_tags_test') @pytest.mark.token_resource_tags_test -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_assume_role_with_web_identity_resource_tag_deny(): check_webidentity() @@ -2003,17 +1785,9 @@ def test_assume_role_with_web_identity_resource_tag_deny(): OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"] ) -@attr(resource='assume role with web identity') -@attr(method='put') 
-@attr(operation='check') -@attr(assertion='assuming role using web token with s3:ResourceTag with wrong resource tag in policy') -@attr('webidentity_test') @pytest.mark.webidentity_test -@attr('abac_test') @pytest.mark.abac_test -@attr('token_resource_tags_test') @pytest.mark.token_resource_tags_test -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_assume_role_with_web_identity_wrong_resource_tag_deny(): check_webidentity() @@ -2073,17 +1847,9 @@ def test_assume_role_with_web_identity_wrong_resource_tag_deny(): OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"] ) -@attr(resource='assume role with web identity') -@attr(method='put') -@attr(operation='check') -@attr(assertion='assuming role using web token with s3:ResourceTag matching aws:PrincipalTag in role permission policy') -@attr('webidentity_test') @pytest.mark.webidentity_test -@attr('abac_test') @pytest.mark.abac_test -@attr('token_resource_tags_test') @pytest.mark.token_resource_tags_test -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_assume_role_with_web_identity_resource_tag_princ_tag(): check_webidentity() @@ -2145,17 +1911,9 @@ def test_assume_role_with_web_identity_resource_tag_princ_tag(): OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"] ) -@attr(resource='assume role with web identity') -@attr(method='put') -@attr(operation='check') -@attr(assertion='assuming role using web token with s3:ResourceTag used to test copy object') -@attr('webidentity_test') @pytest.mark.webidentity_test -@attr('abac_test') @pytest.mark.abac_test -@attr('token_resource_tags_test') @pytest.mark.token_resource_tags_test -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_assume_role_with_web_identity_resource_tag_copy_obj(): check_webidentity() @@ -2244,17 +2002,9 @@ def test_assume_role_with_web_identity_resource_tag_copy_obj(): OpenIDConnectProviderArn=oidc_response["OpenIDConnectProviderArn"] ) -@attr(resource='assume role with web 
identity') -@attr(method='put') -@attr(operation='check') -@attr(assertion='assuming role using web token with iam:ResourceTag in role trust policy') -@attr('webidentity_test') @pytest.mark.webidentity_test -@attr('abac_test') @pytest.mark.abac_test -@attr('token_role_tags_test') @pytest.mark.token_role_tags_test -@attr('fails_on_dbstore') @pytest.mark.fails_on_dbstore def test_assume_role_with_web_identity_role_resource_tag(): check_webidentity() From 494379c2ff93dcf72e5c5638522073ad9d88389a Mon Sep 17 00:00:00 2001 From: Casey Bodley Date: Sat, 21 Jan 2023 14:25:30 -0500 Subject: [PATCH 09/12] remove nose dependency from requirements.txt Signed-off-by: Casey Bodley --- requirements.txt | 1 - 1 file changed, 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 20ffec8..ac1d18f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,4 @@ PyYAML -nose >=1.0.0 boto >=2.6.0 boto3 >=1.0.0 # botocore-1.28 broke v2 signatures, see https://tracker.ceph.com/issues/58059 From d13ed28a5c1a4e4a694eb6950a7be687050d385c Mon Sep 17 00:00:00 2001 From: Casey Bodley Date: Sat, 21 Jan 2023 19:48:29 -0500 Subject: [PATCH 10/12] cleanup duplicate lines Signed-off-by: Casey Bodley --- s3tests_boto3/functional/test_s3.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/s3tests_boto3/functional/test_s3.py b/s3tests_boto3/functional/test_s3.py index 4b89623..5b77e2f 100644 --- a/s3tests_boto3/functional/test_s3.py +++ b/s3tests_boto3/functional/test_s3.py @@ -1807,7 +1807,6 @@ def test_object_set_get_unicode_metadata(): client.put_object(Bucket=bucket_name, Key='foo', Body='bar') response = client.get_object(Bucket=bucket_name, Key='foo') - got = response['Metadata']['meta1'].decode('utf-8') got = response['Metadata']['meta1'] print(got) print(u"Hello World\xe9") @@ -12055,7 +12054,6 @@ def test_get_bucket_policy_status(): def test_get_public_acl_bucket_policy_status(): bucket_name = get_new_bucket() client = get_client() - client = get_client() 
client.put_bucket_acl(Bucket=bucket_name, ACL='public-read') resp = client.get_bucket_policy_status(Bucket=bucket_name) assert resp['PolicyStatus']['IsPublic'] == True @@ -12063,7 +12061,6 @@ def test_get_public_acl_bucket_policy_status(): def test_get_authpublic_acl_bucket_policy_status(): bucket_name = get_new_bucket() client = get_client() - client = get_client() client.put_bucket_acl(Bucket=bucket_name, ACL='authenticated-read') resp = client.get_bucket_policy_status(Bucket=bucket_name) assert resp['PolicyStatus']['IsPublic'] == True @@ -12072,7 +12069,6 @@ def test_get_authpublic_acl_bucket_policy_status(): def test_get_publicpolicy_acl_bucket_policy_status(): bucket_name = get_new_bucket() client = get_client() - client = get_client() resp = client.get_bucket_policy_status(Bucket=bucket_name) assert resp['PolicyStatus']['IsPublic'] == False @@ -12101,7 +12097,6 @@ def test_get_publicpolicy_acl_bucket_policy_status(): def test_get_nonpublicpolicy_acl_bucket_policy_status(): bucket_name = get_new_bucket() client = get_client() - client = get_client() resp = client.get_bucket_policy_status(Bucket=bucket_name) assert resp['PolicyStatus']['IsPublic'] == False From 5e9f6e5ffb6affcd5ecdaf5e6de3de9489488e00 Mon Sep 17 00:00:00 2001 From: Casey Bodley Date: Sun, 22 Jan 2023 11:50:40 -0500 Subject: [PATCH 11/12] remove boostrap Signed-off-by: Casey Bodley --- bootstrap | 76 ------------------------------------------------------- 1 file changed, 76 deletions(-) delete mode 100755 bootstrap diff --git a/bootstrap b/bootstrap deleted file mode 100755 index 0bba312..0000000 --- a/bootstrap +++ /dev/null @@ -1,76 +0,0 @@ -#!/bin/bash -set -e - -virtualenv="virtualenv" -declare -a packages -source /etc/os-release - -case "$ID" in - debian|ubuntu|devuan) - packages=(debianutils python3-pip python3-virtualenv python3-dev libevent-dev libffi-dev libxml2-dev libxslt-dev zlib1g-dev) - for package in ${packages[@]}; do - if [ "$(dpkg --status -- $package 2>/dev/null|sed -n 
's/^Status: //p')" != "install ok installed" ]; then - # add a space after old values - missing="${missing:+$missing }$package" - fi - done - - if [ -n "$missing" ]; then - echo "$0: missing required DEB packages. Installing via sudo." 1>&2 - sudo apt-get -y install $missing - fi - ;; - centos|fedora|rhel|rocky|ol|virtuozzo) - - packages=(which python3-virtualenv python36-devel libevent-devel libffi-devel libxml2-devel libxslt-devel zlib-devel) - for package in ${packages[@]}; do - # When the package is python36-devel we change it to python3-devel on Fedora - if [[ ${package} == "python36-devel" && -f /etc/fedora-release ]]; then - package=python36 - fi - if [ "$(rpm -qa $package 2>/dev/null)" == "" ]; then - missing="${missing:+$missing }$package" - fi - done - - if [ -n "$missing" ]; then - echo "$0: Missing required RPM packages: ${missing}." 1>&2 - sudo yum -y install $missing - fi - ;; - opensuse*|suse|sles) - - packages=(which python3-virtualenv python3-devel libev-devel libffi-devel libxml2-devel libxslt-devel zlib-devel) - for package in ${packages[@]}; do - if [ "$(rpm -qa $package 2>/dev/null)" == "" ]; then - missing="${missing:+$missing }$package" - fi - if [ -n "$missing" ]; then - echo "$0: Missing required RPM packages: ${missing}." 
1>&2 - sudo zypper --non-interactive install --no-recommends $missing - fi - done - - ;; - *) - echo "Bootstrap script does not support this distro yet, consider adding the packages" - exit 1 -esac - - -# s3-tests only works on python 3.6 not newer versions of python3 -${virtualenv} --python=$(which python3.6) virtualenv - -# avoid pip bugs -./virtualenv/bin/pip3 install --upgrade pip - -# latest setuptools supporting python 2.7 -./virtualenv/bin/pip install setuptools==44.1.0 - -./virtualenv/bin/pip3 install -r requirements.txt - -# forbid setuptools from using the network because it'll try to use -# easy_install, and we really wanted pip; next line will fail if pip -# requirements.txt does not match setup.py requirements -- sucky but -# good enough for now -./virtualenv/bin/python3 setup.py develop From 7993dd02a53c83e5831fc62d954621f7aca92bf5 Mon Sep 17 00:00:00 2001 From: Casey Bodley Date: Wed, 25 Jan 2023 17:04:04 -0500 Subject: [PATCH 12/12] test_headers: add custom marks for auth_common/aws2/aws4 Signed-off-by: Casey Bodley --- pytest.ini | 3 + s3tests/functional/test_headers.py | 117 +++++++++++------------ s3tests_boto3/functional/test_headers.py | 103 ++++++++++---------- 3 files changed, 106 insertions(+), 117 deletions(-) diff --git a/pytest.ini b/pytest.ini index f4f638e..0e3bcba 100644 --- a/pytest.ini +++ b/pytest.ini @@ -2,6 +2,9 @@ markers = abac_test appendobject + auth_aws2 + auth_aws4 + auth_common bucket_policy bucket_encryption cloud_transition diff --git a/s3tests/functional/test_headers.py b/s3tests/functional/test_headers.py index 85171a6..b3eb576 100644 --- a/s3tests/functional/test_headers.py +++ b/s3tests/functional/test_headers.py @@ -168,18 +168,11 @@ def _setup_bad_object(headers=None, remove=None): _add_custom_headers(headers=headers, remove=remove) return bucket.new_key('foo') -def tag(*tags): - def wrap(func): - for tag in tags: - setattr(func, tag, True) - return func - return wrap - # # common tests # -@tag('auth_common') 
+@pytest.mark.auth_common @pytest.mark.fails_on_dbstore def test_object_create_bad_contentlength_none(): key = _setup_bad_object(remove=('Content-Length',)) @@ -190,7 +183,7 @@ def test_object_create_bad_contentlength_none(): assert e.error_code == 'MissingContentLength' -@tag('auth_common') +@pytest.mark.auth_common @pytest.mark.fails_on_rgw def test_object_create_bad_contentlength_mismatch_above(): content = 'bar' @@ -209,7 +202,7 @@ def test_object_create_bad_contentlength_mismatch_above(): assert e.error_code == 'RequestTimeout' -@tag('auth_common') +@pytest.mark.auth_common @pytest.mark.fails_on_dbstore def test_object_create_bad_authorization_empty(): key = _setup_bad_object({'Authorization': ''}) @@ -219,14 +212,14 @@ def test_object_create_bad_authorization_empty(): assert e.reason == 'Forbidden' assert e.error_code == 'AccessDenied' -@tag('auth_common') +@pytest.mark.auth_common @pytest.mark.fails_on_dbstore def test_object_create_date_and_amz_date(): date = formatdate(usegmt=True) key = _setup_bad_object({'Date': date, 'X-Amz-Date': date}) key.set_contents_from_string('bar') -@tag('auth_common') +@pytest.mark.auth_common @pytest.mark.fails_on_dbstore def test_object_create_amz_date_and_no_date(): date = formatdate(usegmt=True) @@ -235,7 +228,7 @@ def test_object_create_amz_date_and_no_date(): # the teardown is really messed up here. 
check it out -@tag('auth_common') +@pytest.mark.auth_common @pytest.mark.fails_on_dbstore def test_object_create_bad_authorization_none(): key = _setup_bad_object(remove=('Authorization',)) @@ -246,14 +239,14 @@ def test_object_create_bad_authorization_none(): assert e.error_code == 'AccessDenied' -@tag('auth_common') +@pytest.mark.auth_common @pytest.mark.fails_on_dbstore def test_bucket_create_contentlength_none(): _add_custom_headers(remove=('Content-Length',)) get_new_bucket() -@tag('auth_common') +@pytest.mark.auth_common @pytest.mark.fails_on_dbstore def test_object_acl_create_contentlength_none(): bucket = get_new_bucket() @@ -277,7 +270,7 @@ def _create_new_connection(): ) return TargetConnection(targets.main.default.conf, conn) -@tag('auth_common') +@pytest.mark.auth_common @pytest.mark.fails_on_rgw def test_bucket_create_bad_contentlength_empty(): conn = _create_new_connection() @@ -287,14 +280,14 @@ def test_bucket_create_bad_contentlength_empty(): assert e.reason.lower() == 'bad request' # some proxies vary the case -@tag('auth_common') +@pytest.mark.auth_common @pytest.mark.fails_on_dbstore def test_bucket_create_bad_contentlength_none(): _add_custom_headers(remove=('Content-Length',)) bucket = get_new_bucket() -@tag('auth_common') +@pytest.mark.auth_common @pytest.mark.fails_on_dbstore def test_bucket_create_bad_authorization_empty(): _add_custom_headers({'Authorization': ''}) @@ -305,7 +298,7 @@ def test_bucket_create_bad_authorization_empty(): # the teardown is really messed up here. 
check it out -@tag('auth_common') +@pytest.mark.auth_common @pytest.mark.fails_on_dbstore def test_bucket_create_bad_authorization_none(): _add_custom_headers(remove=('Authorization',)) @@ -318,7 +311,7 @@ def test_bucket_create_bad_authorization_none(): # AWS2 specific tests # -@tag('auth_aws2') +@pytest.mark.auth_aws2 @pytest.mark.fails_on_dbstore def test_object_create_bad_contentlength_mismatch_below_aws2(): check_aws2_support() @@ -331,7 +324,7 @@ def test_object_create_bad_contentlength_mismatch_below_aws2(): assert e.error_code == 'BadDigest' -@tag('auth_aws2') +@pytest.mark.auth_aws2 @pytest.mark.fails_on_dbstore def test_object_create_bad_authorization_incorrect_aws2(): check_aws2_support() @@ -342,7 +335,7 @@ def test_object_create_bad_authorization_incorrect_aws2(): assert e.error_code in ('AccessDenied', 'SignatureDoesNotMatch', 'InvalidAccessKeyId') -@tag('auth_aws2') +@pytest.mark.auth_aws2 @pytest.mark.fails_on_dbstore def test_object_create_bad_authorization_invalid_aws2(): check_aws2_support() @@ -352,7 +345,7 @@ def test_object_create_bad_authorization_invalid_aws2(): assert e.reason.lower() == 'bad request' # some proxies vary the case assert e.error_code == 'InvalidArgument' -@tag('auth_aws2') +@pytest.mark.auth_aws2 @pytest.mark.fails_on_dbstore def test_object_create_bad_date_none_aws2(): check_aws2_support() @@ -363,7 +356,7 @@ def test_object_create_bad_date_none_aws2(): assert e.error_code == 'AccessDenied' -@tag('auth_aws2') +@pytest.mark.auth_aws2 def test_bucket_create_bad_authorization_invalid_aws2(): check_aws2_support() _add_custom_headers({'Authorization': 'AWS HAHAHA'}) @@ -372,7 +365,7 @@ def test_bucket_create_bad_authorization_invalid_aws2(): assert e.reason.lower() == 'bad request' # some proxies vary the case assert e.error_code == 'InvalidArgument' -@tag('auth_aws2') +@pytest.mark.auth_aws2 @pytest.mark.fails_on_dbstore def test_bucket_create_bad_date_none_aws2(): check_aws2_support() @@ -395,7 +388,7 @@ def 
check_aws2_support(): pytest.skip('sigv2 tests disabled by S3_USE_SIGV4') -@tag('auth_aws4') +@pytest.mark.auth_aws4 def test_object_create_bad_md5_invalid_garbage_aws4(): check_aws4_support() key = _setup_bad_object({'Content-MD5':'AWS4 HAHAHA'}) @@ -406,7 +399,7 @@ def test_object_create_bad_md5_invalid_garbage_aws4(): assert e.error_code == 'InvalidDigest' -@tag('auth_aws4') +@pytest.mark.auth_aws4 def test_object_create_bad_contentlength_mismatch_below_aws4(): check_aws4_support() content = 'bar' @@ -419,7 +412,7 @@ def test_object_create_bad_contentlength_mismatch_below_aws4(): assert e.error_code == 'XAmzContentSHA256Mismatch' -@tag('auth_aws4') +@pytest.mark.auth_aws4 def test_object_create_bad_authorization_incorrect_aws4(): check_aws4_support() key = _setup_bad_object({'Authorization': 'AWS4-HMAC-SHA256 Credential=AKIAIGR7ZNNBHC5BKSUB/20150930/us-east-1/s3/aws4_request,SignedHeaders=host;user-agent,Signature=FWeDfwojDSdS2Ztmpfeubhd9isU='}) @@ -430,7 +423,7 @@ def test_object_create_bad_authorization_incorrect_aws4(): assert e.error_code in ('AccessDenied', 'SignatureDoesNotMatch', 'InvalidAccessKeyId') -@tag('auth_aws4') +@pytest.mark.auth_aws4 def test_object_create_bad_authorization_invalid_aws4(): check_aws4_support() key = _setup_bad_object({'Authorization': 'AWS4-HMAC-SHA256 Credential=HAHAHA'}) @@ -441,7 +434,7 @@ def test_object_create_bad_authorization_invalid_aws4(): assert e.error_code in ('AuthorizationHeaderMalformed', 'InvalidArgument') -@tag('auth_aws4') +@pytest.mark.auth_aws4 def test_object_create_bad_ua_empty_aws4(): check_aws4_support() key = _setup_bad_object({'User-Agent': ''}) @@ -452,7 +445,7 @@ def test_object_create_bad_ua_empty_aws4(): assert e.error_code == 'SignatureDoesNotMatch' -@tag('auth_aws4') +@pytest.mark.auth_aws4 def test_object_create_bad_ua_none_aws4(): check_aws4_support() key = _setup_bad_object(remove=('User-Agent',)) @@ -463,14 +456,14 @@ def test_object_create_bad_ua_none_aws4(): assert e.error_code == 
'SignatureDoesNotMatch' -@tag('auth_aws4') +@pytest.mark.auth_aws4 def test_object_create_bad_date_invalid_aws4(): check_aws4_support() key = _setup_bad_object({'Date': 'Bad Date'}) key.set_contents_from_string('bar') -@tag('auth_aws4') +@pytest.mark.auth_aws4 def test_object_create_bad_amz_date_invalid_aws4(): check_aws4_support() key = _setup_bad_object({'X-Amz-Date': 'Bad Date'}) @@ -481,14 +474,14 @@ def test_object_create_bad_amz_date_invalid_aws4(): assert e.error_code in ('AccessDenied', 'SignatureDoesNotMatch') -@tag('auth_aws4') +@pytest.mark.auth_aws4 def test_object_create_bad_date_empty_aws4(): check_aws4_support() key = _setup_bad_object({'Date': ''}) key.set_contents_from_string('bar') -@tag('auth_aws4') +@pytest.mark.auth_aws4 def test_object_create_bad_amz_date_empty_aws4(): check_aws4_support() key = _setup_bad_object({'X-Amz-Date': ''}) @@ -499,14 +492,14 @@ def test_object_create_bad_amz_date_empty_aws4(): assert e.error_code in ('AccessDenied', 'SignatureDoesNotMatch') -@tag('auth_aws4') +@pytest.mark.auth_aws4 def test_object_create_bad_date_none_aws4(): check_aws4_support() key = _setup_bad_object(remove=('Date',)) key.set_contents_from_string('bar') -@tag('auth_aws4') +@pytest.mark.auth_aws4 def test_object_create_bad_amz_date_none_aws4(): check_aws4_support() key = _setup_bad_object(remove=('X-Amz-Date',)) @@ -517,14 +510,14 @@ def test_object_create_bad_amz_date_none_aws4(): assert e.error_code in ('AccessDenied', 'SignatureDoesNotMatch') -@tag('auth_aws4') +@pytest.mark.auth_aws4 def test_object_create_bad_date_before_today_aws4(): check_aws4_support() key = _setup_bad_object({'Date': 'Tue, 07 Jul 2010 21:53:04 GMT'}) key.set_contents_from_string('bar') -@tag('auth_aws4') +@pytest.mark.auth_aws4 def test_object_create_bad_amz_date_before_today_aws4(): check_aws4_support() key = _setup_bad_object({'X-Amz-Date': '20100707T215304Z'}) @@ -535,14 +528,14 @@ def test_object_create_bad_amz_date_before_today_aws4(): assert e.error_code in 
('RequestTimeTooSkewed', 'SignatureDoesNotMatch') -@tag('auth_aws4') +@pytest.mark.auth_aws4 def test_object_create_bad_date_after_today_aws4(): check_aws4_support() key = _setup_bad_object({'Date': 'Tue, 07 Jul 2030 21:53:04 GMT'}) key.set_contents_from_string('bar') -@tag('auth_aws4') +@pytest.mark.auth_aws4 def test_object_create_bad_amz_date_after_today_aws4(): check_aws4_support() key = _setup_bad_object({'X-Amz-Date': '20300707T215304Z'}) @@ -553,14 +546,14 @@ def test_object_create_bad_amz_date_after_today_aws4(): assert e.error_code in ('RequestTimeTooSkewed', 'SignatureDoesNotMatch') -@tag('auth_aws4') +@pytest.mark.auth_aws4 def test_object_create_bad_date_before_epoch_aws4(): check_aws4_support() key = _setup_bad_object({'Date': 'Tue, 07 Jul 1950 21:53:04 GMT'}) key.set_contents_from_string('bar') -@tag('auth_aws4') +@pytest.mark.auth_aws4 def test_object_create_bad_amz_date_before_epoch_aws4(): check_aws4_support() key = _setup_bad_object({'X-Amz-Date': '19500707T215304Z'}) @@ -571,14 +564,14 @@ def test_object_create_bad_amz_date_before_epoch_aws4(): assert e.error_code in ('AccessDenied', 'SignatureDoesNotMatch') -@tag('auth_aws4') +@pytest.mark.auth_aws4 def test_object_create_bad_date_after_end_aws4(): check_aws4_support() key = _setup_bad_object({'Date': 'Tue, 07 Jul 9999 21:53:04 GMT'}) key.set_contents_from_string('bar') -@tag('auth_aws4') +@pytest.mark.auth_aws4 def test_object_create_bad_amz_date_after_end_aws4(): check_aws4_support() key = _setup_bad_object({'X-Amz-Date': '99990707T215304Z'}) @@ -589,7 +582,7 @@ def test_object_create_bad_amz_date_after_end_aws4(): assert e.error_code in ('RequestTimeTooSkewed', 'SignatureDoesNotMatch') -@tag('auth_aws4') +@pytest.mark.auth_aws4 def test_object_create_missing_signed_custom_header_aws4(): check_aws4_support() method='PUT' @@ -615,7 +608,7 @@ def test_object_create_missing_signed_custom_header_aws4(): assert res.reason == 'Forbidden' -@tag('auth_aws4') +@pytest.mark.auth_aws4 def 
test_object_create_missing_signed_header_aws4(): check_aws4_support() method='PUT' @@ -642,7 +635,7 @@ def test_object_create_missing_signed_header_aws4(): assert res.reason == 'Forbidden' -@tag('auth_aws4') +@pytest.mark.auth_aws4 def test_bucket_create_bad_authorization_invalid_aws4(): check_aws4_support() _add_custom_headers({'Authorization': 'AWS4 HAHAHA'}) @@ -653,7 +646,7 @@ def test_bucket_create_bad_authorization_invalid_aws4(): assert e.error_code == 'InvalidArgument' -@tag('auth_aws4') +@pytest.mark.auth_aws4 def test_bucket_create_bad_ua_empty_aws4(): check_aws4_support() _add_custom_headers({'User-Agent': ''}) @@ -663,7 +656,7 @@ def test_bucket_create_bad_ua_empty_aws4(): assert e.reason == 'Forbidden' assert e.error_code == 'SignatureDoesNotMatch' -@tag('auth_aws4') +@pytest.mark.auth_aws4 def test_bucket_create_bad_ua_none_aws4(): check_aws4_support() _add_custom_headers(remove=('User-Agent',)) @@ -674,14 +667,14 @@ def test_bucket_create_bad_ua_none_aws4(): assert e.error_code == 'SignatureDoesNotMatch' -@tag('auth_aws4') +@pytest.mark.auth_aws4 def test_bucket_create_bad_date_invalid_aws4(): check_aws4_support() _add_custom_headers({'Date': 'Bad Date'}) get_new_bucket() -@tag('auth_aws4') +@pytest.mark.auth_aws4 def test_bucket_create_bad_amz_date_invalid_aws4(): check_aws4_support() _add_custom_headers({'X-Amz-Date': 'Bad Date'}) @@ -692,14 +685,14 @@ def test_bucket_create_bad_amz_date_invalid_aws4(): assert e.error_code in ('AccessDenied', 'SignatureDoesNotMatch') -@tag('auth_aws4') +@pytest.mark.auth_aws4 def test_bucket_create_bad_date_empty_aws4(): check_aws4_support() _add_custom_headers({'Date': ''}) get_new_bucket() -@tag('auth_aws4') +@pytest.mark.auth_aws4 def test_bucket_create_bad_amz_date_empty_aws4(): check_aws4_support() _add_custom_headers({'X-Amz-Date': ''}) @@ -709,14 +702,14 @@ def test_bucket_create_bad_amz_date_empty_aws4(): assert e.reason == 'Forbidden' assert e.error_code in ('AccessDenied', 'SignatureDoesNotMatch') 
-@tag('auth_aws4') +@pytest.mark.auth_aws4 def test_bucket_create_bad_date_none_aws4(): check_aws4_support() _add_custom_headers(remove=('Date',)) get_new_bucket() -@tag('auth_aws4') +@pytest.mark.auth_aws4 def test_bucket_create_bad_amz_date_none_aws4(): check_aws4_support() _add_custom_headers(remove=('X-Amz-Date',)) @@ -727,14 +720,14 @@ def test_bucket_create_bad_amz_date_none_aws4(): assert e.error_code in ('AccessDenied', 'SignatureDoesNotMatch') -@tag('auth_aws4') +@pytest.mark.auth_aws4 def test_bucket_create_bad_date_before_today_aws4(): check_aws4_support() _add_custom_headers({'Date': 'Tue, 07 Jul 2010 21:53:04 GMT'}) get_new_bucket() -@tag('auth_aws4') +@pytest.mark.auth_aws4 def test_bucket_create_bad_amz_date_before_today_aws4(): check_aws4_support() _add_custom_headers({'X-Amz-Date': '20100707T215304Z'}) @@ -745,14 +738,14 @@ def test_bucket_create_bad_amz_date_before_today_aws4(): assert e.error_code in ('RequestTimeTooSkewed', 'SignatureDoesNotMatch') -@tag('auth_aws4') +@pytest.mark.auth_aws4 def test_bucket_create_bad_date_after_today_aws4(): check_aws4_support() _add_custom_headers({'Date': 'Tue, 07 Jul 2030 21:53:04 GMT'}) get_new_bucket() -@tag('auth_aws4') +@pytest.mark.auth_aws4 def test_bucket_create_bad_amz_date_after_today_aws4(): check_aws4_support() _add_custom_headers({'X-Amz-Date': '20300707T215304Z'}) @@ -763,14 +756,14 @@ def test_bucket_create_bad_amz_date_after_today_aws4(): assert e.error_code in ('RequestTimeTooSkewed', 'SignatureDoesNotMatch') -@tag('auth_aws4') +@pytest.mark.auth_aws4 def test_bucket_create_bad_date_before_epoch_aws4(): check_aws4_support() _add_custom_headers({'Date': 'Tue, 07 Jul 1950 21:53:04 GMT'}) get_new_bucket() -@tag('auth_aws4') +@pytest.mark.auth_aws4 def test_bucket_create_bad_amz_date_before_epoch_aws4(): check_aws4_support() _add_custom_headers({'X-Amz-Date': '19500707T215304Z'}) diff --git a/s3tests_boto3/functional/test_headers.py b/s3tests_boto3/functional/test_headers.py index 4573d72..66cabe5 
100644 --- a/s3tests_boto3/functional/test_headers.py +++ b/s3tests_boto3/functional/test_headers.py @@ -149,63 +149,56 @@ def _remove_header_create_bad_bucket(remove, client=None): return e -def tag(*tags): - def wrap(func): - for tag in tags: - setattr(func, tag, True) - return func - return wrap - # # common tests # -@tag('auth_common') +@pytest.mark.auth_common def test_object_create_bad_md5_invalid_short(): e = _add_header_create_bad_object({'Content-MD5':'YWJyYWNhZGFicmE='}) status, error_code = _get_status_and_error_code(e.response) assert status == 400 assert error_code == 'InvalidDigest' -@tag('auth_common') +@pytest.mark.auth_common def test_object_create_bad_md5_bad(): e = _add_header_create_bad_object({'Content-MD5':'rL0Y20xC+Fzt72VPzMSk2A=='}) status, error_code = _get_status_and_error_code(e.response) assert status == 400 assert error_code == 'BadDigest' -@tag('auth_common') +@pytest.mark.auth_common def test_object_create_bad_md5_empty(): e = _add_header_create_bad_object({'Content-MD5':''}) status, error_code = _get_status_and_error_code(e.response) assert status == 400 assert error_code == 'InvalidDigest' -@tag('auth_common') +@pytest.mark.auth_common def test_object_create_bad_md5_none(): bucket_name, key_name = _remove_header_create_object('Content-MD5') client = get_client() client.put_object(Bucket=bucket_name, Key=key_name, Body='bar') -@tag('auth_common') +@pytest.mark.auth_common def test_object_create_bad_expect_mismatch(): bucket_name, key_name = _add_header_create_object({'Expect': 200}) client = get_client() client.put_object(Bucket=bucket_name, Key=key_name, Body='bar') -@tag('auth_common') +@pytest.mark.auth_common def test_object_create_bad_expect_empty(): bucket_name, key_name = _add_header_create_object({'Expect': ''}) client = get_client() client.put_object(Bucket=bucket_name, Key=key_name, Body='bar') -@tag('auth_common') +@pytest.mark.auth_common def test_object_create_bad_expect_none(): bucket_name, key_name = 
_remove_header_create_object('Expect') client = get_client() client.put_object(Bucket=bucket_name, Key=key_name, Body='bar') -@tag('auth_common') +@pytest.mark.auth_common # TODO: remove 'fails_on_rgw' and once we have learned how to remove the content-length header @pytest.mark.fails_on_rgw def test_object_create_bad_contentlength_empty(): @@ -213,7 +206,7 @@ def test_object_create_bad_contentlength_empty(): status, error_code = _get_status_and_error_code(e.response) assert status == 400 -@tag('auth_common') +@pytest.mark.auth_common @pytest.mark.fails_on_mod_proxy_fcgi def test_object_create_bad_contentlength_negative(): client = get_client() @@ -223,7 +216,7 @@ def test_object_create_bad_contentlength_negative(): status = _get_status(e.response) assert status == 400 -@tag('auth_common') +@pytest.mark.auth_common # TODO: remove 'fails_on_rgw' and once we have learned how to remove the content-length header @pytest.mark.fails_on_rgw def test_object_create_bad_contentlength_none(): @@ -233,20 +226,20 @@ def test_object_create_bad_contentlength_none(): assert status == 411 assert error_code == 'MissingContentLength' -@tag('auth_common') +@pytest.mark.auth_common def test_object_create_bad_contenttype_invalid(): bucket_name, key_name = _add_header_create_object({'Content-Type': 'text/plain'}) client = get_client() client.put_object(Bucket=bucket_name, Key=key_name, Body='bar') -@tag('auth_common') +@pytest.mark.auth_common def test_object_create_bad_contenttype_empty(): client = get_client() key_name = 'foo' bucket_name = get_new_bucket() client.put_object(Bucket=bucket_name, Key=key_name, Body='bar', ContentType='') -@tag('auth_common') +@pytest.mark.auth_common def test_object_create_bad_contenttype_none(): bucket_name = get_new_bucket() key_name = 'foo' @@ -255,7 +248,7 @@ def test_object_create_bad_contenttype_none(): client.put_object(Bucket=bucket_name, Key=key_name, Body='bar') -@tag('auth_common') +@pytest.mark.auth_common # TODO: remove 'fails_on_rgw' and 
once we have learned how to remove the authorization header @pytest.mark.fails_on_rgw def test_object_create_bad_authorization_empty(): @@ -263,7 +256,7 @@ def test_object_create_bad_authorization_empty(): status, error_code = _get_status_and_error_code(e.response) assert status == 403 -@tag('auth_common') +@pytest.mark.auth_common # TODO: remove 'fails_on_rgw' and once we have learned how to pass both the 'Date' and 'X-Amz-Date' header during signing and not 'X-Amz-Date' before @pytest.mark.fails_on_rgw def test_object_create_date_and_amz_date(): @@ -272,7 +265,7 @@ def test_object_create_date_and_amz_date(): client = get_client() client.put_object(Bucket=bucket_name, Key=key_name, Body='bar') -@tag('auth_common') +@pytest.mark.auth_common # TODO: remove 'fails_on_rgw' and once we have learned how to pass both the 'Date' and 'X-Amz-Date' header during signing and not 'X-Amz-Date' before @pytest.mark.fails_on_rgw def test_object_create_amz_date_and_no_date(): @@ -282,7 +275,7 @@ def test_object_create_amz_date_and_no_date(): client.put_object(Bucket=bucket_name, Key=key_name, Body='bar') # the teardown is really messed up here. 
check it out -@tag('auth_common') +@pytest.mark.auth_common # TODO: remove 'fails_on_rgw' and once we have learned how to remove the authorization header @pytest.mark.fails_on_rgw def test_object_create_bad_authorization_none(): @@ -290,14 +283,14 @@ def test_object_create_bad_authorization_none(): status, error_code = _get_status_and_error_code(e.response) assert status == 403 -@tag('auth_common') +@pytest.mark.auth_common # TODO: remove 'fails_on_rgw' and once we have learned how to remove the content-length header @pytest.mark.fails_on_rgw def test_bucket_create_contentlength_none(): remove = 'Content-Length' _remove_header_create_bucket(remove) -@tag('auth_common') +@pytest.mark.auth_common # TODO: remove 'fails_on_rgw' and once we have learned how to remove the content-length header @pytest.mark.fails_on_rgw def test_object_acl_create_contentlength_none(): @@ -313,7 +306,7 @@ def test_object_acl_create_contentlength_none(): client.meta.events.register('before-call.s3.PutObjectAcl', remove_header) client.put_object_acl(Bucket=bucket_name, Key='foo', ACL='public-read') -@tag('auth_common') +@pytest.mark.auth_common def test_bucket_put_bad_canned_acl(): bucket_name = get_new_bucket() client = get_client() @@ -326,7 +319,7 @@ def test_bucket_put_bad_canned_acl(): status = _get_status(e.response) assert status == 400 -@tag('auth_common') +@pytest.mark.auth_common def test_bucket_create_bad_expect_mismatch(): bucket_name = get_new_bucket_name() client = get_client() @@ -336,12 +329,12 @@ def test_bucket_create_bad_expect_mismatch(): client.meta.events.register('before-call.s3.CreateBucket', add_headers) client.create_bucket(Bucket=bucket_name) -@tag('auth_common') +@pytest.mark.auth_common def test_bucket_create_bad_expect_empty(): headers = {'Expect': ''} _add_header_create_bucket(headers) -@tag('auth_common') +@pytest.mark.auth_common # TODO: The request isn't even making it to the RGW past the frontend # This test had 'fails_on_rgw' before the move to boto3 
@pytest.mark.fails_on_rgw @@ -351,7 +344,7 @@ def test_bucket_create_bad_contentlength_empty(): status, error_code = _get_status_and_error_code(e.response) assert status == 400 -@tag('auth_common') +@pytest.mark.auth_common @pytest.mark.fails_on_mod_proxy_fcgi def test_bucket_create_bad_contentlength_negative(): headers = {'Content-Length': '-1'} @@ -359,14 +352,14 @@ def test_bucket_create_bad_contentlength_negative(): status = _get_status(e.response) assert status == 400 -@tag('auth_common') +@pytest.mark.auth_common # TODO: remove 'fails_on_rgw' and once we have learned how to remove the content-length header @pytest.mark.fails_on_rgw def test_bucket_create_bad_contentlength_none(): remove = 'Content-Length' _remove_header_create_bucket(remove) -@tag('auth_common') +@pytest.mark.auth_common # TODO: remove 'fails_on_rgw' and once we have learned how to manipulate the authorization header @pytest.mark.fails_on_rgw def test_bucket_create_bad_authorization_empty(): @@ -376,7 +369,7 @@ def test_bucket_create_bad_authorization_empty(): assert status == 403 assert error_code == 'AccessDenied' -@tag('auth_common') +@pytest.mark.auth_common # TODO: remove 'fails_on_rgw' and once we have learned how to manipulate the authorization header @pytest.mark.fails_on_rgw def test_bucket_create_bad_authorization_none(): @@ -385,7 +378,7 @@ def test_bucket_create_bad_authorization_none(): assert status == 403 assert error_code == 'AccessDenied' -@tag('auth_aws2') +@pytest.mark.auth_aws2 def test_object_create_bad_md5_invalid_garbage_aws2(): v2_client = get_v2_client() headers = {'Content-MD5': 'AWS HAHAHA'} @@ -394,7 +387,7 @@ def test_object_create_bad_md5_invalid_garbage_aws2(): assert status == 400 assert error_code == 'InvalidDigest' -@tag('auth_aws2') +@pytest.mark.auth_aws2 # TODO: remove 'fails_on_rgw' and once we have learned how to manipulate the Content-Length header @pytest.mark.fails_on_rgw def test_object_create_bad_contentlength_mismatch_below_aws2(): @@ -407,7 +400,7 
@@ def test_object_create_bad_contentlength_mismatch_below_aws2(): assert status == 400 assert error_code == 'BadDigest' -@tag('auth_aws2') +@pytest.mark.auth_aws2 # TODO: remove 'fails_on_rgw' and once we have learned how to manipulate the authorization header @pytest.mark.fails_on_rgw def test_object_create_bad_authorization_incorrect_aws2(): @@ -418,7 +411,7 @@ def test_object_create_bad_authorization_incorrect_aws2(): assert status == 403 assert error_code == 'InvalidDigest' -@tag('auth_aws2') +@pytest.mark.auth_aws2 # TODO: remove 'fails_on_rgw' and once we have learned how to manipulate the authorization header @pytest.mark.fails_on_rgw def test_object_create_bad_authorization_invalid_aws2(): @@ -429,21 +422,21 @@ def test_object_create_bad_authorization_invalid_aws2(): assert status == 400 assert error_code == 'InvalidArgument' -@tag('auth_aws2') +@pytest.mark.auth_aws2 def test_object_create_bad_ua_empty_aws2(): v2_client = get_v2_client() headers = {'User-Agent': ''} bucket_name, key_name = _add_header_create_object(headers, v2_client) v2_client.put_object(Bucket=bucket_name, Key=key_name, Body='bar') -@tag('auth_aws2') +@pytest.mark.auth_aws2 def test_object_create_bad_ua_none_aws2(): v2_client = get_v2_client() remove = 'User-Agent' bucket_name, key_name = _remove_header_create_object(remove, v2_client) v2_client.put_object(Bucket=bucket_name, Key=key_name, Body='bar') -@tag('auth_aws2') +@pytest.mark.auth_aws2 def test_object_create_bad_date_invalid_aws2(): v2_client = get_v2_client() headers = {'x-amz-date': 'Bad Date'} @@ -452,7 +445,7 @@ def test_object_create_bad_date_invalid_aws2(): assert status == 403 assert error_code == 'AccessDenied' -@tag('auth_aws2') +@pytest.mark.auth_aws2 def test_object_create_bad_date_empty_aws2(): v2_client = get_v2_client() headers = {'x-amz-date': ''} @@ -461,7 +454,7 @@ def test_object_create_bad_date_empty_aws2(): assert status == 403 assert error_code == 'AccessDenied' -@tag('auth_aws2') +@pytest.mark.auth_aws2 # 
TODO: remove 'fails_on_rgw' and once we have learned how to remove the date header @pytest.mark.fails_on_rgw def test_object_create_bad_date_none_aws2(): @@ -472,7 +465,7 @@ def test_object_create_bad_date_none_aws2(): assert status == 403 assert error_code == 'AccessDenied' -@tag('auth_aws2') +@pytest.mark.auth_aws2 def test_object_create_bad_date_before_today_aws2(): v2_client = get_v2_client() headers = {'x-amz-date': 'Tue, 07 Jul 2010 21:53:04 GMT'} @@ -481,7 +474,7 @@ def test_object_create_bad_date_before_today_aws2(): assert status == 403 assert error_code == 'RequestTimeTooSkewed' -@tag('auth_aws2') +@pytest.mark.auth_aws2 def test_object_create_bad_date_before_epoch_aws2(): v2_client = get_v2_client() headers = {'x-amz-date': 'Tue, 07 Jul 1950 21:53:04 GMT'} @@ -490,7 +483,7 @@ def test_object_create_bad_date_before_epoch_aws2(): assert status == 403 assert error_code == 'AccessDenied' -@tag('auth_aws2') +@pytest.mark.auth_aws2 def test_object_create_bad_date_after_end_aws2(): v2_client = get_v2_client() headers = {'x-amz-date': 'Tue, 07 Jul 9999 21:53:04 GMT'} @@ -499,7 +492,7 @@ def test_object_create_bad_date_after_end_aws2(): assert status == 403 assert error_code == 'RequestTimeTooSkewed' -@tag('auth_aws2') +@pytest.mark.auth_aws2 # TODO: remove 'fails_on_rgw' and once we have learned how to remove the date header @pytest.mark.fails_on_rgw def test_bucket_create_bad_authorization_invalid_aws2(): @@ -510,19 +503,19 @@ def test_bucket_create_bad_authorization_invalid_aws2(): assert status == 400 assert error_code == 'InvalidArgument' -@tag('auth_aws2') +@pytest.mark.auth_aws2 def test_bucket_create_bad_ua_empty_aws2(): v2_client = get_v2_client() headers = {'User-Agent': ''} _add_header_create_bucket(headers, v2_client) -@tag('auth_aws2') +@pytest.mark.auth_aws2 def test_bucket_create_bad_ua_none_aws2(): v2_client = get_v2_client() remove = 'User-Agent' _remove_header_create_bucket(remove, v2_client) -@tag('auth_aws2') +@pytest.mark.auth_aws2 def 
test_bucket_create_bad_date_invalid_aws2(): v2_client = get_v2_client() headers = {'x-amz-date': 'Bad Date'} @@ -531,7 +524,7 @@ def test_bucket_create_bad_date_invalid_aws2(): assert status == 403 assert error_code == 'AccessDenied' -@tag('auth_aws2') +@pytest.mark.auth_aws2 def test_bucket_create_bad_date_empty_aws2(): v2_client = get_v2_client() headers = {'x-amz-date': ''} @@ -540,7 +533,7 @@ def test_bucket_create_bad_date_empty_aws2(): assert status == 403 assert error_code == 'AccessDenied' -@tag('auth_aws2') +@pytest.mark.auth_aws2 # TODO: remove 'fails_on_rgw' and once we have learned how to remove the date header @pytest.mark.fails_on_rgw def test_bucket_create_bad_date_none_aws2(): @@ -551,7 +544,7 @@ def test_bucket_create_bad_date_none_aws2(): assert status == 403 assert error_code == 'AccessDenied' -@tag('auth_aws2') +@pytest.mark.auth_aws2 def test_bucket_create_bad_date_before_today_aws2(): v2_client = get_v2_client() headers = {'x-amz-date': 'Tue, 07 Jul 2010 21:53:04 GMT'} @@ -560,7 +553,7 @@ def test_bucket_create_bad_date_before_today_aws2(): assert status == 403 assert error_code == 'RequestTimeTooSkewed' -@tag('auth_aws2') +@pytest.mark.auth_aws2 def test_bucket_create_bad_date_after_today_aws2(): v2_client = get_v2_client() headers = {'x-amz-date': 'Tue, 07 Jul 2030 21:53:04 GMT'} @@ -569,7 +562,7 @@ def test_bucket_create_bad_date_after_today_aws2(): assert status == 403 assert error_code == 'RequestTimeTooSkewed' -@tag('auth_aws2') +@pytest.mark.auth_aws2 def test_bucket_create_bad_date_before_epoch_aws2(): v2_client = get_v2_client() headers = {'x-amz-date': 'Tue, 07 Jul 1950 21:53:04 GMT'}