From 3cf86161d04bc2d63e5c5dd97a6e3e2ec728ef7b Mon Sep 17 00:00:00 2001 From: "Robin H. Johnson" Date: Mon, 15 Jun 2015 05:15:28 +0000 Subject: [PATCH 01/27] Use py3 print. Signed-off-by: Robin H. Johnson --- s3tests/functional/__init__.py | 26 ++++++++++++++------------ 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/s3tests/functional/__init__.py b/s3tests/functional/__init__.py index a5917e1..fe5fa45 100644 --- a/s3tests/functional/__init__.py +++ b/s3tests/functional/__init__.py @@ -1,3 +1,5 @@ +from __future__ import print_function +import sys import ConfigParser import boto.exception import boto.s3.connection @@ -55,15 +57,15 @@ def choose_bucket_prefix(template, max_len=30): def nuke_prefixed_buckets_on_conn(prefix, name, conn): - print 'Cleaning buckets from connection {name} prefix {prefix!r}.'.format( + print('Cleaning buckets from connection {name} prefix {prefix!r}.'.format( name=name, prefix=prefix, - ) + )) for bucket in conn.get_all_buckets(): - print 'prefix=',prefix + print('prefix=',prefix) if bucket.name.startswith(prefix): - print 'Cleaning bucket {bucket}'.format(bucket=bucket) + print('Cleaning bucket {bucket}'.format(bucket=bucket)) success = False for i in xrange(2): try: @@ -81,17 +83,17 @@ def nuke_prefixed_buckets_on_conn(prefix, name, conn): raise e keys = bucket.list(); for key in keys: - print 'Cleaning bucket {bucket} key {key}'.format( + print('Cleaning bucket {bucket} key {key}'.format( bucket=bucket, key=key, - ) + )) # key.set_canned_acl('private') bucket.delete_key(key.name, version_id = key.version_id) bucket.delete() success = True except boto.exception.S3ResponseError as e: if e.error_code != 'AccessDenied': - print 'GOT UNWANTED ERROR', e.error_code + print('GOT UNWANTED ERROR', e.error_code) raise # seems like we don't have permissions set appropriately, we'll # modify permissions and retry @@ -107,26 +109,26 @@ def nuke_prefixed_buckets(prefix): # If no regions are specified, use the simple method if 
targets.main.master == None: for name, conn in s3.items(): - print 'Deleting buckets on {name}'.format(name=name) + print('Deleting buckets on {name}'.format(name=name)) nuke_prefixed_buckets_on_conn(prefix, name, conn) else: # First, delete all buckets on the master connection for name, conn in s3.items(): if conn == targets.main.master.connection: - print 'Deleting buckets on {name} (master)'.format(name=name) + print('Deleting buckets on {name} (master)'.format(name=name)) nuke_prefixed_buckets_on_conn(prefix, name, conn) # Then sync to propagate deletes to secondaries region_sync_meta(targets.main, targets.main.master.connection) - print 'region-sync in nuke_prefixed_buckets' + print('region-sync in nuke_prefixed_buckets') # Now delete remaining buckets on any other connection for name, conn in s3.items(): if conn != targets.main.master.connection: - print 'Deleting buckets on {name} (non-master)'.format(name=name) + print('Deleting buckets on {name} (non-master)'.format(name=name)) nuke_prefixed_buckets_on_conn(prefix, name, conn) - print 'Done with cleanup of test buckets.' + print('Done with cleanup of test buckets.') class TargetConfig: def __init__(self, cfg, section): From a48a8983b7770f5c693a0530e399d86abbd74f15 Mon Sep 17 00:00:00 2001 From: "Robin H. Johnson" Date: Mon, 15 Jun 2015 05:18:07 +0000 Subject: [PATCH 02/27] refactor out _make_request and _make_bucket_request, with a common _make_raw_request; ready for s3website testing. Signed-off-by: Robin H. 
Johnson --- s3tests/functional/__init__.py | 87 ++++++++++++++++++++++++++++++++++ s3tests/functional/test_s3.py | 56 +--------------------- 2 files changed, 89 insertions(+), 54 deletions(-) diff --git a/s3tests/functional/__init__.py b/s3tests/functional/__init__.py index fe5fa45..f03bcc2 100644 --- a/s3tests/functional/__init__.py +++ b/s3tests/functional/__init__.py @@ -8,6 +8,8 @@ import itertools import os import random import string +from httplib import HTTPConnection, HTTPSConnection +from urlparse import urlparse from .utils import region_sync_meta @@ -312,6 +314,10 @@ def setup(): 'user_id', 'display_name', 'email', + 's3website_domain', + 'host', + 'port', + 'is_secure', ]: try: config[name][var] = cfg.get(section, var) @@ -394,3 +400,84 @@ def get_new_bucket(target=None, name=None, headers=None): # ignore that as astronomically unlikely bucket = connection.create_bucket(name, location=target.conf.api_name, headers=headers) return bucket + +def _make_request(method, bucket, key, body=None, authenticated=False, response_headers=None, request_headers=None, expires_in=100000, path_style=True): + """ + issue a request for a specified method, on a specified , + with a specified (optional) body (encrypted per the connection), and + return the response (status, reason) + """ + if response_headers is None: + response_headers = {} + if request_headers is None: + request_headers = {} + if not path_style: + conn = bucket.connection + request_headers['Host'] = conn.calling_format.build_host(conn.server_name(), bucket.name) + + if authenticated: + url = key.generate_url(expires_in, method=method, response_headers=response_headers, headers=request_headers) + o = urlparse(url) + path = o.path + '?' 
+ o.query + else: + if path_style: + path = '/{bucket}/{obj}'.format(bucket=key.bucket.name, obj=key.name) + else: + path = '/{obj}'.format(bucket=key.bucket.name, obj=key.name) + + return _make_raw_request(host=s3.main.host, port=s3.main.port, method=method, path=path, body=body, request_headers=request_headers, secure=s3.main.is_secure) + +def _make_bucket_request(method, bucket, body=None, authenticated=False, response_headers=None, request_headers=None, expires_in=100000, path_style=True): + """ + issue a request for a specified method, on a specified , + with a specified (optional) body (encrypted per the connection), and + return the response (status, reason) + """ + if response_headers is None: + response_headers = {} + if request_headers is None: + request_headers = {} + if not path_style: + conn = bucket.connection + request_headers['Host'] = conn.calling_format.build_host(conn.server_name(), bucket.name) + + if authenticated: + url = bucket.generate_url(expires_in, method=method, response_headers=response_headers, headers=request_headers) + o = urlparse(url) + path = o.path + '?' 
+ o.query + else: + if path_style: + path = '/{bucket}'.format(bucket=bucket.name) + else: + path = '/' + + return _make_raw_request(host=s3.main.host, port=s3.main.port, method=method, path=path, body=body, request_headers=request_headers, secure=s3.main.is_secure) + +def _make_raw_request(host, port, method, path, body=None, request_headers=None, secure=False): + if secure: + class_ = HTTPSConnection + else: + class_ = HTTPConnection + + if request_headers is None: + request_headers = {} + + skip_host=('Host' in request_headers) + skip_accept_encoding = False + c = class_(host, port, strict=True) + + # We do the request manually, so we can muck with headers + #c.request(method, path, body=body, headers=request_headers) + c.connect() + c.putrequest(method, path, skip_host, skip_accept_encoding) + for k,v in request_headers.items(): + c.putheader(k,v) + c.endheaders(message_body=body) + + res = c.getresponse() + #c.close() + + print(res.status, res.reason) + return res + + diff --git a/s3tests/functional/test_s3.py b/s3tests/functional/test_s3.py index cdd61f8..5c71e1e 100644 --- a/s3tests/functional/test_s3.py +++ b/s3tests/functional/test_s3.py @@ -27,9 +27,6 @@ import re import xml.etree.ElementTree as ET -from httplib import HTTPConnection, HTTPSConnection -from urlparse import urlparse - from nose.tools import eq_ as eq from nose.plugins.attrib import attr from nose.plugins.skip import SkipTest @@ -53,6 +50,8 @@ from . 
import ( config, get_prefix, is_slow_backend, + _make_request, + _make_bucket_request, ) @@ -2455,57 +2454,6 @@ def _setup_bucket_request(bucket_acl=None): return bucket -def _make_request(method, bucket, key, body=None, authenticated=False, response_headers=None, expires_in=100000): - """ - issue a request for a specified method, on a specified , - with a specified (optional) body (encrypted per the connection), and - return the response (status, reason) - """ - if authenticated: - url = key.generate_url(expires_in, method=method, response_headers=response_headers) - o = urlparse(url) - path = o.path + '?' + o.query - else: - path = '/{bucket}/{obj}'.format(bucket=key.bucket.name, obj=key.name) - - if s3.main.is_secure: - class_ = HTTPSConnection - else: - class_ = HTTPConnection - - c = class_(s3.main.host, s3.main.port, strict=True) - c.request(method, path, body=body) - res = c.getresponse() - - print res.status, res.reason - return res - -def _make_bucket_request(method, bucket, body=None, authenticated=False, expires_in=100000): - """ - issue a request for a specified method, on a specified , - with a specified (optional) body (encrypted per the connection), and - return the response (status, reason) - """ - if authenticated: - url = bucket.generate_url(expires_in, method=method) - o = urlparse(url) - path = o.path + '?' + o.query - else: - path = '/{bucket}'.format(bucket=bucket.name) - - if s3.main.is_secure: - class_ = HTTPSConnection - else: - class_ = HTTPConnection - - c = class_(s3.main.host, s3.main.port, strict=True) - c.request(method, path, body=body) - res = c.getresponse() - - print res.status, res.reason - return res - - @attr(resource='object') @attr(method='get') @attr(operation='publically readable bucket') From cb1e14ae16aca2f0e1b5972c22aa3861dab3ce53 Mon Sep 17 00:00:00 2001 From: "Robin H. 
Johnson" Date: Mon, 15 Jun 2015 05:19:13 +0000 Subject: [PATCH 03/27] website: bucket-level tests (indexdoc, errordoc) This adds tests for the new S3 Website code under development at on the wip-static-website-robbat2-master branch (see the robbat2 fork). The ErrorDoc tests with a valid uploaded ErrorDoc show that my existing code fails as expected :-). All other IndexDoc cases pass already. It would be nice to refactor some of this set of tests and extract common code, because the only variation in some of them is the ACLs on a single file. Signed-off-by: Robin H. Johnson --- s3tests/functional/test_s3_website.py | 525 ++++++++++++++++++++++++++ 1 file changed, 525 insertions(+) create mode 100644 s3tests/functional/test_s3_website.py diff --git a/s3tests/functional/test_s3_website.py b/s3tests/functional/test_s3_website.py new file mode 100644 index 0000000..74e0c81 --- /dev/null +++ b/s3tests/functional/test_s3_website.py @@ -0,0 +1,525 @@ +from __future__ import print_function +import sys +from cStringIO import StringIO +import boto.exception +import boto.s3.connection +import boto.s3.acl +import bunch +import datetime +import time +import email.utils +import isodate +import nose +import operator +import socket +import ssl +import os +import requests +import base64 +import hmac +import sha +import pytz +import json +import httplib2 +import threading +import itertools +import string +import random + +import xml.etree.ElementTree as ET + +from httplib import HTTPConnection, HTTPSConnection +from urlparse import urlparse + +from nose.tools import eq_ as eq, ok_ as ok +from nose.plugins.attrib import attr +from nose.plugins.skip import SkipTest + +from .utils import assert_raises +from .utils import generate_random +from .utils import region_sync_meta +import AnonymousAuth + +from email.header import decode_header +from ordereddict import OrderedDict + +from boto.s3.cors import CORSConfiguration + +from . 
import ( + get_new_bucket, + get_new_bucket_name, + s3, + config, + _make_raw_request, + choose_bucket_prefix, + ) + +WEBSITE_CONFIGS_XMLFRAG = { + 'IndexDoc': '{indexdoc}', + 'IndexDocErrorDoc': '{indexdoc}{errordoc}', + } + +def make_website_config(xml_fragment): + """ + Take the tedious stuff out of the config + """ + return '' + xml_fragment + '' + +def get_website_url(proto, bucket, path): + """ + Return the URL to a website page + """ + domain = config['main']['host'] + if('s3website_domain' in config['main']): + domain = config['main']['s3website_domain'] + elif('s3website_domain' in config['alt']): + domain = config['DEFAULT']['s3website_domain'] + return "%s://%s.%s/%s" % (proto, bucket, domain, path) + +def _test_website_prep(bucket, xml_fragment): + indexname = choose_bucket_prefix(template='index-{random}.html', max_len=32) + errorname = choose_bucket_prefix(template='error-{random}.html', max_len=32) + xml_fragment = xml_fragment.format(indexdoc=indexname, errordoc=errorname) + config_xml = make_website_config(xml_fragment) + bucket.set_website_configuration_xml(config_xml) + eq (config_xml, bucket.get_website_configuration_xml()) + return indexname, errorname + +def __website_expected_reponse_status(res, status, reason): + eq(res.status, status) + eq(res.reason, reason) + +def _website_expected_error_response(res, bucket_name, status, reason, code): + body = res.read() + print(body) + __website_expected_reponse_status(res, status, reason) + ok('
  • Code: '+code+'
  • ' in body, 'HTML should contain "Code: %s" ' % (code, )) + ok(('
  • BucketName: %s
  • ' % (bucket_name, )) in body, 'HTML should contain bucket name') + +def _website_request(bucket_name, path): + url = get_website_url('http', bucket_name, path) + print("url", url) + + o = urlparse(url) + path = o.path + '?' + o.query + request_headers={} + request_headers['Host'] = o.hostname + res = _make_raw_request(config.main.host, config.main.port, 'GET', path, request_headers=request_headers, secure=False) + for (k,v) in res.getheaders(): + print(k,v) + return res + +# ---------- Non-existant buckets via the website endpoint +@attr(resource='bucket') +@attr(method='get') +@attr(operation='list') +@attr(assertion='non-existant bucket via website endpoint should give NoSuchBucket, exposing security risk') +@attr('s3website') +@attr('fails_on_rgw') +def test_website_nonexistant_bucket_s3(): + bucket_name = get_new_bucket_name() + res = _website_request(bucket_name, '') + _website_expected_error_response(res, bucket_name, 404, 'Not Found', 'NoSuchBucket') + +@attr(resource='bucket') +@attr(method='get') +@attr(operation='list') +@attr(assertion='non-existant bucket via website endpoint should give Forbidden, keeping bucket identity secure') +@attr('s3website') +@attr('fails_on_s3') +def test_website_nonexistant_bucket_rgw(): + bucket_name = get_new_bucket_name() + res = _website_request(bucket_name, '') + _website_expected_error_response(res, bucket_name, 403, 'Forbidden', 'AccessDenied') + +#------------- IndexDocument only, successes +@attr(resource='bucket') +@attr(method='get') +@attr(operation='list') +@attr(assertion='non-empty public buckets via s3website return page for /, where page is public') +@attr('s3website') +def test_website_public_bucket_list_public_index(): + bucket = get_new_bucket() + indexname, errorname = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) + bucket.make_public() + indexhtml = bucket.new_key(indexname) + indexstring = choose_bucket_prefix(template='{random}', max_len=256) + 
indexhtml.set_contents_from_string(indexstring) + indexhtml.make_public() + + res = _website_request(bucket.name, '') + body = res.read() + print(body) + eq(body, indexstring, 'default content should match index.html set content') + __website_expected_reponse_status(res, 200, 'OK') + indexhtml.delete() + bucket.delete() + +@attr(resource='bucket') +@attr(method='get') +@attr(operation='list') +@attr(assertion='non-empty private buckets via s3website return page for /, where page is private') +@attr('s3website') +def test_website_private_bucket_list_public_index(): + bucket = get_new_bucket() + indexname, errorname = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) + bucket.set_canned_acl('private') + indexhtml = bucket.new_key(indexname) + indexstring = choose_bucket_prefix(template='{random}', max_len=256) + indexhtml.set_contents_from_string(indexstring) + indexhtml.make_public() + + res = _website_request(bucket.name, '') + __website_expected_reponse_status(res, 200, 'OK') + body = res.read() + print(body) + eq(body, indexstring, 'default content should match index.html set content') + indexhtml.delete() + bucket.delete() + + +# ---------- IndexDocument only, failures +@attr(resource='bucket') +@attr(method='get') +@attr(operation='list') +@attr(assertion='empty private buckets via s3website return a 403 for /') +@attr('s3website') +def test_website_private_bucket_list_empty(): + bucket = get_new_bucket() + bucket.set_canned_acl('private') + indexname, errorname = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) + + res = _website_request(bucket.name, '') + _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied') + bucket.delete() + +@attr(resource='bucket') +@attr(method='get') +@attr(operation='list') +@attr(assertion='empty public buckets via s3website return a 404 for /') +@attr('s3website') +def test_website_public_bucket_list_empty(): + bucket = get_new_bucket() + indexname, errorname = 
_test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) + bucket.make_public() + + res = _website_request(bucket.name, '') + _website_expected_error_response(res, bucket.name, 404, 'Not Found', 'NoSuchKey') + bucket.delete() + +@attr(resource='bucket') +@attr(method='get') +@attr(operation='list') +@attr(assertion='non-empty public buckets via s3website return page for /, where page is private') +@attr('s3website') +def test_website_public_bucket_list_private_index(): + bucket = get_new_bucket() + indexname, errorname = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) + bucket.make_public() + indexhtml = bucket.new_key(indexname) + indexstring = choose_bucket_prefix(template='{random}', max_len=256) + indexhtml.set_contents_from_string(indexstring) + indexhtml.set_canned_acl('private') + + res = _website_request(bucket.name, '') + _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied') + indexhtml.delete() + bucket.delete() + +@attr(resource='bucket') +@attr(method='get') +@attr(operation='list') +@attr(assertion='non-empty private buckets via s3website return page for /, where page is private') +@attr('s3website') +def test_website_private_bucket_list_private_index(): + bucket = get_new_bucket() + indexname, errorname = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) + bucket.set_canned_acl('private') + indexhtml = bucket.new_key(indexname) + indexstring = choose_bucket_prefix(template='{random}', max_len=256) + indexhtml.set_contents_from_string(indexstring) + indexhtml.set_canned_acl('private') + + res = _website_request(bucket.name, '') + _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied') + + indexhtml.delete() + bucket.delete() + +# ---------- IndexDocument & ErrorDocument, failures due to errordoc assigned but missing +@attr(resource='bucket') +@attr(method='get') +@attr(operation='list') +@attr(assertion='empty private buckets via s3website return a 403 for 
/, missing errordoc') +@attr('s3website') +def test_website_private_bucket_list_empty_missingerrordoc(): + bucket = get_new_bucket() + indexname, errorname = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) + bucket.set_canned_acl('private') + + res = _website_request(bucket.name, '') + _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied') + body = res.read() + print(body) + + bucket.delete() + +@attr(resource='bucket') +@attr(method='get') +@attr(operation='list') +@attr(assertion='empty public buckets via s3website return a 404 for /, missing errordoc') +@attr('s3website') +def test_website_public_bucket_list_empty_missingerrordoc(): + bucket = get_new_bucket() + indexname, errorname = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) + bucket.make_public() + + res = _website_request(bucket.name, '') + _website_expected_error_response(res, bucket.name, 404, 'Not Found', 'NoSuchKey') + bucket.delete() + +@attr(resource='bucket') +@attr(method='get') +@attr(operation='list') +@attr(assertion='non-empty public buckets via s3website return page for /, where page is private, missing errordoc') +@attr('s3website') +def test_website_public_bucket_list_private_index_missingerrordoc(): + bucket = get_new_bucket() + indexname, errorname = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) + bucket.make_public() + indexhtml = bucket.new_key(indexname) + indexstring = choose_bucket_prefix(template='{random}', max_len=256) + indexhtml.set_contents_from_string(indexstring) + indexhtml.set_canned_acl('private') + + res = _website_request(bucket.name, '') + _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied') + + indexhtml.delete() + bucket.delete() + +@attr(resource='bucket') +@attr(method='get') +@attr(operation='list') +@attr(assertion='non-empty private buckets via s3website return page for /, where page is private, missing errordoc') 
+@attr('s3website') +def test_website_private_bucket_list_private_index_missingerrordoc(): + bucket = get_new_bucket() + indexname, errorname = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) + bucket.set_canned_acl('private') + indexhtml = bucket.new_key(indexname) + indexstring = choose_bucket_prefix(template='{random}', max_len=256) + indexhtml.set_contents_from_string(indexstring) + indexhtml.set_canned_acl('private') + + res = _website_request(bucket.name, '') + _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied') + + indexhtml.delete() + bucket.delete() + +# ---------- IndexDocument & ErrorDocument, failures due to errordoc assigned but not accessible +@attr(resource='bucket') +@attr(method='get') +@attr(operation='list') +@attr(assertion='empty private buckets via s3website return a 403 for /, blocked errordoc') +@attr('s3website') +def test_website_private_bucket_list_empty_blockederrordoc(): + bucket = get_new_bucket() + indexname, errorname = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) + bucket.set_canned_acl('private') + errorhtml = bucket.new_key(errorname) + errorstring = choose_bucket_prefix(template='{random}', max_len=256) + errorhtml.set_contents_from_string(errorstring) + errorhtml.set_canned_acl('private') + + res = _website_request(bucket.name, '') + _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied') + body = res.read() + print(body) + ok(errorstring not in body, 'error content should match error.html set content') + + errorhtml.delete() + bucket.delete() + +@attr(resource='bucket') +@attr(method='get') +@attr(operation='list') +@attr(assertion='empty public buckets via s3website return a 404 for /, blocked errordoc') +@attr('s3website') +def test_website_public_bucket_list_empty_blockederrordoc(): + bucket = get_new_bucket() + indexname, errorname = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) + 
bucket.make_public() + errorhtml = bucket.new_key(errorname) + errorstring = choose_bucket_prefix(template='{random}', max_len=256) + errorhtml.set_contents_from_string(errorstring) + errorhtml.set_canned_acl('private') + + res = _website_request(bucket.name, '') + _website_expected_error_response(res, bucket.name, 404, 'Not Found', 'NoSuchKey') + body = res.read() + print(body) + ok(errorstring not in body, 'error content should match error.html set content') + + errorhtml.delete() + bucket.delete() + +@attr(resource='bucket') +@attr(method='get') +@attr(operation='list') +@attr(assertion='non-empty public buckets via s3website return page for /, where page is private, blocked errordoc') +@attr('s3website') +def test_website_public_bucket_list_private_index_blockederrordoc(): + bucket = get_new_bucket() + indexname, errorname = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) + bucket.make_public() + indexhtml = bucket.new_key(indexname) + indexstring = choose_bucket_prefix(template='{random}', max_len=256) + indexhtml.set_contents_from_string(indexstring) + indexhtml.set_canned_acl('private') + errorhtml = bucket.new_key(errorname) + errorstring = choose_bucket_prefix(template='{random}', max_len=256) + errorhtml.set_contents_from_string(errorstring) + errorhtml.set_canned_acl('private') + + res = _website_request(bucket.name, '') + _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied') + body = res.read() + print(body) + ok(errorstring not in body, 'error content should match error.html set content') + + indexhtml.delete() + errorhtml.delete() + bucket.delete() + +@attr(resource='bucket') +@attr(method='get') +@attr(operation='list') +@attr(assertion='non-empty private buckets via s3website return page for /, where page is private, blocked errordoc') +@attr('s3website') +def test_website_private_bucket_list_private_index_blockederrordoc(): + bucket = get_new_bucket() + indexname, errorname = 
_test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) + bucket.set_canned_acl('private') + indexhtml = bucket.new_key(indexname) + indexstring = choose_bucket_prefix(template='{random}', max_len=256) + indexhtml.set_contents_from_string(indexstring) + indexhtml.set_canned_acl('private') + errorhtml = bucket.new_key(errorname) + errorstring = choose_bucket_prefix(template='{random}', max_len=256) + errorhtml.set_contents_from_string(errorstring) + errorhtml.set_canned_acl('private') + + res = _website_request(bucket.name, '') + _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied') + body = res.read() + print(body) + ok(errorstring not in body, 'error content should match error.html set content') + + indexhtml.delete() + errorhtml.delete() + bucket.delete() + +# ---------- IndexDocument & ErrorDocument, failures with errordoc available +@attr(resource='bucket') +@attr(method='get') +@attr(operation='list') +@attr(assertion='empty private buckets via s3website return a 403 for /, good errordoc') +@attr('s3website') +def test_website_private_bucket_list_empty_gooderrordoc(): + bucket = get_new_bucket() + indexname, errorname = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) + bucket.set_canned_acl('private') + errorhtml = bucket.new_key(errorname) + errorstring = choose_bucket_prefix(template='{random}', max_len=256) + errorhtml.set_contents_from_string(errorstring) + errorhtml.set_canned_acl('public-read') + + res = _website_request(bucket.name, '') + _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied') + body = res.read() + print(body) + eq(body, errorstring, 'error content should match error.html set content') + + errorhtml.delete() + bucket.delete() + +@attr(resource='bucket') +@attr(method='get') +@attr(operation='list') +@attr(assertion='empty public buckets via s3website return a 404 for /, good errordoc') +@attr('s3website') +def 
test_website_public_bucket_list_empty_gooderrordoc(): + bucket = get_new_bucket() + indexname, errorname = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) + bucket.make_public() + errorhtml = bucket.new_key(errorname) + errorstring = choose_bucket_prefix(template='{random}', max_len=256) + errorhtml.set_contents_from_string(errorstring) + errorhtml.set_canned_acl('public-read') + + res = _website_request(bucket.name, '') + _website_expected_error_response(res, bucket.name, 404, 'Not Found', 'NoSuchKey') + body = res.read() + print(body) + eq(body, errorstring, 'error content should match error.html set content') + + errorhtml.delete() + bucket.delete() + +@attr(resource='bucket') +@attr(method='get') +@attr(operation='list') +@attr(assertion='non-empty public buckets via s3website return page for /, where page is private') +@attr('s3website') +def test_website_public_bucket_list_private_index_gooderrordoc(): + bucket = get_new_bucket() + indexname, errorname = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) + bucket.make_public() + indexhtml = bucket.new_key(indexname) + indexstring = choose_bucket_prefix(template='{random}', max_len=256) + indexhtml.set_contents_from_string(indexstring) + indexhtml.set_canned_acl('private') + errorhtml = bucket.new_key(errorname) + errorstring = choose_bucket_prefix(template='{random}', max_len=256) + errorhtml.set_contents_from_string(errorstring) + errorhtml.set_canned_acl('public-read') + + res = _website_request(bucket.name, '') + _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied') + body = res.read() + print(body) + eq(body, errorstring, 'error content should match error.html set content') + + indexhtml.delete() + errorhtml.delete() + bucket.delete() + +@attr(resource='bucket') +@attr(method='get') +@attr(operation='list') +@attr(assertion='non-empty private buckets via s3website return page for /, where page is private') +@attr('s3website') +def 
test_website_private_bucket_list_private_index_gooderrordoc(): + bucket = get_new_bucket() + indexname, errorname = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) + bucket.set_canned_acl('private') + indexhtml = bucket.new_key(indexname) + indexstring = choose_bucket_prefix(template='{random}', max_len=256) + indexhtml.set_contents_from_string(indexstring) + indexhtml.set_canned_acl('private') + errorhtml = bucket.new_key(errorname) + errorstring = choose_bucket_prefix(template='{random}', max_len=256) + errorhtml.set_contents_from_string(errorstring) + errorhtml.set_canned_acl('public-read') + + res = _website_request(bucket.name, '') + _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied') + body = res.read() + print(body) + eq(body, errorstring, 'error content should match error.html set content') + + indexhtml.delete() + errorhtml.delete() + bucket.delete() From ea691117736f9ccfba4e27fa15aaf0fe0eadaa95 Mon Sep 17 00:00:00 2001 From: "Robin H. Johnson" Date: Wed, 17 Jun 2015 00:43:46 +0000 Subject: [PATCH 04/27] website: improve test structure. Signed-off-by: Robin H. Johnson --- s3tests/functional/test_s3_website.py | 115 +++++++++++++++----------- 1 file changed, 67 insertions(+), 48 deletions(-) diff --git a/s3tests/functional/test_s3_website.py b/s3tests/functional/test_s3_website.py index 74e0c81..2714ec8 100644 --- a/s3tests/functional/test_s3_website.py +++ b/s3tests/functional/test_s3_website.py @@ -1,6 +1,7 @@ from __future__ import print_function import sys from cStringIO import StringIO +import collections import boto.exception import boto.s3.connection import boto.s3.acl @@ -55,8 +56,10 @@ from . 
import ( ) WEBSITE_CONFIGS_XMLFRAG = { - 'IndexDoc': '{indexdoc}', - 'IndexDocErrorDoc': '{indexdoc}{errordoc}', + 'IndexDoc': '${IndexDocument_Suffix}', + 'IndexDocErrorDoc': '${IndexDocument_Suffix}${ErrorDocument_Key}', + 'RedirectAll': '${RedirectAllRequestsTo_HostName}', + 'RedirectAll+Protocol': '${RedirectAllRequestsTo_HostName}${RedirectAllRequestsTo_Protocol}', } def make_website_config(xml_fragment): @@ -74,20 +77,35 @@ def get_website_url(proto, bucket, path): domain = config['main']['s3website_domain'] elif('s3website_domain' in config['alt']): domain = config['DEFAULT']['s3website_domain'] + path = path.lstrip('/') return "%s://%s.%s/%s" % (proto, bucket, domain, path) -def _test_website_prep(bucket, xml_fragment): - indexname = choose_bucket_prefix(template='index-{random}.html', max_len=32) - errorname = choose_bucket_prefix(template='error-{random}.html', max_len=32) - xml_fragment = xml_fragment.format(indexdoc=indexname, errordoc=errorname) +def _test_website_populate_fragment(xml_fragment, fields): + f = { + 'IndexDocument_Suffix': choose_bucket_prefix(template='index-{random}.html', max_len=32), + 'ErrorDocument_Key': choose_bucket_prefix(template='error-{random}.html', max_len=32), + 'RedirectAllRequestsTo_HostName': choose_bucket_prefix(template='{random}.{random}.com', max_len=32), + } + f.update(fields) + xml_fragment = string.Template(xml_fragment).safe_substitute(**f) + return xml_fragment, f + +def _test_website_prep(bucket, xml_template, hardcoded_fields = {}): + xml_fragment, f = _test_website_populate_fragment(xml_template, hardcoded_fields) config_xml = make_website_config(xml_fragment) + print(config_xml) bucket.set_website_configuration_xml(config_xml) eq (config_xml, bucket.get_website_configuration_xml()) - return indexname, errorname + return f def __website_expected_reponse_status(res, status, reason): - eq(res.status, status) - eq(res.reason, reason) + if not isinstance(status, collections.Container): + status = set([status]) + 
if not isinstance(reason, collections.Container): + reason = set([reason]) + + ok(res.status in status, 'HTTP status code mismatch') + ok(res.reason in reason, 'HTTP reason mismatch') def _website_expected_error_response(res, bucket_name, status, reason, code): body = res.read() @@ -96,7 +114,7 @@ def _website_expected_error_response(res, bucket_name, status, reason, code): ok('
  • Code: '+code+'
  • ' in body, 'HTML should contain "Code: %s" ' % (code, )) ok(('
  • BucketName: %s
  • ' % (bucket_name, )) in body, 'HTML should contain bucket name') -def _website_request(bucket_name, path): +def _website_request(bucket_name, path, method='GET'): url = get_website_url('http', bucket_name, path) print("url", url) @@ -104,7 +122,8 @@ def _website_request(bucket_name, path): path = o.path + '?' + o.query request_headers={} request_headers['Host'] = o.hostname - res = _make_raw_request(config.main.host, config.main.port, 'GET', path, request_headers=request_headers, secure=False) + print('Request: {method} {path} {headers}'.format(method=method, path=path, headers=' '.join(map(lambda t: t[0]+':'+t[1]+"\n", request_headers.items())))) + res = _make_raw_request(config.main.host, config.main.port, method, path, request_headers=request_headers, secure=False) for (k,v) in res.getheaders(): print(k,v) return res @@ -140,9 +159,9 @@ def test_website_nonexistant_bucket_rgw(): @attr('s3website') def test_website_public_bucket_list_public_index(): bucket = get_new_bucket() - indexname, errorname = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) + f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) bucket.make_public() - indexhtml = bucket.new_key(indexname) + indexhtml = bucket.new_key(f['IndexDocument_Suffix']) indexstring = choose_bucket_prefix(template='{random}', max_len=256) indexhtml.set_contents_from_string(indexstring) indexhtml.make_public() @@ -150,7 +169,7 @@ def test_website_public_bucket_list_public_index(): res = _website_request(bucket.name, '') body = res.read() print(body) - eq(body, indexstring, 'default content should match index.html set content') + eq(body, indexstring) # default content should match index.html set content __website_expected_reponse_status(res, 200, 'OK') indexhtml.delete() bucket.delete() @@ -162,9 +181,9 @@ def test_website_public_bucket_list_public_index(): @attr('s3website') def test_website_private_bucket_list_public_index(): bucket = get_new_bucket() - indexname, errorname = 
_test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) + f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) bucket.set_canned_acl('private') - indexhtml = bucket.new_key(indexname) + indexhtml = bucket.new_key(f['IndexDocument_Suffix']) indexstring = choose_bucket_prefix(template='{random}', max_len=256) indexhtml.set_contents_from_string(indexstring) indexhtml.make_public() @@ -187,7 +206,7 @@ def test_website_private_bucket_list_public_index(): def test_website_private_bucket_list_empty(): bucket = get_new_bucket() bucket.set_canned_acl('private') - indexname, errorname = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) + f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) res = _website_request(bucket.name, '') _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied') @@ -200,7 +219,7 @@ def test_website_private_bucket_list_empty(): @attr('s3website') def test_website_public_bucket_list_empty(): bucket = get_new_bucket() - indexname, errorname = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) + f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) bucket.make_public() res = _website_request(bucket.name, '') @@ -214,9 +233,9 @@ def test_website_public_bucket_list_empty(): @attr('s3website') def test_website_public_bucket_list_private_index(): bucket = get_new_bucket() - indexname, errorname = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) + f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) bucket.make_public() - indexhtml = bucket.new_key(indexname) + indexhtml = bucket.new_key(f['IndexDocument_Suffix']) indexstring = choose_bucket_prefix(template='{random}', max_len=256) indexhtml.set_contents_from_string(indexstring) indexhtml.set_canned_acl('private') @@ -233,9 +252,9 @@ def test_website_public_bucket_list_private_index(): @attr('s3website') def test_website_private_bucket_list_private_index(): bucket = 
get_new_bucket() - indexname, errorname = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) + f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) bucket.set_canned_acl('private') - indexhtml = bucket.new_key(indexname) + indexhtml = bucket.new_key(f['IndexDocument_Suffix']) indexstring = choose_bucket_prefix(template='{random}', max_len=256) indexhtml.set_contents_from_string(indexstring) indexhtml.set_canned_acl('private') @@ -254,7 +273,7 @@ def test_website_private_bucket_list_private_index(): @attr('s3website') def test_website_private_bucket_list_empty_missingerrordoc(): bucket = get_new_bucket() - indexname, errorname = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) + f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) bucket.set_canned_acl('private') res = _website_request(bucket.name, '') @@ -271,7 +290,7 @@ def test_website_private_bucket_list_empty_missingerrordoc(): @attr('s3website') def test_website_public_bucket_list_empty_missingerrordoc(): bucket = get_new_bucket() - indexname, errorname = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) + f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) bucket.make_public() res = _website_request(bucket.name, '') @@ -285,9 +304,9 @@ def test_website_public_bucket_list_empty_missingerrordoc(): @attr('s3website') def test_website_public_bucket_list_private_index_missingerrordoc(): bucket = get_new_bucket() - indexname, errorname = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) + f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) bucket.make_public() - indexhtml = bucket.new_key(indexname) + indexhtml = bucket.new_key(f['IndexDocument_Suffix']) indexstring = choose_bucket_prefix(template='{random}', max_len=256) indexhtml.set_contents_from_string(indexstring) indexhtml.set_canned_acl('private') @@ -305,9 +324,9 @@ def 
test_website_public_bucket_list_private_index_missingerrordoc(): @attr('s3website') def test_website_private_bucket_list_private_index_missingerrordoc(): bucket = get_new_bucket() - indexname, errorname = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) + f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) bucket.set_canned_acl('private') - indexhtml = bucket.new_key(indexname) + indexhtml = bucket.new_key(f['IndexDocument_Suffix']) indexstring = choose_bucket_prefix(template='{random}', max_len=256) indexhtml.set_contents_from_string(indexstring) indexhtml.set_canned_acl('private') @@ -326,9 +345,9 @@ def test_website_private_bucket_list_private_index_missingerrordoc(): @attr('s3website') def test_website_private_bucket_list_empty_blockederrordoc(): bucket = get_new_bucket() - indexname, errorname = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) + f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) bucket.set_canned_acl('private') - errorhtml = bucket.new_key(errorname) + errorhtml = bucket.new_key(f['ErrorDocument_Key']) errorstring = choose_bucket_prefix(template='{random}', max_len=256) errorhtml.set_contents_from_string(errorstring) errorhtml.set_canned_acl('private') @@ -349,9 +368,9 @@ def test_website_private_bucket_list_empty_blockederrordoc(): @attr('s3website') def test_website_public_bucket_list_empty_blockederrordoc(): bucket = get_new_bucket() - indexname, errorname = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) + f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) bucket.make_public() - errorhtml = bucket.new_key(errorname) + errorhtml = bucket.new_key(f['ErrorDocument_Key']) errorstring = choose_bucket_prefix(template='{random}', max_len=256) errorhtml.set_contents_from_string(errorstring) errorhtml.set_canned_acl('private') @@ -372,13 +391,13 @@ def 
test_website_public_bucket_list_empty_blockederrordoc(): @attr('s3website') def test_website_public_bucket_list_private_index_blockederrordoc(): bucket = get_new_bucket() - indexname, errorname = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) + f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) bucket.make_public() - indexhtml = bucket.new_key(indexname) + indexhtml = bucket.new_key(f['IndexDocument_Suffix']) indexstring = choose_bucket_prefix(template='{random}', max_len=256) indexhtml.set_contents_from_string(indexstring) indexhtml.set_canned_acl('private') - errorhtml = bucket.new_key(errorname) + errorhtml = bucket.new_key(f['ErrorDocument_Key']) errorstring = choose_bucket_prefix(template='{random}', max_len=256) errorhtml.set_contents_from_string(errorstring) errorhtml.set_canned_acl('private') @@ -400,13 +419,13 @@ def test_website_public_bucket_list_private_index_blockederrordoc(): @attr('s3website') def test_website_private_bucket_list_private_index_blockederrordoc(): bucket = get_new_bucket() - indexname, errorname = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) + f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) bucket.set_canned_acl('private') - indexhtml = bucket.new_key(indexname) + indexhtml = bucket.new_key(f['IndexDocument_Suffix']) indexstring = choose_bucket_prefix(template='{random}', max_len=256) indexhtml.set_contents_from_string(indexstring) indexhtml.set_canned_acl('private') - errorhtml = bucket.new_key(errorname) + errorhtml = bucket.new_key(f['ErrorDocument_Key']) errorstring = choose_bucket_prefix(template='{random}', max_len=256) errorhtml.set_contents_from_string(errorstring) errorhtml.set_canned_acl('private') @@ -429,9 +448,9 @@ def test_website_private_bucket_list_private_index_blockederrordoc(): @attr('s3website') def test_website_private_bucket_list_empty_gooderrordoc(): bucket = get_new_bucket() - indexname, errorname = 
_test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) + f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) bucket.set_canned_acl('private') - errorhtml = bucket.new_key(errorname) + errorhtml = bucket.new_key(f['ErrorDocument_Key']) errorstring = choose_bucket_prefix(template='{random}', max_len=256) errorhtml.set_contents_from_string(errorstring) errorhtml.set_canned_acl('public-read') @@ -452,9 +471,9 @@ def test_website_private_bucket_list_empty_gooderrordoc(): @attr('s3website') def test_website_public_bucket_list_empty_gooderrordoc(): bucket = get_new_bucket() - indexname, errorname = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) + f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) bucket.make_public() - errorhtml = bucket.new_key(errorname) + errorhtml = bucket.new_key(f['ErrorDocument_Key']) errorstring = choose_bucket_prefix(template='{random}', max_len=256) errorhtml.set_contents_from_string(errorstring) errorhtml.set_canned_acl('public-read') @@ -475,13 +494,13 @@ def test_website_public_bucket_list_empty_gooderrordoc(): @attr('s3website') def test_website_public_bucket_list_private_index_gooderrordoc(): bucket = get_new_bucket() - indexname, errorname = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) + f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) bucket.make_public() - indexhtml = bucket.new_key(indexname) + indexhtml = bucket.new_key(f['IndexDocument_Suffix']) indexstring = choose_bucket_prefix(template='{random}', max_len=256) indexhtml.set_contents_from_string(indexstring) indexhtml.set_canned_acl('private') - errorhtml = bucket.new_key(errorname) + errorhtml = bucket.new_key(f['ErrorDocument_Key']) errorstring = choose_bucket_prefix(template='{random}', max_len=256) errorhtml.set_contents_from_string(errorstring) errorhtml.set_canned_acl('public-read') @@ -503,13 +522,13 @@ def 
test_website_public_bucket_list_private_index_gooderrordoc(): @attr('s3website') def test_website_private_bucket_list_private_index_gooderrordoc(): bucket = get_new_bucket() - indexname, errorname = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) + f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) bucket.set_canned_acl('private') - indexhtml = bucket.new_key(indexname) + indexhtml = bucket.new_key(f['IndexDocument_Suffix']) indexstring = choose_bucket_prefix(template='{random}', max_len=256) indexhtml.set_contents_from_string(indexstring) indexhtml.set_canned_acl('private') - errorhtml = bucket.new_key(errorname) + errorhtml = bucket.new_key(f['ErrorDocument_Key']) errorstring = choose_bucket_prefix(template='{random}', max_len=256) errorhtml.set_contents_from_string(errorstring) errorhtml.set_canned_acl('public-read') From 4c07c555eed78af692e70dc28df9b1305ba7bad3 Mon Sep 17 00:00:00 2001 From: "Robin H. Johnson" Date: Wed, 17 Jun 2015 00:44:06 +0000 Subject: [PATCH 05/27] website: Start redirect tests. Signed-off-by: Robin H. Johnson --- s3tests/functional/test_s3_website.py | 69 +++++++++++++++++++++++++++ 1 file changed, 69 insertions(+) diff --git a/s3tests/functional/test_s3_website.py b/s3tests/functional/test_s3_website.py index 2714ec8..7f6b0f9 100644 --- a/s3tests/functional/test_s3_website.py +++ b/s3tests/functional/test_s3_website.py @@ -114,6 +114,14 @@ def _website_expected_error_response(res, bucket_name, status, reason, code): ok('
  • Code: '+code+'
  • ' in body, 'HTML should contain "Code: %s" ' % (code, )) ok(('
  • BucketName: %s
  • ' % (bucket_name, )) in body, 'HTML should contain bucket name') +def _website_expected_redirect_response(res, status, reason, new_url): + body = res.read() + print(body) + __website_expected_reponse_status(res, status, reason) + loc = res.getheader('Location', None) + eq(loc, new_url, 'Location header should be set "%s" != "%s"' % (loc,new_url,)) + ok(len(body) == 0, 'Body of a redirect should be empty') + def _website_request(bucket_name, path, method='GET'): url = get_website_url('http', bucket_name, path) print("url", url) @@ -542,3 +550,64 @@ def test_website_private_bucket_list_private_index_gooderrordoc(): indexhtml.delete() errorhtml.delete() bucket.delete() + +# ------ redirect tests + +@attr(resource='bucket') +@attr(method='get') +@attr(operation='list') +@attr(assertion='RedirectAllRequestsTo without protocol should TODO') +@attr('s3website') +def test_website_bucket_private_redirectall_base(): + bucket = get_new_bucket() + f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['RedirectAll']) + bucket.set_canned_acl('private') + + res = _website_request(bucket.name, '') + # RGW returns "302 Found" per RFC2616 + # S3 returns 302 Moved Temporarily per RFC1945 + new_url = 'http://%s/' % f['RedirectAllRequestsTo_HostName'] + _website_expected_redirect_response(res, 302, ['Found', 'Moved Temporarily'], new_url) + + bucket.delete() + +@attr(resource='bucket') +@attr(method='get') +@attr(operation='list') +@attr(assertion='RedirectAllRequestsTo without protocol should TODO') +@attr('s3website') +def test_website_bucket_private_redirectall_path(): + bucket = get_new_bucket() + f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['RedirectAll']) + bucket.set_canned_acl('private') + + pathfragment = choose_bucket_prefix(template='{random}', max_len=16) + + res = _website_request(bucket.name, '/'+pathfragment) + # RGW returns "302 Found" per RFC2616 + # S3 returns 302 Moved Temporarily per RFC1945 + new_url = 'http://%s/%s' % 
(f['RedirectAllRequestsTo_HostName'], pathfragment) + _website_expected_redirect_response(res, 302, ['Found', 'Moved Temporarily'], new_url) + + bucket.delete() + +@attr(resource='bucket') +@attr(method='get') +@attr(operation='list') +@attr(assertion='RedirectAllRequestsTo without protocol should TODO') +@attr('s3website') +def test_website_bucket_private_redirectall_path_upgrade(): + bucket = get_new_bucket() + x = string.Template(WEBSITE_CONFIGS_XMLFRAG['RedirectAll+Protocol']).safe_substitute(RedirectAllRequestsTo_Protocol='https') + f = _test_website_prep(bucket, x) + bucket.set_canned_acl('private') + + pathfragment = choose_bucket_prefix(template='{random}', max_len=16) + + res = _website_request(bucket.name, pathfragment) + # RGW returns "302 Found" per RFC2616 + # S3 returns 302 Moved Temporarily per RFC1945 + new_url = 'https://%s/%s' % (f['RedirectAllRequestsTo_HostName'], pathfragment) + _website_expected_redirect_response(res, 302, ['Found', 'Moved Temporarily'], new_url) + + bucket.delete() From a9fd9988e1d7640ef55e9a8e943af474205286c4 Mon Sep 17 00:00:00 2001 From: "Robin H. Johnson" Date: Wed, 17 Jun 2015 01:03:07 +0000 Subject: [PATCH 06/27] website: Clean up imports. Signed-off-by: Robin H. 
Johnson --- s3tests/functional/test_s3_website.py | 36 --------------------------- 1 file changed, 36 deletions(-) diff --git a/s3tests/functional/test_s3_website.py b/s3tests/functional/test_s3_website.py index 7f6b0f9..e73c6ed 100644 --- a/s3tests/functional/test_s3_website.py +++ b/s3tests/functional/test_s3_website.py @@ -1,50 +1,14 @@ from __future__ import print_function import sys -from cStringIO import StringIO import collections -import boto.exception -import boto.s3.connection -import boto.s3.acl -import bunch -import datetime -import time -import email.utils -import isodate import nose -import operator -import socket -import ssl -import os -import requests -import base64 -import hmac -import sha -import pytz -import json -import httplib2 -import threading -import itertools import string import random -import xml.etree.ElementTree as ET - -from httplib import HTTPConnection, HTTPSConnection from urlparse import urlparse from nose.tools import eq_ as eq, ok_ as ok from nose.plugins.attrib import attr -from nose.plugins.skip import SkipTest - -from .utils import assert_raises -from .utils import generate_random -from .utils import region_sync_meta -import AnonymousAuth - -from email.header import decode_header -from ordereddict import OrderedDict - -from boto.s3.cors import CORSConfiguration from . import ( get_new_bucket, From 5c681dacff640eba242c4d5c10cb5c61a30f61dc Mon Sep 17 00:00:00 2001 From: "Robin H. Johnson" Date: Wed, 17 Jun 2015 04:33:49 +0000 Subject: [PATCH 07/27] Prepare for better testcases, with fixture functions. Signed-off-by: Robin H. 
Johnson --- s3tests/common.py | 70 +++++++++++++++++++++++++++ s3tests/functional/test_s3_website.py | 2 + 2 files changed, 72 insertions(+) diff --git a/s3tests/common.py b/s3tests/common.py index b096cdc..a9ab28e 100644 --- a/s3tests/common.py +++ b/s3tests/common.py @@ -181,3 +181,73 @@ def get_new_bucket(connection=None): def teardown(): nuke_prefixed_buckets() + +def with_setup_kwargs(setup, teardown=None): + """Decorator to add setup and/or teardown methods to a test function:: + + @with_setup_args(setup, teardown) + def test_something(): + " ... " + + The setup function should return (kwargs) which will be passed to + test function, and teardown function. + + Note that `with_setup_kwargs` is useful *only* for test functions, not for test + methods or inside of TestCase subclasses. + """ + def decorate(func): + kwargs = {} + + def test_wrapped(*args, **kwargs2): + k2 = kwargs.copy() + k2.update(kwargs2) + k2['testname'] = func.__name__ + func(*args, **k2) + + test_wrapped.__name__ = func.__name__ + + def setup_wrapped(): + k = setup() + kwargs.update(k) + if hasattr(func, 'setup'): + func.setup() + test_wrapped.setup = setup_wrapped + + if teardown: + def teardown_wrapped(): + if hasattr(func, 'teardown'): + func.teardown() + teardown(**kwargs) + + test_wrapped.teardown = teardown_wrapped + else: + if hasattr(func, 'teardown'): + test_wrapped.teardown = func.teardown() + return test_wrapped + return decorate + +# Demo case for the above, when you run test_gen(): +# _test_gen will run twice, +# with the following stderr printing +# setup_func {'b': 2} +# testcase ('1',) {'b': 2, 'testname': '_test_gen'} +# teardown_func {'b': 2} +# setup_func {'b': 2} +# testcase () {'b': 2, 'testname': '_test_gen'} +# teardown_func {'b': 2} +# +#def setup_func(): +# kwargs = {'b': 2} +# print("setup_func", kwargs, file=sys.stderr) +# return kwargs +# +#def teardown_func(**kwargs): +# print("teardown_func", kwargs, file=sys.stderr) +# +#@with_setup_kwargs(setup=setup_func, 
teardown=teardown_func) +#def _test_gen(*args, **kwargs): +# print("testcase", args, kwargs, file=sys.stderr) +# +#def test_gen(): +# yield _test_gen, '1' +# yield _test_gen diff --git a/s3tests/functional/test_s3_website.py b/s3tests/functional/test_s3_website.py index e73c6ed..a713858 100644 --- a/s3tests/functional/test_s3_website.py +++ b/s3tests/functional/test_s3_website.py @@ -19,6 +19,8 @@ from . import ( choose_bucket_prefix, ) +from ..common import with_setup_kwargs + WEBSITE_CONFIGS_XMLFRAG = { 'IndexDoc': '${IndexDocument_Suffix}', 'IndexDocErrorDoc': '${IndexDocument_Suffix}${ErrorDocument_Key}', From c053a96ef2d98081e3f0c09387d0a1a53bbfa8d2 Mon Sep 17 00:00:00 2001 From: "Robin H. Johnson" Date: Thu, 18 Jun 2015 04:41:09 +0000 Subject: [PATCH 08/27] Fixup path stuff for redirect test. Signed-off-by: Robin H. Johnson --- s3tests/functional/test_s3_website.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/s3tests/functional/test_s3_website.py b/s3tests/functional/test_s3_website.py index a713858..ca6444d 100644 --- a/s3tests/functional/test_s3_website.py +++ b/s3tests/functional/test_s3_website.py @@ -547,12 +547,12 @@ def test_website_bucket_private_redirectall_path(): f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['RedirectAll']) bucket.set_canned_acl('private') - pathfragment = choose_bucket_prefix(template='{random}', max_len=16) + pathfragment = choose_bucket_prefix(template='/{random}', max_len=16) - res = _website_request(bucket.name, '/'+pathfragment) + res = _website_request(bucket.name, pathfragment) # RGW returns "302 Found" per RFC2616 # S3 returns 302 Moved Temporarily per RFC1945 - new_url = 'http://%s/%s' % (f['RedirectAllRequestsTo_HostName'], pathfragment) + new_url = 'http://%s%s' % (f['RedirectAllRequestsTo_HostName'], pathfragment) _website_expected_redirect_response(res, 302, ['Found', 'Moved Temporarily'], new_url) bucket.delete() @@ -568,12 +568,12 @@ def 
test_website_bucket_private_redirectall_path_upgrade(): f = _test_website_prep(bucket, x) bucket.set_canned_acl('private') - pathfragment = choose_bucket_prefix(template='{random}', max_len=16) + pathfragment = choose_bucket_prefix(template='/{random}', max_len=16) - res = _website_request(bucket.name, pathfragment) + res = _website_request(bucket.name, +pathfragment) # RGW returns "302 Found" per RFC2616 # S3 returns 302 Moved Temporarily per RFC1945 - new_url = 'https://%s/%s' % (f['RedirectAllRequestsTo_HostName'], pathfragment) + new_url = 'https://%s%s' % (f['RedirectAllRequestsTo_HostName'], pathfragment) _website_expected_redirect_response(res, 302, ['Found', 'Moved Temporarily'], new_url) bucket.delete() From 25e9980af8dfb378e2f4726071d7a36b89771cc5 Mon Sep 17 00:00:00 2001 From: "Robin H. Johnson" Date: Fri, 19 Jun 2015 06:09:09 +0000 Subject: [PATCH 09/27] website: refactor and add redirect testcases, so far 3 failures out of 27 new tests. Signed-off-by: Robin H. Johnson --- requirements.txt | 1 + s3tests/common.py | 17 ++ s3tests/functional/test_s3_website.py | 269 ++++++++++++++++++++++++-- 3 files changed, 268 insertions(+), 19 deletions(-) diff --git a/requirements.txt b/requirements.txt index 7f1348a..beced13 100644 --- a/requirements.txt +++ b/requirements.txt @@ -9,3 +9,4 @@ requests ==0.14.0 pytz >=2011k ordereddict httplib2 +lxml diff --git a/s3tests/common.py b/s3tests/common.py index a9ab28e..7b5cb47 100644 --- a/s3tests/common.py +++ b/s3tests/common.py @@ -5,6 +5,7 @@ import os import random import string import yaml +from lxml import etree s3 = bunch.Bunch() config = bunch.Bunch() @@ -251,3 +252,19 @@ def with_setup_kwargs(setup, teardown=None): #def test_gen(): # yield _test_gen, '1' # yield _test_gen + + +def normalize_xml_whitespace(xml, pretty_print=True): + root = etree.fromstring(xml.encode(encoding='ascii')) + + for element in root.iter('*'): + if element.text is not None and not element.text.strip(): + element.text = None + if 
element.text is not None: + element.text = element.text.strip().replace("\n","").replace("\r","") + if element.tail is not None and not element.tail.strip(): + element.tail = None + if element.tail is not None: + element.tail = element.tail.strip().replace("\n","").replace("\r","") + + return etree.tostring(root, encoding="utf-8", xml_declaration=True, pretty_print=pretty_print) diff --git a/s3tests/functional/test_s3_website.py b/s3tests/functional/test_s3_website.py index ca6444d..85a4e2b 100644 --- a/s3tests/functional/test_s3_website.py +++ b/s3tests/functional/test_s3_website.py @@ -4,6 +4,7 @@ import collections import nose import string import random +from pprint import pprint from urlparse import urlparse @@ -20,12 +21,15 @@ from . import ( ) from ..common import with_setup_kwargs +from ..xmlhelper import normalize_xml_whitespace + +IGNORE_FIELD = 'IGNORETHIS' WEBSITE_CONFIGS_XMLFRAG = { - 'IndexDoc': '${IndexDocument_Suffix}', - 'IndexDocErrorDoc': '${IndexDocument_Suffix}${ErrorDocument_Key}', - 'RedirectAll': '${RedirectAllRequestsTo_HostName}', - 'RedirectAll+Protocol': '${RedirectAllRequestsTo_HostName}${RedirectAllRequestsTo_Protocol}', + 'IndexDoc': '${IndexDocument_Suffix}${RoutingRules}', + 'IndexDocErrorDoc': '${IndexDocument_Suffix}${ErrorDocument_Key}${RoutingRules}', + 'RedirectAll': '${RedirectAllRequestsTo_HostName}${RoutingRules}', + 'RedirectAll+Protocol': '${RedirectAllRequestsTo_HostName}${RedirectAllRequestsTo_Protocol}${RoutingRules}', } def make_website_config(xml_fragment): @@ -34,23 +38,40 @@ def make_website_config(xml_fragment): """ return '' + xml_fragment + '' -def get_website_url(proto, bucket, path): +def get_website_url(**kwargs): """ Return the URL to a website page """ + proto, bucket, hostname, path = 'http', None, None, '/' + + if 'proto' in kwargs: + proto = kwargs['proto'] + if 'bucket' in kwargs: + bucket = kwargs['bucket'] + if 'hostname' in kwargs: + hostname = kwargs['hostname'] + if 'path' in kwargs: + path = 
kwargs['path'] + domain = config['main']['host'] if('s3website_domain' in config['main']): domain = config['main']['s3website_domain'] elif('s3website_domain' in config['alt']): domain = config['DEFAULT']['s3website_domain'] + if hostname is None: + hostname = '%s.%s' % (bucket, domain) path = path.lstrip('/') - return "%s://%s.%s/%s" % (proto, bucket, domain, path) + return "%s://%s/%s" % (proto, hostname, path) def _test_website_populate_fragment(xml_fragment, fields): + for k in ['RoutingRules']: + if k in fields.keys() and len(fields[k]) > 0: + fields[k] = '<%s>%s' % (k, fields[k], k) f = { 'IndexDocument_Suffix': choose_bucket_prefix(template='index-{random}.html', max_len=32), 'ErrorDocument_Key': choose_bucket_prefix(template='error-{random}.html', max_len=32), 'RedirectAllRequestsTo_HostName': choose_bucket_prefix(template='{random}.{random}.com', max_len=32), + 'RoutingRules': '' } f.update(fields) xml_fragment = string.Template(xml_fragment).safe_substitute(**f) @@ -58,10 +79,15 @@ def _test_website_populate_fragment(xml_fragment, fields): def _test_website_prep(bucket, xml_template, hardcoded_fields = {}): xml_fragment, f = _test_website_populate_fragment(xml_template, hardcoded_fields) - config_xml = make_website_config(xml_fragment) - print(config_xml) - bucket.set_website_configuration_xml(config_xml) - eq (config_xml, bucket.get_website_configuration_xml()) + config_xml1 = make_website_config(xml_fragment) + bucket.set_website_configuration_xml(config_xml1) + config_xml1 = normalize_xml_whitespace(config_xml1, pretty_print=True) # Do it late, so the system gets weird whitespace + #print("config_xml1\n", config_xml1) + config_xml2 = bucket.get_website_configuration_xml() + config_xml2 = normalize_xml_whitespace(config_xml2, pretty_print=True) # For us to read + #print("config_xml2\n", config_xml2) + eq (config_xml1, config_xml2) + f['WebsiteConfiguration'] = config_xml2 return f def __website_expected_reponse_status(res, status, reason): @@ -70,15 
+96,19 @@ def __website_expected_reponse_status(res, status, reason): if not isinstance(reason, collections.Container): reason = set([reason]) - ok(res.status in status, 'HTTP status code mismatch') - ok(res.reason in reason, 'HTTP reason mismatch') + if status is not IGNORE_FIELD: + ok(res.status in status, 'HTTP code was %s should be %s' % (res.status, status)) + if reason is not IGNORE_FIELD: + ok(res.reason in reason, 'HTTP reason was was %s should be %s' % (res.reason, reason)) def _website_expected_error_response(res, bucket_name, status, reason, code): body = res.read() print(body) __website_expected_reponse_status(res, status, reason) - ok('
  • Code: '+code+'
  • ' in body, 'HTML should contain "Code: %s" ' % (code, )) - ok(('
  • BucketName: %s
  • ' % (bucket_name, )) in body, 'HTML should contain bucket name') + if code is not IGNORE_FIELD: + ok('
  • Code: '+code+'
  • ' in body, 'HTML should contain "Code: %s" ' % (code, )) + if bucket_name is not IGNORE_FIELD: + ok(('
  • BucketName: %s
  • ' % (bucket_name, )) in body, 'HTML should contain bucket name') def _website_expected_redirect_response(res, status, reason, new_url): body = res.read() @@ -89,7 +119,7 @@ def _website_expected_redirect_response(res, status, reason, new_url): ok(len(body) == 0, 'Body of a redirect should be empty') def _website_request(bucket_name, path, method='GET'): - url = get_website_url('http', bucket_name, path) + url = get_website_url(proto='http', bucket=bucket_name, path=path) print("url", url) o = urlparse(url) @@ -179,8 +209,8 @@ def test_website_private_bucket_list_public_index(): @attr('s3website') def test_website_private_bucket_list_empty(): bucket = get_new_bucket() - bucket.set_canned_acl('private') f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) + bucket.set_canned_acl('private') res = _website_request(bucket.name, '') _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied') @@ -517,8 +547,7 @@ def test_website_private_bucket_list_private_index_gooderrordoc(): errorhtml.delete() bucket.delete() -# ------ redirect tests - +# ------ RedirectAll tests @attr(resource='bucket') @attr(method='get') @attr(operation='list') @@ -570,10 +599,212 @@ def test_website_bucket_private_redirectall_path_upgrade(): pathfragment = choose_bucket_prefix(template='/{random}', max_len=16) - res = _website_request(bucket.name, +pathfragment) + res = _website_request(bucket.name, pathfragment) # RGW returns "302 Found" per RFC2616 # S3 returns 302 Moved Temporarily per RFC1945 new_url = 'https://%s%s' % (f['RedirectAllRequestsTo_HostName'], pathfragment) _website_expected_redirect_response(res, 302, ['Found', 'Moved Temporarily'], new_url) bucket.delete() + +# RoutingRules +ROUTING_RULES = { + 'empty': '', + 'AmazonExample1': \ +""" + + + docs/ + + + documents/ + + +""", + 'AmazonExample1+Protocol=https': \ +""" + + + docs/ + + + https + documents/ + + +""", + 'AmazonExample1+Protocol=https+Hostname=xyzzy': \ +""" + + + docs/ + + + 
https + xyzzy + documents/ + + +""", + 'AmazonExample1+Protocol=http2': \ +""" + + + docs/ + + + http2 + documents/ + + +""", + 'AmazonExample2': \ +""" + + + images/ + + + folderdeleted.html + + +""", + 'AmazonExample2+HttpRedirectCode=314': \ +""" + + + images/ + + + 314 + folderdeleted.html + + +""", + 'AmazonExample3': \ +""" + + + 404 + + + ec2-11-22-333-44.compute-1.amazonaws.com + report-404/ + + +""", + 'AmazonExample3+KeyPrefixEquals': \ +""" + + + images/ + 404 + + + ec2-11-22-333-44.compute-1.amazonaws.com + report-404/ + + +""", +} + +ROUTING_RULES_TESTS = [ + dict(xml=dict(RoutingRules=ROUTING_RULES['empty']), url='', location=None, code=200), + dict(xml=dict(RoutingRules=ROUTING_RULES['empty']), url='/', location=None, code=200), + dict(xml=dict(RoutingRules=ROUTING_RULES['empty']), url='/x', location=None, code=404), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1']), url='/', location=None, code=200), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1']), url='/x', location=None, code=404), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1']), url='/docs/', location=dict(proto='http',bucket='{bucket_name}',path='/documents/'), code=301), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1']), url='/docs/x', location=dict(proto='http',bucket='{bucket_name}',path='/documents/x'), code=301), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=https']), url='/', location=None, code=200), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=https']), url='/x', location=None, code=404), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=https']), url='/docs/', location=dict(proto='https',bucket='{bucket_name}',path='/documents/'), code=301), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=https']), url='/docs/x', location=dict(proto='https',bucket='{bucket_name}',path='/documents/x'), code=301), + 
dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=http2']), url='/', location=None, code=200), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=http2']), url='/x', location=None, code=404), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=http2']), url='/docs/', location=dict(proto='http2',bucket='{bucket_name}',path='/documents/'), code=301), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=http2']), url='/docs/x', location=dict(proto='http2',bucket='{bucket_name}',path='/documents/x'), code=301), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=https+Hostname=xyzzy']), url='/', location=None, code=200), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=https+Hostname=xyzzy']), url='/x', location=None, code=404), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=https+Hostname=xyzzy']), url='/docs/', location=dict(proto='https',hostname='xyzzy',path='/documents/'), code=301), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=https+Hostname=xyzzy']), url='/docs/x', location=dict(proto='https',hostname='xyzzy',path='/documents/x'), code=301), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample2']), url='/images/', location=dict(proto='http',bucket='{bucket_name}',path='/folderdeleted.html'), code=301), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample2']), url='/images/x', location=dict(proto='http',bucket='{bucket_name}',path='/folderdeleted.html'), code=301), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample2+HttpRedirectCode=314']), url='/images/', location=dict(proto='http',bucket='{bucket_name}',path='/folderdeleted.html'), code=314), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample2+HttpRedirectCode=314']), url='/images/x', location=dict(proto='http',bucket='{bucket_name}',path='/folderdeleted.html'), code=314), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample3']), url='/x', 
location=dict(proto='http',bucket='ec2-11-22-333-44.compute-1.amazonaws.com',path='/report-404/x'), code=301), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample3']), url='/images/x', location=dict(proto='http',bucket='ec2-11-22-333-44.compute-1.amazonaws.com',path='/report-404/images/x'), code=301), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample3+KeyPrefixEquals']), url='/x', location=None, code=404), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample3+KeyPrefixEquals']), url='/images/x', location=dict(proto='http',bucket='ec2-11-22-333-44.compute-1.amazonaws.com',path='/report-404/x'), code=301), +] + +def routing_setup(): + kwargs = {'obj':[]} + bucket = get_new_bucket() + kwargs['bucket'] = bucket + kwargs['obj'].append(bucket) + f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) + kwargs.update(f) + bucket.set_canned_acl('public-read') + + k = bucket.new_key(f['IndexDocument_Suffix']) + kwargs['obj'].append(k) + s = choose_bucket_prefix(template='

    Index

    {random}', max_len=64) + k.set_contents_from_string(s) + k.set_canned_acl('public-read') + + k = bucket.new_key(f['ErrorDocument_Key']) + kwargs['obj'].append(k) + s = choose_bucket_prefix(template='

    Error

    {random}', max_len=64) + k.set_contents_from_string(s) + k.set_canned_acl('public-read') + + return kwargs + +def routing_teardown(**kwargs): + for o in reversed(kwargs['obj']): + print('Deleting', str(o)) + o.delete() + + +@with_setup_kwargs(setup=routing_setup, teardown=routing_teardown) +def routing_check(*args, **kwargs): + bucket = kwargs['bucket'] + args=args[0] + #print(args) + pprint(args) + xml_fields = kwargs.copy() + xml_fields.update(args['xml']) + pprint(xml_fields) + f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc'], hardcoded_fields=xml_fields) + #print(f) + config_xml2 = bucket.get_website_configuration_xml() + config_xml2 = normalize_xml_whitespace(config_xml2, pretty_print=True) # For us to read + res = _website_request(bucket.name, args['url']) + print(config_xml2) + # RGW returns "302 Found" per RFC2616 + # S3 returns 302 Moved Temporarily per RFC1945 + new_url = args['location'] + if new_url is not None: + new_url = get_website_url(**new_url) + new_url = new_url.format(bucket_name=bucket.name) + if args['code'] >= 200 and args['code'] < 300: + #body = res.read() + #print(body) + #eq(body, args['content'], 'default content should match index.html set content') + ok(res.getheader('Content-Length', -1) > 0) + elif args['code'] >= 300 and args['code'] < 400: + _website_expected_redirect_response(res, args['code'], IGNORE_FIELD, new_url) + elif args['code'] >= 400: + _website_expected_error_response(res, bucket.name, args['code'], IGNORE_FIELD, IGNORE_FIELD) + else: + assert(False) + +@attr('xml') +def testGEN_routing(): + + for t in ROUTING_RULES_TESTS: + yield routing_check, t + + + From ddfcf59e53120d475ec715c94686056a0ebaacf3 Mon Sep 17 00:00:00 2001 From: "Robin H. Johnson" Date: Fri, 19 Jun 2015 17:09:40 +0000 Subject: [PATCH 10/27] Add tests for x-amz-website-redirect-location, code still pending. Signed-off-by: Robin H. 
Johnson --- s3tests/functional/test_s3_website.py | 84 +++++++++++++++++++++++++++ 1 file changed, 84 insertions(+) diff --git a/s3tests/functional/test_s3_website.py b/s3tests/functional/test_s3_website.py index 85a4e2b..df2441c 100644 --- a/s3tests/functional/test_s3_website.py +++ b/s3tests/functional/test_s3_website.py @@ -607,6 +607,90 @@ def test_website_bucket_private_redirectall_path_upgrade(): bucket.delete() +# ------ x-amz redirect tests +@attr(resource='bucket') +@attr(method='get') +@attr(operation='list') +@attr(assertion='x-amz-website-redirect-location should not fire without websiteconf') +@attr('s3website') +@attr('x-amz-website-redirect-location') +def test_websute_xredirect_nonwebsite(): + bucket = get_new_bucket() + #f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['RedirectAll']) + #bucket.set_canned_acl('private') + + k = bucket.new_key('page') + content = 'wrong-content' + headers = {'x-amz-website-redirect-location': '/relative'} + k.set_contents_from_string(content, headers=headers) + k.make_public() + + res = _website_request(bucket.name, '/page') + # RGW returns "302 Found" per RFC2616 + # S3 returns 302 Moved Temporarily per RFC1945 + #_website_expected_redirect_response(res, 302, ['Found', 'Moved Temporarily'], new_url) + __website_expected_reponse_status(res, 200, 'OK') + body = res.read() + print(body) + eq(body, content, 'default content should match index.html set content') + + k.delete() + bucket.delete() + +@attr(resource='bucket') +@attr(method='get') +@attr(operation='list') +@attr(assertion='x-amz-website-redirect-location should fire websiteconf, relative path') +@attr('s3website') +@attr('x-amz-website-redirect-location') +def test_websute_xredirect_relative(): + bucket = get_new_bucket() + f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) + bucket.make_public() + + k = bucket.new_key('page') + content = 'wrong-content' + headers = {'x-amz-website-redirect-location': '/relative'} + 
k.set_contents_from_string(content, headers=headers) + k.make_public() + + res = _website_request(bucket.name, '/page') + # RGW returns "302 Found" per RFC2616 + # S3 returns 302 Moved Temporarily per RFC1945 + new_url = get_website_url(bucket_name=bucket.name, path='/relative') + _website_expected_redirect_response(res, 302, ['Found', 'Moved Temporarily'], new_url) + + k.delete() + bucket.delete() + +@attr(resource='bucket') +@attr(method='get') +@attr(operation='list') +@attr(assertion='x-amz-website-redirect-location should fire websiteconf, absolute') +@attr('s3website') +@attr('x-amz-website-redirect-location') +def test_websute_xredirect_abs(): + bucket = get_new_bucket() + f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) + bucket.make_public() + + k = bucket.new_key('page') + content = 'wrong-content' + headers = {'x-amz-website-redirect-location': 'http://example.com/foo'} + k.set_contents_from_string(content, headers=headers) + k.make_public() + + res = _website_request(bucket.name, '/page') + # RGW returns "302 Found" per RFC2616 + # S3 returns 302 Moved Temporarily per RFC1945 + new_url = get_website_url(proto='http', hostname='example.com', path='/foo') + _website_expected_redirect_response(res, 302, ['Found', 'Moved Temporarily'], new_url) + + k.delete() + bucket.delete() + +# ------ RoutingRules tests + # RoutingRules ROUTING_RULES = { 'empty': '', From 672e0ad0e4c50b2f3d8605df9bdf00c2216bd491 Mon Sep 17 00:00:00 2001 From: "Robin H. Johnson" Date: Fri, 19 Jun 2015 17:09:53 +0000 Subject: [PATCH 11/27] Improve test attribute. Signed-off-by: Robin H. 
Johnson --- s3tests/functional/test_s3_website.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/s3tests/functional/test_s3_website.py b/s3tests/functional/test_s3_website.py index df2441c..e3e607a 100644 --- a/s3tests/functional/test_s3_website.py +++ b/s3tests/functional/test_s3_website.py @@ -884,7 +884,7 @@ def routing_check(*args, **kwargs): else: assert(False) -@attr('xml') +@attr('RoutingRules') def testGEN_routing(): for t in ROUTING_RULES_TESTS: From 4d8c6bdb695e00bb0c69a8b720d945dc962485dd Mon Sep 17 00:00:00 2001 From: "Robin H. Johnson" Date: Sun, 21 Jun 2015 05:04:51 +0000 Subject: [PATCH 12/27] Add XML improvements. Signed-off-by: Robin H. Johnson --- s3tests/common.py | 23 ++++++++++++++++++++--- 1 file changed, 20 insertions(+), 3 deletions(-) diff --git a/s3tests/common.py b/s3tests/common.py index 7b5cb47..55e669a 100644 --- a/s3tests/common.py +++ b/s3tests/common.py @@ -5,8 +5,12 @@ import os import random import string import yaml +import re from lxml import etree +from doctest import Example +from lxml.doctestcompare import LXMLOutputChecker + s3 = bunch.Bunch() config = bunch.Bunch() prefix = '' @@ -261,10 +265,23 @@ def normalize_xml_whitespace(xml, pretty_print=True): if element.text is not None and not element.text.strip(): element.text = None if element.text is not None: - element.text = element.text.strip().replace("\n","").replace("\r","") + element.text = element.text.strip().replace("\n", "").replace("\r", "") if element.tail is not None and not element.tail.strip(): element.tail = None if element.tail is not None: - element.tail = element.tail.strip().replace("\n","").replace("\r","") + element.tail = element.tail.strip().replace("\n", "").replace("\r", "") - return etree.tostring(root, encoding="utf-8", xml_declaration=True, pretty_print=pretty_print) + xmlstr = etree.tostring(root, encoding="utf-8", xml_declaration=True, pretty_print=pretty_print) + # there are two different DTD URIs + xmlstr = 
re.sub(r'xmlns="[^"]+"', 'xmlns="DTD-URI"', xmlstr) + xmlstr = re.sub(r'xmlns=\'[^\']+\'', 'xmlns="DTD-URI"', xmlstr) + for uri in ['http://doc.s3.amazonaws.com/doc/2006-03-01/', 'http://s3.amazonaws.com/doc/2006-03-01/']: + xmlstr = xmlstr.replace(uri, 'URI-DTD') + #xmlstr = re.sub(r'>\s+', '>', xmlstr, count=0, flags=re.MULTILINE) + return xmlstr + +def assert_xml_equal(got, want): + checker = LXMLOutputChecker() + if not checker.check_output(want, got, 0): + message = checker.output_difference(Example("", want), got, 0) + raise AssertionError(message) From c1f7ad54ceb88567b8e25a4038aa31ad35e589ee Mon Sep 17 00:00:00 2001 From: "Robin H. Johnson" Date: Sun, 21 Jun 2015 05:05:30 +0000 Subject: [PATCH 13/27] Use new XML checks. Signed-off-by: Robin H. Johnson --- s3tests/functional/test_s3_website.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/s3tests/functional/test_s3_website.py b/s3tests/functional/test_s3_website.py index e3e607a..8691697 100644 --- a/s3tests/functional/test_s3_website.py +++ b/s3tests/functional/test_s3_website.py @@ -20,8 +20,11 @@ from . import ( choose_bucket_prefix, ) -from ..common import with_setup_kwargs -from ..xmlhelper import normalize_xml_whitespace +from ..common import ( + with_setup_kwargs, + normalize_xml_whitespace, + assert_xml_equal +) IGNORE_FIELD = 'IGNORETHIS' @@ -86,6 +89,7 @@ def _test_website_prep(bucket, xml_template, hardcoded_fields = {}): config_xml2 = bucket.get_website_configuration_xml() config_xml2 = normalize_xml_whitespace(config_xml2, pretty_print=True) # For us to read #print("config_xml2\n", config_xml2) + assert_xml_equal(config_xml1, config_xml2) eq (config_xml1, config_xml2) f['WebsiteConfiguration'] = config_xml2 return f From 5d158567fd0a5929d36784ed18ee8a9d1154ba79 Mon Sep 17 00:00:00 2001 From: "Robin H. Johnson" Date: Sun, 21 Jun 2015 05:06:26 +0000 Subject: [PATCH 14/27] Update redirect checks for x-amz-website-redirect-location Signed-off-by: Robin H. 
Johnson --- s3tests/functional/test_s3_website.py | 30 ++++++++++++++++----------- 1 file changed, 18 insertions(+), 12 deletions(-) diff --git a/s3tests/functional/test_s3_website.py b/s3tests/functional/test_s3_website.py index 8691697..88885e7 100644 --- a/s3tests/functional/test_s3_website.py +++ b/s3tests/functional/test_s3_website.py @@ -625,9 +625,11 @@ def test_websute_xredirect_nonwebsite(): k = bucket.new_key('page') content = 'wrong-content' - headers = {'x-amz-website-redirect-location': '/relative'} - k.set_contents_from_string(content, headers=headers) - k.make_public() + redirect_dest = '/relative' + headers = {'x-amz-website-redirect-location': redirect_dest} + k.set_contents_from_string(content, headers=headers, policy='public-read') + redirect = k.get_redirect() + ok(k.get_redirect(), redirect_dest) res = _website_request(bucket.name, '/page') # RGW returns "302 Found" per RFC2616 @@ -654,15 +656,17 @@ def test_websute_xredirect_relative(): k = bucket.new_key('page') content = 'wrong-content' - headers = {'x-amz-website-redirect-location': '/relative'} - k.set_contents_from_string(content, headers=headers) - k.make_public() + redirect_dest = '/relative' + headers = {'x-amz-website-redirect-location': redirect_dest} + k.set_contents_from_string(content, headers=headers, policy='public-read') + redirect = k.get_redirect() + ok(k.get_redirect(), redirect_dest) res = _website_request(bucket.name, '/page') # RGW returns "302 Found" per RFC2616 # S3 returns 302 Moved Temporarily per RFC1945 - new_url = get_website_url(bucket_name=bucket.name, path='/relative') - _website_expected_redirect_response(res, 302, ['Found', 'Moved Temporarily'], new_url) + new_url = get_website_url(bucket_name=bucket.name, path=redirect_dest) + _website_expected_redirect_response(res, 301, ['Moved Permanently'], new_url) k.delete() bucket.delete() @@ -680,15 +684,17 @@ def test_websute_xredirect_abs(): k = bucket.new_key('page') content = 'wrong-content' - headers = 
{'x-amz-website-redirect-location': 'http://example.com/foo'} - k.set_contents_from_string(content, headers=headers) - k.make_public() + redirect_dest = 'http://example.com/foo' + headers = {'x-amz-website-redirect-location': redirect_dest} + k.set_contents_from_string(content, headers=headers, policy='public-read') + redirect = k.get_redirect() + ok(k.get_redirect(), redirect_dest) res = _website_request(bucket.name, '/page') # RGW returns "302 Found" per RFC2616 # S3 returns 302 Moved Temporarily per RFC1945 new_url = get_website_url(proto='http', hostname='example.com', path='/foo') - _website_expected_redirect_response(res, 302, ['Found', 'Moved Temporarily'], new_url) + _website_expected_redirect_response(res, 301, ['Moved Permanently'], new_url) k.delete() bucket.delete() From 819eecba5cd77b371390e7aff5fcea604c200da0 Mon Sep 17 00:00:00 2001 From: "Robin H. Johnson" Date: Mon, 22 Jun 2015 17:17:18 +0000 Subject: [PATCH 15/27] Work on tests. Signed-off-by: Robin H. Johnson --- s3tests/common.py | 10 +- s3tests/functional/test_s3_website.py | 234 ++++++++++++++++++-------- 2 files changed, 172 insertions(+), 72 deletions(-) diff --git a/s3tests/common.py b/s3tests/common.py index 55e669a..60067bd 100644 --- a/s3tests/common.py +++ b/s3tests/common.py @@ -258,7 +258,7 @@ def with_setup_kwargs(setup, teardown=None): # yield _test_gen -def normalize_xml_whitespace(xml, pretty_print=True): +def normalize_xml(xml, pretty_print=True): root = etree.fromstring(xml.encode(encoding='ascii')) for element in root.iter('*'): @@ -271,10 +271,14 @@ def normalize_xml_whitespace(xml, pretty_print=True): if element.tail is not None: element.tail = element.tail.strip().replace("\n", "").replace("\r", "") + # Sort the elements + for parent in root.xpath('//*[./*]'): # Search for parent elements + parent[:] = sorted(parent,key=lambda x: x.tag) + xmlstr = etree.tostring(root, encoding="utf-8", xml_declaration=True, pretty_print=pretty_print) # there are two different DTD URIs - 
xmlstr = re.sub(r'xmlns="[^"]+"', 'xmlns="DTD-URI"', xmlstr) - xmlstr = re.sub(r'xmlns=\'[^\']+\'', 'xmlns="DTD-URI"', xmlstr) + xmlstr = re.sub(r'xmlns="[^"]+"', 'xmlns="s3"', xmlstr) + xmlstr = re.sub(r'xmlns=\'[^\']+\'', 'xmlns="s3"', xmlstr) for uri in ['http://doc.s3.amazonaws.com/doc/2006-03-01/', 'http://s3.amazonaws.com/doc/2006-03-01/']: xmlstr = xmlstr.replace(uri, 'URI-DTD') #xmlstr = re.sub(r'>\s+', '>', xmlstr, count=0, flags=re.MULTILINE) diff --git a/s3tests/functional/test_s3_website.py b/s3tests/functional/test_s3_website.py index 88885e7..0427c82 100644 --- a/s3tests/functional/test_s3_website.py +++ b/s3tests/functional/test_s3_website.py @@ -5,11 +5,15 @@ import nose import string import random from pprint import pprint +import time from urlparse import urlparse from nose.tools import eq_ as eq, ok_ as ok from nose.plugins.attrib import attr +from nose.tools import timed + +from .. import common from . import ( get_new_bucket, @@ -20,12 +24,6 @@ from . import ( choose_bucket_prefix, ) -from ..common import ( - with_setup_kwargs, - normalize_xml_whitespace, - assert_xml_equal -) - IGNORE_FIELD = 'IGNORETHIS' WEBSITE_CONFIGS_XMLFRAG = { @@ -55,13 +53,16 @@ def get_website_url(**kwargs): hostname = kwargs['hostname'] if 'path' in kwargs: path = kwargs['path'] + + if hostname is None and bucket is None: + return '/' + path.lstrip('/') domain = config['main']['host'] if('s3website_domain' in config['main']): domain = config['main']['s3website_domain'] elif('s3website_domain' in config['alt']): domain = config['DEFAULT']['s3website_domain'] - if hostname is None: + if hostname is None and bucket is not None: hostname = '%s.%s' % (bucket, domain) path = path.lstrip('/') return "%s://%s/%s" % (proto, hostname, path) @@ -84,12 +85,14 @@ def _test_website_prep(bucket, xml_template, hardcoded_fields = {}): xml_fragment, f = _test_website_populate_fragment(xml_template, hardcoded_fields) config_xml1 = make_website_config(xml_fragment) 
bucket.set_website_configuration_xml(config_xml1) - config_xml1 = normalize_xml_whitespace(config_xml1, pretty_print=True) # Do it late, so the system gets weird whitespace #print("config_xml1\n", config_xml1) config_xml2 = bucket.get_website_configuration_xml() - config_xml2 = normalize_xml_whitespace(config_xml2, pretty_print=True) # For us to read + + # Cleanup for our validation + config_xml1 = common.normalize_xml(config_xml1, pretty_print=True) # Do it late, so the system gets weird whitespace + config_xml2 = common.normalize_xml(config_xml2, pretty_print=True) # For us to read + common.assert_xml_equal(config_xml1, config_xml2) #print("config_xml2\n", config_xml2) - assert_xml_equal(config_xml1, config_xml2) eq (config_xml1, config_xml2) f['WebsiteConfiguration'] = config_xml2 return f @@ -105,14 +108,40 @@ def __website_expected_reponse_status(res, status, reason): if reason is not IGNORE_FIELD: ok(res.reason in reason, 'HTTP reason was was %s should be %s' % (res.reason, reason)) -def _website_expected_error_response(res, bucket_name, status, reason, code): - body = res.read() - print(body) +def _website_expected_default_html(**kwargs): + fields = [] + for k in kwargs.keys(): + # AmazonS3 seems to be inconsistent, some HTML errors include BucketName, but others do not. + if k is 'BucketName': + continue + + v = kwargs[k] + if isinstance(v, str): + v = [v] + elif not isinstance(v, collections.Container): + v = [v] + for v2 in v: + s = '
  • %s: %s
  • ' % (k,v2) + fields.append(s) + return fields + +def _website_expected_error_response(res, bucket_name, status, reason, code, content=None, body=None): + if body is None: + body = res.read() + print(body) __website_expected_reponse_status(res, status, reason) - if code is not IGNORE_FIELD: - ok('
  • Code: '+code+'
  • ' in body, 'HTML should contain "Code: %s" ' % (code, )) - if bucket_name is not IGNORE_FIELD: - ok(('
  • BucketName: %s
  • ' % (bucket_name, )) in body, 'HTML should contain bucket name') + + # Argh, AmazonS3 is really inconsistent, so we have a conditional test! + # This is most visible if you have an ErrorDoc present + errorcode = res.getheader('x-amz-error-code', None) + if errorcode is not None: + eq(errorcode, code) + + if not isinstance(content, collections.Container): + content = set([content]) + for f in content: + if f is not IGNORE_FIELD and f is not None: + ok(f in body, 'HTML should contain "%s"' % (f, )) def _website_expected_redirect_response(res, status, reason, new_url): body = res.read() @@ -122,16 +151,18 @@ def _website_expected_redirect_response(res, status, reason, new_url): eq(loc, new_url, 'Location header should be set "%s" != "%s"' % (loc,new_url,)) ok(len(body) == 0, 'Body of a redirect should be empty') -def _website_request(bucket_name, path, method='GET'): +def _website_request(bucket_name, path, connect_hostname=None, method='GET'): url = get_website_url(proto='http', bucket=bucket_name, path=path) print("url", url) - o = urlparse(url) + if connect_hostname is None: + connect_hostname = o.hostname path = o.path + '?' 
+ o.query request_headers={} request_headers['Host'] = o.hostname - print('Request: {method} {path} {headers}'.format(method=method, path=path, headers=' '.join(map(lambda t: t[0]+':'+t[1]+"\n", request_headers.items())))) - res = _make_raw_request(config.main.host, config.main.port, method, path, request_headers=request_headers, secure=False) + request_headers['Accept'] = '*/*' + print('Request: {method} {path}\n{headers}'.format(method=method, path=path, headers=''.join(map(lambda t: t[0]+':'+t[1]+"\n", request_headers.items())))) + res = _make_raw_request(connect_hostname, config.main.port, method, path, request_headers=request_headers, secure=False) for (k,v) in res.getheaders(): print(k,v) return res @@ -143,10 +174,11 @@ def _website_request(bucket_name, path, method='GET'): @attr(assertion='non-existant bucket via website endpoint should give NoSuchBucket, exposing security risk') @attr('s3website') @attr('fails_on_rgw') +@nose.with_setup(setup=None, teardown=common.teardown) def test_website_nonexistant_bucket_s3(): bucket_name = get_new_bucket_name() res = _website_request(bucket_name, '') - _website_expected_error_response(res, bucket_name, 404, 'Not Found', 'NoSuchBucket') + _website_expected_error_response(res, bucket_name, 404, 'Not Found', 'NoSuchBucket', content=_website_expected_default_html(Code='NoSuchBucket')) @attr(resource='bucket') @attr(method='get') @@ -154,10 +186,11 @@ def test_website_nonexistant_bucket_s3(): @attr(assertion='non-existant bucket via website endpoint should give Forbidden, keeping bucket identity secure') @attr('s3website') @attr('fails_on_s3') +@nose.with_setup(setup=None, teardown=common.teardown) def test_website_nonexistant_bucket_rgw(): bucket_name = get_new_bucket_name() res = _website_request(bucket_name, '') - _website_expected_error_response(res, bucket_name, 403, 'Forbidden', 'AccessDenied') + _website_expected_error_response(res, bucket_name, 403, 'Forbidden', 'AccessDenied', 
content=_website_expected_default_html(Code='AccessDenied')) #------------- IndexDocument only, successes @attr(resource='bucket') @@ -165,6 +198,8 @@ def test_website_nonexistant_bucket_rgw(): @attr(operation='list') @attr(assertion='non-empty public buckets via s3website return page for /, where page is public') @attr('s3website') +@nose.with_setup(setup=None, teardown=common.teardown) +@timed(5) def test_website_public_bucket_list_public_index(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) @@ -173,6 +208,9 @@ def test_website_public_bucket_list_public_index(): indexstring = choose_bucket_prefix(template='{random}', max_len=256) indexhtml.set_contents_from_string(indexstring) indexhtml.make_public() + #time.sleep(1) + while bucket.get_key(f['IndexDocument_Suffix']) is None: + time.sleep(0.05) res = _website_request(bucket.name, '') body = res.read() @@ -187,6 +225,7 @@ def test_website_public_bucket_list_public_index(): @attr(operation='list') @attr(assertion='non-empty private buckets via s3website return page for /, where page is private') @attr('s3website') +@nose.with_setup(setup=None, teardown=common.teardown) def test_website_private_bucket_list_public_index(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) @@ -195,6 +234,10 @@ def test_website_private_bucket_list_public_index(): indexstring = choose_bucket_prefix(template='{random}', max_len=256) indexhtml.set_contents_from_string(indexstring) indexhtml.make_public() + #time.sleep(1) + while bucket.get_key(f['IndexDocument_Suffix']) is None: + time.sleep(0.05) + res = _website_request(bucket.name, '') __website_expected_reponse_status(res, 200, 'OK') @@ -211,13 +254,15 @@ def test_website_private_bucket_list_public_index(): @attr(operation='list') @attr(assertion='empty private buckets via s3website return a 403 for /') @attr('s3website') +@nose.with_setup(setup=None, teardown=common.teardown) def 
test_website_private_bucket_list_empty(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) bucket.set_canned_acl('private') + # TODO: wait for sync res = _website_request(bucket.name, '') - _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied') + _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied', content=_website_expected_default_html(Code='AccessDenied')) bucket.delete() @attr(resource='bucket') @@ -225,13 +270,14 @@ def test_website_private_bucket_list_empty(): @attr(operation='list') @attr(assertion='empty public buckets via s3website return a 404 for /') @attr('s3website') +@nose.with_setup(setup=None, teardown=common.teardown) def test_website_public_bucket_list_empty(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) bucket.make_public() res = _website_request(bucket.name, '') - _website_expected_error_response(res, bucket.name, 404, 'Not Found', 'NoSuchKey') + _website_expected_error_response(res, bucket.name, 404, 'Not Found', 'NoSuchKey', content=_website_expected_default_html(Code='NoSuchKey')) bucket.delete() @attr(resource='bucket') @@ -239,6 +285,7 @@ def test_website_public_bucket_list_empty(): @attr(operation='list') @attr(assertion='non-empty public buckets via s3website return page for /, where page is private') @attr('s3website') +@nose.with_setup(setup=None, teardown=common.teardown) def test_website_public_bucket_list_private_index(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) @@ -247,9 +294,14 @@ def test_website_public_bucket_list_private_index(): indexstring = choose_bucket_prefix(template='{random}', max_len=256) indexhtml.set_contents_from_string(indexstring) indexhtml.set_canned_acl('private') + #time.sleep(1) + #time.sleep(1) + while bucket.get_key(f['IndexDocument_Suffix']) is None: + time.sleep(0.05) + res = 
_website_request(bucket.name, '') - _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied') + _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied', content=_website_expected_default_html(Code='AccessDenied')) indexhtml.delete() bucket.delete() @@ -258,6 +310,7 @@ def test_website_public_bucket_list_private_index(): @attr(operation='list') @attr(assertion='non-empty private buckets via s3website return page for /, where page is private') @attr('s3website') +@nose.with_setup(setup=None, teardown=common.teardown) def test_website_private_bucket_list_private_index(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) @@ -266,9 +319,13 @@ def test_website_private_bucket_list_private_index(): indexstring = choose_bucket_prefix(template='{random}', max_len=256) indexhtml.set_contents_from_string(indexstring) indexhtml.set_canned_acl('private') + ##time.sleep(1) + while bucket.get_key(f['IndexDocument_Suffix']) is None: + time.sleep(0.05) + res = _website_request(bucket.name, '') - _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied') + _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied', content=_website_expected_default_html(Code='AccessDenied')) indexhtml.delete() bucket.delete() @@ -279,15 +336,14 @@ def test_website_private_bucket_list_private_index(): @attr(operation='list') @attr(assertion='empty private buckets via s3website return a 403 for /, missing errordoc') @attr('s3website') +@nose.with_setup(setup=None, teardown=common.teardown) def test_website_private_bucket_list_empty_missingerrordoc(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) bucket.set_canned_acl('private') res = _website_request(bucket.name, '') - _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied') - body = res.read() - print(body) + 
_website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied', content=_website_expected_default_html(Code='AccessDenied')) bucket.delete() @@ -296,6 +352,7 @@ def test_website_private_bucket_list_empty_missingerrordoc(): @attr(operation='list') @attr(assertion='empty public buckets via s3website return a 404 for /, missing errordoc') @attr('s3website') +@nose.with_setup(setup=None, teardown=common.teardown) def test_website_public_bucket_list_empty_missingerrordoc(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) @@ -310,6 +367,7 @@ def test_website_public_bucket_list_empty_missingerrordoc(): @attr(operation='list') @attr(assertion='non-empty public buckets via s3website return page for /, where page is private, missing errordoc') @attr('s3website') +@nose.with_setup(setup=None, teardown=common.teardown) def test_website_public_bucket_list_private_index_missingerrordoc(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) @@ -318,9 +376,12 @@ def test_website_public_bucket_list_private_index_missingerrordoc(): indexstring = choose_bucket_prefix(template='{random}', max_len=256) indexhtml.set_contents_from_string(indexstring) indexhtml.set_canned_acl('private') + #time.sleep(1) + while bucket.get_key(f['IndexDocument_Suffix']) is None: + time.sleep(0.05) res = _website_request(bucket.name, '') - _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied') + _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied', content=_website_expected_default_html(Code='AccessDenied')) indexhtml.delete() bucket.delete() @@ -330,6 +391,7 @@ def test_website_public_bucket_list_private_index_missingerrordoc(): @attr(operation='list') @attr(assertion='non-empty private buckets via s3website return page for /, where page is private, missing errordoc') @attr('s3website') +@nose.with_setup(setup=None, 
teardown=common.teardown) def test_website_private_bucket_list_private_index_missingerrordoc(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) @@ -338,9 +400,12 @@ def test_website_private_bucket_list_private_index_missingerrordoc(): indexstring = choose_bucket_prefix(template='{random}', max_len=256) indexhtml.set_contents_from_string(indexstring) indexhtml.set_canned_acl('private') + #time.sleep(1) + while bucket.get_key(f['IndexDocument_Suffix']) is None: + time.sleep(0.05) res = _website_request(bucket.name, '') - _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied') + _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied', content=_website_expected_default_html(Code='AccessDenied')) indexhtml.delete() bucket.delete() @@ -351,6 +416,7 @@ def test_website_private_bucket_list_private_index_missingerrordoc(): @attr(operation='list') @attr(assertion='empty private buckets via s3website return a 403 for /, blocked errordoc') @attr('s3website') +@nose.with_setup(setup=None, teardown=common.teardown) def test_website_private_bucket_list_empty_blockederrordoc(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) @@ -359,12 +425,15 @@ def test_website_private_bucket_list_empty_blockederrordoc(): errorstring = choose_bucket_prefix(template='{random}', max_len=256) errorhtml.set_contents_from_string(errorstring) errorhtml.set_canned_acl('private') + #time.sleep(1) + while bucket.get_key(f['ErrorDocument_Key']) is None: + time.sleep(0.05) res = _website_request(bucket.name, '') - _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied') body = res.read() print(body) - ok(errorstring not in body, 'error content should match error.html set content') + _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied', 
content=_website_expected_default_html(Code='AccessDenied'), body=body) + ok(errorstring not in body, 'error content should NOT match error.html set content') errorhtml.delete() bucket.delete() @@ -374,6 +443,7 @@ def test_website_private_bucket_list_empty_blockederrordoc(): @attr(operation='list') @attr(assertion='empty public buckets via s3website return a 404 for /, blocked errordoc') @attr('s3website') +@nose.with_setup(setup=None, teardown=common.teardown) def test_website_public_bucket_list_empty_blockederrordoc(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) @@ -382,11 +452,13 @@ def test_website_public_bucket_list_empty_blockederrordoc(): errorstring = choose_bucket_prefix(template='{random}', max_len=256) errorhtml.set_contents_from_string(errorstring) errorhtml.set_canned_acl('private') + while bucket.get_key(f['ErrorDocument_Key']) is None: + time.sleep(0.05) res = _website_request(bucket.name, '') - _website_expected_error_response(res, bucket.name, 404, 'Not Found', 'NoSuchKey') body = res.read() print(body) + _website_expected_error_response(res, bucket.name, 404, 'Not Found', 'NoSuchKey', content=_website_expected_default_html(Code='NoSuchKey'), body=body) ok(errorstring not in body, 'error content should match error.html set content') errorhtml.delete() @@ -397,6 +469,7 @@ def test_website_public_bucket_list_empty_blockederrordoc(): @attr(operation='list') @attr(assertion='non-empty public buckets via s3website return page for /, where page is private, blocked errordoc') @attr('s3website') +@nose.with_setup(setup=None, teardown=common.teardown) def test_website_public_bucket_list_private_index_blockederrordoc(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) @@ -409,11 +482,14 @@ def test_website_public_bucket_list_private_index_blockederrordoc(): errorstring = choose_bucket_prefix(template='{random}', max_len=256) 
errorhtml.set_contents_from_string(errorstring) errorhtml.set_canned_acl('private') + #time.sleep(1) + while bucket.get_key(f['ErrorDocument_Key']) is None: + time.sleep(0.05) res = _website_request(bucket.name, '') - _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied') body = res.read() print(body) + _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied', content=_website_expected_default_html(Code='AccessDenied'), body=body) ok(errorstring not in body, 'error content should match error.html set content') indexhtml.delete() @@ -425,6 +501,7 @@ def test_website_public_bucket_list_private_index_blockederrordoc(): @attr(operation='list') @attr(assertion='non-empty private buckets via s3website return page for /, where page is private, blocked errordoc') @attr('s3website') +@nose.with_setup(setup=None, teardown=common.teardown) def test_website_private_bucket_list_private_index_blockederrordoc(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) @@ -437,11 +514,14 @@ def test_website_private_bucket_list_private_index_blockederrordoc(): errorstring = choose_bucket_prefix(template='{random}', max_len=256) errorhtml.set_contents_from_string(errorstring) errorhtml.set_canned_acl('private') + #time.sleep(1) + while bucket.get_key(f['ErrorDocument_Key']) is None: + time.sleep(0.05) res = _website_request(bucket.name, '') - _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied') body = res.read() print(body) + _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied', content=_website_expected_default_html(Code='AccessDenied'), body=body) ok(errorstring not in body, 'error content should match error.html set content') indexhtml.delete() @@ -454,6 +534,7 @@ def test_website_private_bucket_list_private_index_blockederrordoc(): @attr(operation='list') @attr(assertion='empty private buckets via s3website return a 
403 for /, good errordoc') @attr('s3website') +@nose.with_setup(setup=None, teardown=common.teardown) def test_website_private_bucket_list_empty_gooderrordoc(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) @@ -462,12 +543,12 @@ def test_website_private_bucket_list_empty_gooderrordoc(): errorstring = choose_bucket_prefix(template='{random}', max_len=256) errorhtml.set_contents_from_string(errorstring) errorhtml.set_canned_acl('public-read') + #time.sleep(1) + while bucket.get_key(f['ErrorDocument_Key']) is None: + time.sleep(0.05) res = _website_request(bucket.name, '') - _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied') - body = res.read() - print(body) - eq(body, errorstring, 'error content should match error.html set content') + _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied', content=[errorstring]) errorhtml.delete() bucket.delete() @@ -477,6 +558,7 @@ def test_website_private_bucket_list_empty_gooderrordoc(): @attr(operation='list') @attr(assertion='empty public buckets via s3website return a 404 for /, good errordoc') @attr('s3website') +@nose.with_setup(setup=None, teardown=common.teardown) def test_website_public_bucket_list_empty_gooderrordoc(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) @@ -485,12 +567,12 @@ def test_website_public_bucket_list_empty_gooderrordoc(): errorstring = choose_bucket_prefix(template='{random}', max_len=256) errorhtml.set_contents_from_string(errorstring) errorhtml.set_canned_acl('public-read') + #time.sleep(1) + while bucket.get_key(f['ErrorDocument_Key']) is None: + time.sleep(0.05) res = _website_request(bucket.name, '') - _website_expected_error_response(res, bucket.name, 404, 'Not Found', 'NoSuchKey') - body = res.read() - print(body) - eq(body, errorstring, 'error content should match error.html set content') + 
_website_expected_error_response(res, bucket.name, 404, 'Not Found', 'NoSuchKey', content=[errorstring]) errorhtml.delete() bucket.delete() @@ -500,6 +582,7 @@ def test_website_public_bucket_list_empty_gooderrordoc(): @attr(operation='list') @attr(assertion='non-empty public buckets via s3website return page for /, where page is private') @attr('s3website') +@nose.with_setup(setup=None, teardown=common.teardown) def test_website_public_bucket_list_private_index_gooderrordoc(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) @@ -512,12 +595,12 @@ def test_website_public_bucket_list_private_index_gooderrordoc(): errorstring = choose_bucket_prefix(template='{random}', max_len=256) errorhtml.set_contents_from_string(errorstring) errorhtml.set_canned_acl('public-read') + #time.sleep(1) + while bucket.get_key(f['ErrorDocument_Key']) is None: + time.sleep(0.05) res = _website_request(bucket.name, '') - _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied') - body = res.read() - print(body) - eq(body, errorstring, 'error content should match error.html set content') + _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied', content=[errorstring]) indexhtml.delete() errorhtml.delete() @@ -528,6 +611,7 @@ def test_website_public_bucket_list_private_index_gooderrordoc(): @attr(operation='list') @attr(assertion='non-empty private buckets via s3website return page for /, where page is private') @attr('s3website') +@nose.with_setup(setup=None, teardown=common.teardown) def test_website_private_bucket_list_private_index_gooderrordoc(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) @@ -540,12 +624,12 @@ def test_website_private_bucket_list_private_index_gooderrordoc(): errorstring = choose_bucket_prefix(template='{random}', max_len=256) errorhtml.set_contents_from_string(errorstring) 
errorhtml.set_canned_acl('public-read') + #time.sleep(1) + while bucket.get_key(f['ErrorDocument_Key']) is None: + time.sleep(0.05) res = _website_request(bucket.name, '') - _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied') - body = res.read() - print(body) - eq(body, errorstring, 'error content should match error.html set content') + _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied', content=[errorstring]) indexhtml.delete() errorhtml.delete() @@ -557,6 +641,7 @@ def test_website_private_bucket_list_private_index_gooderrordoc(): @attr(operation='list') @attr(assertion='RedirectAllRequestsTo without protocol should TODO') @attr('s3website') +@nose.with_setup(setup=None, teardown=common.teardown) def test_website_bucket_private_redirectall_base(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['RedirectAll']) @@ -566,7 +651,7 @@ def test_website_bucket_private_redirectall_base(): # RGW returns "302 Found" per RFC2616 # S3 returns 302 Moved Temporarily per RFC1945 new_url = 'http://%s/' % f['RedirectAllRequestsTo_HostName'] - _website_expected_redirect_response(res, 302, ['Found', 'Moved Temporarily'], new_url) + _website_expected_redirect_response(res, 301, ['Moved Permanently'], new_url) bucket.delete() @@ -575,6 +660,7 @@ def test_website_bucket_private_redirectall_base(): @attr(operation='list') @attr(assertion='RedirectAllRequestsTo without protocol should TODO') @attr('s3website') +@nose.with_setup(setup=None, teardown=common.teardown) def test_website_bucket_private_redirectall_path(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['RedirectAll']) @@ -586,7 +672,7 @@ def test_website_bucket_private_redirectall_path(): # RGW returns "302 Found" per RFC2616 # S3 returns 302 Moved Temporarily per RFC1945 new_url = 'http://%s%s' % (f['RedirectAllRequestsTo_HostName'], pathfragment) - _website_expected_redirect_response(res, 302, 
['Found', 'Moved Temporarily'], new_url) + _website_expected_redirect_response(res, 301, ['Moved Permanently'], new_url) bucket.delete() @@ -595,6 +681,7 @@ def test_website_bucket_private_redirectall_path(): @attr(operation='list') @attr(assertion='RedirectAllRequestsTo without protocol should TODO') @attr('s3website') +@nose.with_setup(setup=None, teardown=common.teardown) def test_website_bucket_private_redirectall_path_upgrade(): bucket = get_new_bucket() x = string.Template(WEBSITE_CONFIGS_XMLFRAG['RedirectAll+Protocol']).safe_substitute(RedirectAllRequestsTo_Protocol='https') @@ -607,7 +694,7 @@ def test_website_bucket_private_redirectall_path_upgrade(): # RGW returns "302 Found" per RFC2616 # S3 returns 302 Moved Temporarily per RFC1945 new_url = 'https://%s%s' % (f['RedirectAllRequestsTo_HostName'], pathfragment) - _website_expected_redirect_response(res, 302, ['Found', 'Moved Temporarily'], new_url) + _website_expected_redirect_response(res, 301, ['Moved Permanently'], new_url) bucket.delete() @@ -618,7 +705,8 @@ def test_website_bucket_private_redirectall_path_upgrade(): @attr(assertion='x-amz-website-redirect-location should not fire without websiteconf') @attr('s3website') @attr('x-amz-website-redirect-location') -def test_websute_xredirect_nonwebsite(): +#@nose.with_setup(setup=None, teardown=common.teardown) +def test_website_xredirect_nonwebsite(): bucket = get_new_bucket() #f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['RedirectAll']) #bucket.set_canned_acl('private') @@ -632,13 +720,14 @@ def test_websute_xredirect_nonwebsite(): ok(k.get_redirect(), redirect_dest) res = _website_request(bucket.name, '/page') + body = res.read() + print(body) # RGW returns "302 Found" per RFC2616 # S3 returns 302 Moved Temporarily per RFC1945 #_website_expected_redirect_response(res, 302, ['Found', 'Moved Temporarily'], new_url) - __website_expected_reponse_status(res, 200, 'OK') - body = res.read() - print(body) - eq(body, content, 'default content should 
match index.html set content') + expected_content = _website_expected_default_html(Code='NoSuchWebsiteConfiguration', BucketName=bucket.name, Message='The specified bucket does not have a website configuration') + print(expected_content) + _website_expected_error_response(res, bucket.name, 404, 'Not Found', 'NoSuchWebsiteConfiguration', content=expected_content, body=body) k.delete() bucket.delete() @@ -649,7 +738,8 @@ def test_websute_xredirect_nonwebsite(): @attr(assertion='x-amz-website-redirect-location should fire websiteconf, relative path') @attr('s3website') @attr('x-amz-website-redirect-location') -def test_websute_xredirect_relative(): +@nose.with_setup(setup=None, teardown=common.teardown) +def test_website_xredirect_relative(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) bucket.make_public() @@ -665,8 +755,8 @@ def test_websute_xredirect_relative(): res = _website_request(bucket.name, '/page') # RGW returns "302 Found" per RFC2616 # S3 returns 302 Moved Temporarily per RFC1945 - new_url = get_website_url(bucket_name=bucket.name, path=redirect_dest) - _website_expected_redirect_response(res, 301, ['Moved Permanently'], new_url) + #new_url = get_website_url(bucket_name=bucket.name, path=redirect_dest) + _website_expected_redirect_response(res, 301, ['Moved Permanently'], redirect_dest) k.delete() bucket.delete() @@ -677,7 +767,8 @@ def test_websute_xredirect_relative(): @attr(assertion='x-amz-website-redirect-location should fire websiteconf, absolute') @attr('s3website') @attr('x-amz-website-redirect-location') -def test_websute_xredirect_abs(): +@nose.with_setup(setup=None, teardown=common.teardown) +def test_website_xredirect_abs(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) bucket.make_public() @@ -853,6 +944,10 @@ def routing_setup(): k.set_contents_from_string(s) k.set_canned_acl('public-read') + #time.sleep(1) + while 
bucket.get_key(f['ErrorDocument_Key']) is None: + time.sleep(0.05) + return kwargs def routing_teardown(**kwargs): @@ -861,7 +956,8 @@ def routing_teardown(**kwargs): o.delete() -@with_setup_kwargs(setup=routing_setup, teardown=routing_teardown) +@common.with_setup_kwargs(setup=routing_setup, teardown=routing_teardown) +@timed(5) def routing_check(*args, **kwargs): bucket = kwargs['bucket'] args=args[0] @@ -873,7 +969,7 @@ def routing_check(*args, **kwargs): f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc'], hardcoded_fields=xml_fields) #print(f) config_xml2 = bucket.get_website_configuration_xml() - config_xml2 = normalize_xml_whitespace(config_xml2, pretty_print=True) # For us to read + config_xml2 = common.normalize_xml(config_xml2, pretty_print=True) # For us to read res = _website_request(bucket.name, args['url']) print(config_xml2) # RGW returns "302 Found" per RFC2616 @@ -895,8 +991,8 @@ def routing_check(*args, **kwargs): assert(False) @attr('RoutingRules') -def testGEN_routing(): - +@nose.with_setup(setup=None, teardown=common.teardown) +def test_routing_generator(): for t in ROUTING_RULES_TESTS: yield routing_check, t From e0655234b135795ebd0950b2159a06ec53c132d0 Mon Sep 17 00:00:00 2001 From: "Robin H. Johnson" Date: Tue, 23 Jun 2015 06:52:52 +0000 Subject: [PATCH 16/27] Lots of tests, that pass on AmazonS3 unless the phase of the moon is wrong. There is a non-trivial degree of lag in S3 applying WebsiteConfiguration changes sometimes, eu-west-1 was taking 20+ seconds during part of the last day. Compounded by the Amazon eventual-consistency made developing these tests difficult at times. Signed-off-by: Robin H. 
Johnson --- s3tests/common.py | 6 + s3tests/functional/__init__.py | 12 +- s3tests/functional/test_s3_website.py | 162 ++++++++++++++++++-------- 3 files changed, 127 insertions(+), 53 deletions(-) diff --git a/s3tests/common.py b/s3tests/common.py index 60067bd..fd834bf 100644 --- a/s3tests/common.py +++ b/s3tests/common.py @@ -259,6 +259,9 @@ def with_setup_kwargs(setup, teardown=None): def normalize_xml(xml, pretty_print=True): + if xml is None: + return xml + root = etree.fromstring(xml.encode(encoding='ascii')) for element in root.iter('*'): @@ -285,6 +288,9 @@ def normalize_xml(xml, pretty_print=True): return xmlstr def assert_xml_equal(got, want): + assert want is not None, 'Wanted XML cannot be None' + if got is None: + raise AssertionError('Got input to validate was None') checker = LXMLOutputChecker() if not checker.check_output(want, got, 0): message = checker.output_difference(Example("", want), got, 0) diff --git a/s3tests/functional/__init__.py b/s3tests/functional/__init__.py index f03bcc2..7a5eb57 100644 --- a/s3tests/functional/__init__.py +++ b/s3tests/functional/__init__.py @@ -401,7 +401,7 @@ def get_new_bucket(target=None, name=None, headers=None): bucket = connection.create_bucket(name, location=target.conf.api_name, headers=headers) return bucket -def _make_request(method, bucket, key, body=None, authenticated=False, response_headers=None, request_headers=None, expires_in=100000, path_style=True): +def _make_request(method, bucket, key, body=None, authenticated=False, response_headers=None, request_headers=None, expires_in=100000, path_style=True, timeout=None): """ issue a request for a specified method, on a specified , with a specified (optional) body (encrypted per the connection), and @@ -425,9 +425,9 @@ def _make_request(method, bucket, key, body=None, authenticated=False, response_ else: path = '/{obj}'.format(bucket=key.bucket.name, obj=key.name) - return _make_raw_request(host=s3.main.host, port=s3.main.port, method=method, 
path=path, body=body, request_headers=request_headers, secure=s3.main.is_secure) + return _make_raw_request(host=s3.main.host, port=s3.main.port, method=method, path=path, body=body, request_headers=request_headers, secure=s3.main.is_secure, timeout=timeout) -def _make_bucket_request(method, bucket, body=None, authenticated=False, response_headers=None, request_headers=None, expires_in=100000, path_style=True): +def _make_bucket_request(method, bucket, body=None, authenticated=False, response_headers=None, request_headers=None, expires_in=100000, path_style=True, timeout=None): """ issue a request for a specified method, on a specified , with a specified (optional) body (encrypted per the connection), and @@ -451,9 +451,9 @@ def _make_bucket_request(method, bucket, body=None, authenticated=False, respons else: path = '/' - return _make_raw_request(host=s3.main.host, port=s3.main.port, method=method, path=path, body=body, request_headers=request_headers, secure=s3.main.is_secure) + return _make_raw_request(host=s3.main.host, port=s3.main.port, method=method, path=path, body=body, request_headers=request_headers, secure=s3.main.is_secure, timeout=timeout) -def _make_raw_request(host, port, method, path, body=None, request_headers=None, secure=False): +def _make_raw_request(host, port, method, path, body=None, request_headers=None, secure=False, timeout=None): if secure: class_ = HTTPSConnection else: @@ -464,7 +464,7 @@ def _make_raw_request(host, port, method, path, body=None, request_headers=None, skip_host=('Host' in request_headers) skip_accept_encoding = False - c = class_(host, port, strict=True) + c = class_(host, port, strict=True, timeout=timeout) # We do the request manually, so we can muck with headers #c.request(method, path, body=body, headers=request_headers) diff --git a/s3tests/functional/test_s3_website.py b/s3tests/functional/test_s3_website.py index 0427c82..5f055c3 100644 --- a/s3tests/functional/test_s3_website.py +++ 
b/s3tests/functional/test_s3_website.py @@ -12,6 +12,7 @@ from urlparse import urlparse from nose.tools import eq_ as eq, ok_ as ok from nose.plugins.attrib import attr from nose.tools import timed +from boto.exception import S3ResponseError from .. import common @@ -26,6 +27,9 @@ from . import ( IGNORE_FIELD = 'IGNORETHIS' +SLEEP_INTERVAL = 0.01 +SLEEP_MAX = 2.0 + WEBSITE_CONFIGS_XMLFRAG = { 'IndexDoc': '${IndexDocument_Suffix}${RoutingRules}', 'IndexDocErrorDoc': '${IndexDocument_Suffix}${ErrorDocument_Key}${RoutingRules}', @@ -37,7 +41,7 @@ def make_website_config(xml_fragment): """ Take the tedious stuff out of the config """ - return '' + xml_fragment + '' + return '' + xml_fragment + '' def get_website_url(**kwargs): """ @@ -81,20 +85,52 @@ def _test_website_populate_fragment(xml_fragment, fields): xml_fragment = string.Template(xml_fragment).safe_substitute(**f) return xml_fragment, f -def _test_website_prep(bucket, xml_template, hardcoded_fields = {}): +def _test_website_prep(bucket, xml_template, hardcoded_fields = {}, expect_fail=None): xml_fragment, f = _test_website_populate_fragment(xml_template, hardcoded_fields) - config_xml1 = make_website_config(xml_fragment) - bucket.set_website_configuration_xml(config_xml1) - #print("config_xml1\n", config_xml1) - config_xml2 = bucket.get_website_configuration_xml() + f['WebsiteConfiguration'] = '' + if not xml_template: + bucket.delete_website_configuration() + return f + + config_xmlnew = make_website_config(xml_fragment) + config_xmlold = '' + try: + config_xmlold = common.normalize_xml(bucket.get_website_configuration_xml(), pretty_print=True) + except S3ResponseError as e: + if str(e.status) == str(404) and ('NoSuchWebsiteConfiguration' in e.body or 'NoSuchWebsiteConfiguration' in e.code): + pass + else: + raise e + + try: + bucket.set_website_configuration_xml(config_xmlnew) + config_xmlnew = common.normalize_xml(config_xmlnew, pretty_print=True) + except S3ResponseError as e: + if expect_fail is not None: 
+ if isinstance(expect_fail, dict): + pass + elif isinstance(expect_fail, str): + pass + raise e + + # TODO: in some cases, it takes non-zero time for the config to be applied by AmazonS3 + # We should figure out how to poll for changes better + # WARNING: eu-west-1 as of 2015/06/22 was taking at least 4 seconds to propogate website configs, esp when you cycle between non-null configs + time.sleep(0.1) + config_xmlcmp = common.normalize_xml(bucket.get_website_configuration_xml(), pretty_print=True) + + #if config_xmlold is not None: + # print('old',config_xmlold.replace("\n",'')) + #if config_xmlcmp is not None: + # print('cmp',config_xmlcmp.replace("\n",'')) + #if config_xmlnew is not None: + # print('new',config_xmlnew.replace("\n",'')) # Cleanup for our validation - config_xml1 = common.normalize_xml(config_xml1, pretty_print=True) # Do it late, so the system gets weird whitespace - config_xml2 = common.normalize_xml(config_xml2, pretty_print=True) # For us to read - common.assert_xml_equal(config_xml1, config_xml2) - #print("config_xml2\n", config_xml2) - eq (config_xml1, config_xml2) - f['WebsiteConfiguration'] = config_xml2 + common.assert_xml_equal(config_xmlcmp, config_xmlnew) + #print("config_xmlcmp\n", config_xmlcmp) + #eq (config_xmlnew, config_xmlcmp) + f['WebsiteConfiguration'] = config_xmlcmp return f def __website_expected_reponse_status(res, status, reason): @@ -135,7 +171,8 @@ def _website_expected_error_response(res, bucket_name, status, reason, code, con # This is most visible if you have an ErrorDoc present errorcode = res.getheader('x-amz-error-code', None) if errorcode is not None: - eq(errorcode, code) + if code is not IGNORE_FIELD: + eq(errorcode, code) if not isinstance(content, collections.Container): content = set([content]) @@ -151,7 +188,7 @@ def _website_expected_redirect_response(res, status, reason, new_url): eq(loc, new_url, 'Location header should be set "%s" != "%s"' % (loc,new_url,)) ok(len(body) == 0, 'Body of a redirect should 
be empty') -def _website_request(bucket_name, path, connect_hostname=None, method='GET'): +def _website_request(bucket_name, path, connect_hostname=None, method='GET', timeout=None): url = get_website_url(proto='http', bucket=bucket_name, path=path) print("url", url) o = urlparse(url) @@ -162,7 +199,7 @@ def _website_request(bucket_name, path, connect_hostname=None, method='GET'): request_headers['Host'] = o.hostname request_headers['Accept'] = '*/*' print('Request: {method} {path}\n{headers}'.format(method=method, path=path, headers=''.join(map(lambda t: t[0]+':'+t[1]+"\n", request_headers.items())))) - res = _make_raw_request(connect_hostname, config.main.port, method, path, request_headers=request_headers, secure=False) + res = _make_raw_request(connect_hostname, config.main.port, method, path, request_headers=request_headers, secure=False, timeout=timeout) for (k,v) in res.getheaders(): print(k,v) return res @@ -199,7 +236,7 @@ def test_website_nonexistant_bucket_rgw(): @attr(assertion='non-empty public buckets via s3website return page for /, where page is public') @attr('s3website') @nose.with_setup(setup=None, teardown=common.teardown) -@timed(5) +@timed(10) def test_website_public_bucket_list_public_index(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) @@ -210,7 +247,7 @@ def test_website_public_bucket_list_public_index(): indexhtml.make_public() #time.sleep(1) while bucket.get_key(f['IndexDocument_Suffix']) is None: - time.sleep(0.05) + time.sleep(SLEEP_INTERVAL) res = _website_request(bucket.name, '') body = res.read() @@ -236,7 +273,7 @@ def test_website_private_bucket_list_public_index(): indexhtml.make_public() #time.sleep(1) while bucket.get_key(f['IndexDocument_Suffix']) is None: - time.sleep(0.05) + time.sleep(SLEEP_INTERVAL) res = _website_request(bucket.name, '') @@ -297,7 +334,7 @@ def test_website_public_bucket_list_private_index(): #time.sleep(1) #time.sleep(1) while 
bucket.get_key(f['IndexDocument_Suffix']) is None: - time.sleep(0.05) + time.sleep(SLEEP_INTERVAL) res = _website_request(bucket.name, '') @@ -321,7 +358,7 @@ def test_website_private_bucket_list_private_index(): indexhtml.set_canned_acl('private') ##time.sleep(1) while bucket.get_key(f['IndexDocument_Suffix']) is None: - time.sleep(0.05) + time.sleep(SLEEP_INTERVAL) res = _website_request(bucket.name, '') @@ -378,7 +415,7 @@ def test_website_public_bucket_list_private_index_missingerrordoc(): indexhtml.set_canned_acl('private') #time.sleep(1) while bucket.get_key(f['IndexDocument_Suffix']) is None: - time.sleep(0.05) + time.sleep(SLEEP_INTERVAL) res = _website_request(bucket.name, '') _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied', content=_website_expected_default_html(Code='AccessDenied')) @@ -402,7 +439,7 @@ def test_website_private_bucket_list_private_index_missingerrordoc(): indexhtml.set_canned_acl('private') #time.sleep(1) while bucket.get_key(f['IndexDocument_Suffix']) is None: - time.sleep(0.05) + time.sleep(SLEEP_INTERVAL) res = _website_request(bucket.name, '') _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied', content=_website_expected_default_html(Code='AccessDenied')) @@ -427,7 +464,7 @@ def test_website_private_bucket_list_empty_blockederrordoc(): errorhtml.set_canned_acl('private') #time.sleep(1) while bucket.get_key(f['ErrorDocument_Key']) is None: - time.sleep(0.05) + time.sleep(SLEEP_INTERVAL) res = _website_request(bucket.name, '') body = res.read() @@ -453,7 +490,7 @@ def test_website_public_bucket_list_empty_blockederrordoc(): errorhtml.set_contents_from_string(errorstring) errorhtml.set_canned_acl('private') while bucket.get_key(f['ErrorDocument_Key']) is None: - time.sleep(0.05) + time.sleep(SLEEP_INTERVAL) res = _website_request(bucket.name, '') body = res.read() @@ -484,7 +521,7 @@ def test_website_public_bucket_list_private_index_blockederrordoc(): 
errorhtml.set_canned_acl('private') #time.sleep(1) while bucket.get_key(f['ErrorDocument_Key']) is None: - time.sleep(0.05) + time.sleep(SLEEP_INTERVAL) res = _website_request(bucket.name, '') body = res.read() @@ -516,7 +553,7 @@ def test_website_private_bucket_list_private_index_blockederrordoc(): errorhtml.set_canned_acl('private') #time.sleep(1) while bucket.get_key(f['ErrorDocument_Key']) is None: - time.sleep(0.05) + time.sleep(SLEEP_INTERVAL) res = _website_request(bucket.name, '') body = res.read() @@ -545,7 +582,7 @@ def test_website_private_bucket_list_empty_gooderrordoc(): errorhtml.set_canned_acl('public-read') #time.sleep(1) while bucket.get_key(f['ErrorDocument_Key']) is None: - time.sleep(0.05) + time.sleep(SLEEP_INTERVAL) res = _website_request(bucket.name, '') _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied', content=[errorstring]) @@ -569,7 +606,7 @@ def test_website_public_bucket_list_empty_gooderrordoc(): errorhtml.set_canned_acl('public-read') #time.sleep(1) while bucket.get_key(f['ErrorDocument_Key']) is None: - time.sleep(0.05) + time.sleep(SLEEP_INTERVAL) res = _website_request(bucket.name, '') _website_expected_error_response(res, bucket.name, 404, 'Not Found', 'NoSuchKey', content=[errorstring]) @@ -597,7 +634,7 @@ def test_website_public_bucket_list_private_index_gooderrordoc(): errorhtml.set_canned_acl('public-read') #time.sleep(1) while bucket.get_key(f['ErrorDocument_Key']) is None: - time.sleep(0.05) + time.sleep(SLEEP_INTERVAL) res = _website_request(bucket.name, '') _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied', content=[errorstring]) @@ -626,7 +663,7 @@ def test_website_private_bucket_list_private_index_gooderrordoc(): errorhtml.set_canned_acl('public-read') #time.sleep(1) while bucket.get_key(f['ErrorDocument_Key']) is None: - time.sleep(0.05) + time.sleep(SLEEP_INTERVAL) res = _website_request(bucket.name, '') _website_expected_error_response(res, 
bucket.name, 403, 'Forbidden', 'AccessDenied', content=[errorstring]) @@ -854,14 +891,14 @@ ROUTING_RULES = { """, - 'AmazonExample2+HttpRedirectCode=314': \ + 'AmazonExample2+HttpRedirectCode=TMPL': \ """ images/ - 314 + {HttpRedirectCode} folderdeleted.html @@ -897,38 +934,68 @@ ROUTING_RULES_TESTS = [ dict(xml=dict(RoutingRules=ROUTING_RULES['empty']), url='', location=None, code=200), dict(xml=dict(RoutingRules=ROUTING_RULES['empty']), url='/', location=None, code=200), dict(xml=dict(RoutingRules=ROUTING_RULES['empty']), url='/x', location=None, code=404), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1']), url='/', location=None, code=200), dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1']), url='/x', location=None, code=404), dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1']), url='/docs/', location=dict(proto='http',bucket='{bucket_name}',path='/documents/'), code=301), dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1']), url='/docs/x', location=dict(proto='http',bucket='{bucket_name}',path='/documents/x'), code=301), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=https']), url='/', location=None, code=200), dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=https']), url='/x', location=None, code=404), dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=https']), url='/docs/', location=dict(proto='https',bucket='{bucket_name}',path='/documents/'), code=301), dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=https']), url='/docs/x', location=dict(proto='https',bucket='{bucket_name}',path='/documents/x'), code=301), - dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=http2']), url='/', location=None, code=200), - dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=http2']), url='/x', location=None, code=404), - dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=http2']), url='/docs/', 
location=dict(proto='http2',bucket='{bucket_name}',path='/documents/'), code=301), - dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=http2']), url='/docs/x', location=dict(proto='http2',bucket='{bucket_name}',path='/documents/x'), code=301), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=https+Hostname=xyzzy']), url='/', location=None, code=200), dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=https+Hostname=xyzzy']), url='/x', location=None, code=404), dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=https+Hostname=xyzzy']), url='/docs/', location=dict(proto='https',hostname='xyzzy',path='/documents/'), code=301), dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=https+Hostname=xyzzy']), url='/docs/x', location=dict(proto='https',hostname='xyzzy',path='/documents/x'), code=301), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample2']), url='/images/', location=dict(proto='http',bucket='{bucket_name}',path='/folderdeleted.html'), code=301), dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample2']), url='/images/x', location=dict(proto='http',bucket='{bucket_name}',path='/folderdeleted.html'), code=301), - dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample2+HttpRedirectCode=314']), url='/images/', location=dict(proto='http',bucket='{bucket_name}',path='/folderdeleted.html'), code=314), - dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample2+HttpRedirectCode=314']), url='/images/x', location=dict(proto='http',bucket='{bucket_name}',path='/folderdeleted.html'), code=314), - dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample3']), url='/x', location=dict(proto='http',bucket='ec2-11-22-333-44.compute-1.amazonaws.com',path='/report-404/x'), code=301), - dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample3']), url='/images/x', location=dict(proto='http',bucket='ec2-11-22-333-44.compute-1.amazonaws.com',path='/report-404/images/x'), code=301), + + + 
dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample3']), url='/x', location=dict(proto='http',hostname='ec2-11-22-333-44.compute-1.amazonaws.com',path='/report-404/x'), code=301), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample3']), url='/images/x', location=dict(proto='http',hostname='ec2-11-22-333-44.compute-1.amazonaws.com',path='/report-404/images/x'), code=301), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample3+KeyPrefixEquals']), url='/x', location=None, code=404), - dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample3+KeyPrefixEquals']), url='/images/x', location=dict(proto='http',bucket='ec2-11-22-333-44.compute-1.amazonaws.com',path='/report-404/x'), code=301), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample3+KeyPrefixEquals']), url='/images/x', location=dict(proto='http',hostname='ec2-11-22-333-44.compute-1.amazonaws.com',path='/report-404/x'), code=301), ] +ROUTING_ERROR_PROTOCOL = dict(code=400, reason='Bad Request', errorcode='InvalidRequest', bodyregex=r'Invalid protocol, protocol can be http or https. If not defined the protocol will be selected automatically.') + +ROUTING_RULES_TESTS_ERRORS = [ + # Invalid protocol, protocol can be http or https. If not defined the protocol will be selected automatically. 
+ dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=http2']), url='/', location=None, code=400, error=ROUTING_ERROR_PROTOCOL), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=http2']), url='/x', location=None, code=400, error=ROUTING_ERROR_PROTOCOL), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=http2']), url='/docs/', location=None, code=400, error=ROUTING_ERROR_PROTOCOL), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=http2']), url='/docs/x', location=None, code=400, error=ROUTING_ERROR_PROTOCOL), +] + +VALID_AMZ_REDIRECT = set([301,302,303,304,305,307,308]) + +# General lots of tests +for redirect_code in VALID_AMZ_REDIRECT: + rules = ROUTING_RULES['AmazonExample2+HttpRedirectCode=TMPL'].format(HttpRedirectCode=redirect_code) + result = redirect_code + ROUTING_RULES_TESTS.append( + dict(xml=dict(RoutingRules=rules), url='/images/', location=dict(proto='http',bucket='{bucket_name}',path='/folderdeleted.html'), code=result) + ) + ROUTING_RULES_TESTS.append( + dict(xml=dict(RoutingRules=rules), url='/images/x', location=dict(proto='http',bucket='{bucket_name}',path='/folderdeleted.html'), code=result) + ) + +# TODO: +# codes other than those in VALID_AMZ_REDIRECT +# give an error of 'The provided HTTP redirect code (314) is not valid. Valid codes are 3XX except 300.' 
during setting the website config +# we should check that we can return that too on ceph + def routing_setup(): kwargs = {'obj':[]} bucket = get_new_bucket() kwargs['bucket'] = bucket kwargs['obj'].append(bucket) - f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) + #f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) + f = _test_website_prep(bucket, '') kwargs.update(f) bucket.set_canned_acl('public-read') @@ -946,7 +1013,7 @@ def routing_setup(): #time.sleep(1) while bucket.get_key(f['ErrorDocument_Key']) is None: - time.sleep(0.05) + time.sleep(SLEEP_INTERVAL) return kwargs @@ -957,7 +1024,7 @@ def routing_teardown(**kwargs): @common.with_setup_kwargs(setup=routing_setup, teardown=routing_teardown) -@timed(5) +#@timed(10) def routing_check(*args, **kwargs): bucket = kwargs['bucket'] args=args[0] @@ -968,10 +1035,10 @@ def routing_check(*args, **kwargs): pprint(xml_fields) f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc'], hardcoded_fields=xml_fields) #print(f) - config_xml2 = bucket.get_website_configuration_xml() - config_xml2 = common.normalize_xml(config_xml2, pretty_print=True) # For us to read + config_xmlcmp = bucket.get_website_configuration_xml() + config_xmlcmp = common.normalize_xml(config_xmlcmp, pretty_print=True) # For us to read res = _website_request(bucket.name, args['url']) - print(config_xml2) + print(config_xmlcmp) # RGW returns "302 Found" per RFC2616 # S3 returns 302 Moved Temporarily per RFC1945 new_url = args['location'] @@ -991,6 +1058,7 @@ def routing_check(*args, **kwargs): assert(False) @attr('RoutingRules') +@attr('s3website') @nose.with_setup(setup=None, teardown=common.teardown) def test_routing_generator(): for t in ROUTING_RULES_TESTS: From 88b63280034b0add7128a3d25ba1d6c90bc16a60 Mon Sep 17 00:00:00 2001 From: "Robin H. 
Johnson" Date: Tue, 26 Jan 2016 00:22:51 +0000 Subject: [PATCH 17/27] website: Improve XML handling and x-amz-website-redirect-location tests Signed-off-by: Robin H. Johnson --- s3tests/common.py | 4 ++ s3tests/functional/test_s3_website.py | 96 ++++++++++++++++++++------- 2 files changed, 76 insertions(+), 24 deletions(-) diff --git a/s3tests/common.py b/s3tests/common.py index fd834bf..9a325c0 100644 --- a/s3tests/common.py +++ b/s3tests/common.py @@ -257,6 +257,10 @@ def with_setup_kwargs(setup, teardown=None): # yield _test_gen, '1' # yield _test_gen +def trim_xml(xml_str): + p = etree.XMLParser(remove_blank_text=True) + elem = etree.XML(xml_str, parser=p) + return etree.tostring(elem) def normalize_xml(xml, pretty_print=True): if xml is None: diff --git a/s3tests/functional/test_s3_website.py b/s3tests/functional/test_s3_website.py index 5f055c3..09d190b 100644 --- a/s3tests/functional/test_s3_website.py +++ b/s3tests/functional/test_s3_website.py @@ -98,13 +98,15 @@ def _test_website_prep(bucket, xml_template, hardcoded_fields = {}, expect_fail= try: config_xmlold = common.normalize_xml(bucket.get_website_configuration_xml(), pretty_print=True) except S3ResponseError as e: - if str(e.status) == str(404) and ('NoSuchWebsiteConfiguration' in e.body or 'NoSuchWebsiteConfiguration' in e.code): + if str(e.status) == str(404) \ + and True: + #and ('NoSuchWebsiteConfiguration' in e.body or 'NoSuchWebsiteConfiguration' in e.code): pass else: raise e try: - bucket.set_website_configuration_xml(config_xmlnew) + bucket.set_website_configuration_xml(common.trim_xml(config_xmlnew)) config_xmlnew = common.normalize_xml(config_xmlnew, pretty_print=True) except S3ResponseError as e: if expect_fail is not None: @@ -220,14 +222,16 @@ def test_website_nonexistant_bucket_s3(): @attr(resource='bucket') @attr(method='get') @attr(operation='list') -@attr(assertion='non-existant bucket via website endpoint should give Forbidden, keeping bucket identity secure') 
+#@attr(assertion='non-existant bucket via website endpoint should give Forbidden, keeping bucket identity secure') +@attr(assertion='non-existant bucket via website endpoint should give NoSuchBucket') @attr('s3website') @attr('fails_on_s3') @nose.with_setup(setup=None, teardown=common.teardown) def test_website_nonexistant_bucket_rgw(): bucket_name = get_new_bucket_name() res = _website_request(bucket_name, '') - _website_expected_error_response(res, bucket_name, 403, 'Forbidden', 'AccessDenied', content=_website_expected_default_html(Code='AccessDenied')) + #_website_expected_error_response(res, bucket_name, 403, 'Forbidden', 'AccessDenied', content=_website_expected_default_html(Code='AccessDenied')) + _website_expected_error_response(res, bucket_name, 404, 'Not Found', 'NoSuchBucket', content=_website_expected_default_html(Code='NoSuchBucket')) #------------- IndexDocument only, successes @attr(resource='bucket') @@ -685,8 +689,6 @@ def test_website_bucket_private_redirectall_base(): bucket.set_canned_acl('private') res = _website_request(bucket.name, '') - # RGW returns "302 Found" per RFC2616 - # S3 returns 302 Moved Temporarily per RFC1945 new_url = 'http://%s/' % f['RedirectAllRequestsTo_HostName'] _website_expected_redirect_response(res, 301, ['Moved Permanently'], new_url) @@ -706,8 +708,6 @@ def test_website_bucket_private_redirectall_path(): pathfragment = choose_bucket_prefix(template='/{random}', max_len=16) res = _website_request(bucket.name, pathfragment) - # RGW returns "302 Found" per RFC2616 - # S3 returns 302 Moved Temporarily per RFC1945 new_url = 'http://%s%s' % (f['RedirectAllRequestsTo_HostName'], pathfragment) _website_expected_redirect_response(res, 301, ['Moved Permanently'], new_url) @@ -728,8 +728,6 @@ def test_website_bucket_private_redirectall_path_upgrade(): pathfragment = choose_bucket_prefix(template='/{random}', max_len=16) res = _website_request(bucket.name, pathfragment) - # RGW returns "302 Found" per RFC2616 - # S3 returns 302 
Moved Temporarily per RFC1945 new_url = 'https://%s%s' % (f['RedirectAllRequestsTo_HostName'], pathfragment) _website_expected_redirect_response(res, 301, ['Moved Permanently'], new_url) @@ -754,7 +752,7 @@ def test_website_xredirect_nonwebsite(): headers = {'x-amz-website-redirect-location': redirect_dest} k.set_contents_from_string(content, headers=headers, policy='public-read') redirect = k.get_redirect() - ok(k.get_redirect(), redirect_dest) + eq(k.get_redirect(), redirect_dest) res = _website_request(bucket.name, '/page') body = res.read() @@ -772,11 +770,11 @@ def test_website_xredirect_nonwebsite(): @attr(resource='bucket') @attr(method='get') @attr(operation='list') -@attr(assertion='x-amz-website-redirect-location should fire websiteconf, relative path') +@attr(assertion='x-amz-website-redirect-location should fire websiteconf, relative path, public key') @attr('s3website') @attr('x-amz-website-redirect-location') @nose.with_setup(setup=None, teardown=common.teardown) -def test_website_xredirect_relative(): +def test_website_xredirect_public_relative(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) bucket.make_public() @@ -787,11 +785,9 @@ def test_website_xredirect_relative(): headers = {'x-amz-website-redirect-location': redirect_dest} k.set_contents_from_string(content, headers=headers, policy='public-read') redirect = k.get_redirect() - ok(k.get_redirect(), redirect_dest) + eq(k.get_redirect(), redirect_dest) res = _website_request(bucket.name, '/page') - # RGW returns "302 Found" per RFC2616 - # S3 returns 302 Moved Temporarily per RFC1945 #new_url = get_website_url(bucket_name=bucket.name, path=redirect_dest) _website_expected_redirect_response(res, 301, ['Moved Permanently'], redirect_dest) @@ -801,11 +797,11 @@ def test_website_xredirect_relative(): @attr(resource='bucket') @attr(method='get') @attr(operation='list') -@attr(assertion='x-amz-website-redirect-location should fire websiteconf, absolute') 
+@attr(assertion='x-amz-website-redirect-location should fire websiteconf, absolute, public key') @attr('s3website') @attr('x-amz-website-redirect-location') @nose.with_setup(setup=None, teardown=common.teardown) -def test_website_xredirect_abs(): +def test_website_xredirect_public_abs(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) bucket.make_public() @@ -816,17 +812,69 @@ def test_website_xredirect_abs(): headers = {'x-amz-website-redirect-location': redirect_dest} k.set_contents_from_string(content, headers=headers, policy='public-read') redirect = k.get_redirect() - ok(k.get_redirect(), redirect_dest) + eq(k.get_redirect(), redirect_dest) res = _website_request(bucket.name, '/page') - # RGW returns "302 Found" per RFC2616 - # S3 returns 302 Moved Temporarily per RFC1945 new_url = get_website_url(proto='http', hostname='example.com', path='/foo') _website_expected_redirect_response(res, 301, ['Moved Permanently'], new_url) k.delete() bucket.delete() +@attr(resource='bucket') +@attr(method='get') +@attr(operation='list') +@attr(assertion='x-amz-website-redirect-location should fire websiteconf, relative path, private key') +@attr('s3website') +@attr('x-amz-website-redirect-location') +@nose.with_setup(setup=None, teardown=common.teardown) +def test_website_xredirect_private_relative(): + bucket = get_new_bucket() + f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) + bucket.make_public() + + k = bucket.new_key('page') + content = 'wrong-content' + redirect_dest = '/relative' + headers = {'x-amz-website-redirect-location': redirect_dest} + k.set_contents_from_string(content, headers=headers, policy='private') + redirect = k.get_redirect() + eq(k.get_redirect(), redirect_dest) + + res = _website_request(bucket.name, '/page') + # We get a 403 because the page is private + _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied', 
content=_website_expected_default_html(Code='AccessDenied')) + + k.delete() + bucket.delete() + +@attr(resource='bucket') +@attr(method='get') +@attr(operation='list') +@attr(assertion='x-amz-website-redirect-location should fire websiteconf, absolute, private key') +@attr('s3website') +@attr('x-amz-website-redirect-location') +@nose.with_setup(setup=None, teardown=common.teardown) +def test_website_xredirect_private_abs(): + bucket = get_new_bucket() + f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) + bucket.make_public() + + k = bucket.new_key('page') + content = 'wrong-content' + redirect_dest = 'http://example.com/foo' + headers = {'x-amz-website-redirect-location': redirect_dest} + k.set_contents_from_string(content, headers=headers, policy='private') + redirect = k.get_redirect() + eq(k.get_redirect(), redirect_dest) + + res = _website_request(bucket.name, '/page') + new_url = get_website_url(proto='http', hostname='example.com', path='/foo') + # We get a 403 because the page is private + _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied', content=_website_expected_default_html(Code='AccessDenied')) + + k.delete() + bucket.delete() # ------ RoutingRules tests # RoutingRules @@ -907,7 +955,7 @@ ROUTING_RULES = { """ - 404 + 404 ec2-11-22-333-44.compute-1.amazonaws.com @@ -1039,8 +1087,6 @@ def routing_check(*args, **kwargs): config_xmlcmp = common.normalize_xml(config_xmlcmp, pretty_print=True) # For us to read res = _website_request(bucket.name, args['url']) print(config_xmlcmp) - # RGW returns "302 Found" per RFC2616 - # S3 returns 302 Moved Temporarily per RFC1945 new_url = args['location'] if new_url is not None: new_url = get_website_url(**new_url) @@ -1062,6 +1108,8 @@ def routing_check(*args, **kwargs): @nose.with_setup(setup=None, teardown=common.teardown) def test_routing_generator(): for t in ROUTING_RULES_TESTS: + if 'xml' in t and 'RoutingRules' in t['xml'] and len(t['xml']['RoutingRules']) > 0: 
+ t['xml']['RoutingRules'] = common.trim_xml(t['xml']['RoutingRules']) yield routing_check, t From 44e51960c661619732e4e3481fba1e82eed1d7cb Mon Sep 17 00:00:00 2001 From: "Robin H. Johnson" Date: Fri, 29 Jan 2016 03:28:51 +0000 Subject: [PATCH 18/27] Annotate many tests that fail on AWS (tested on EU region) Signed-off-by: Robin H. Johnson --- s3tests/functional/test_s3.py | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/s3tests/functional/test_s3.py b/s3tests/functional/test_s3.py index 5c71e1e..4f27af0 100644 --- a/s3tests/functional/test_s3.py +++ b/s3tests/functional/test_s3.py @@ -2886,6 +2886,7 @@ def _test_bucket_create_naming_good_long(length): @attr(method='put') @attr(operation='create w/250 byte name') @attr(assertion='fails with subdomain') +@attr('fails_on_aws') # InvalidBucketNameThe specified bucket is not valid.... def test_bucket_create_naming_good_long_250(): _test_bucket_create_naming_good_long(250) @@ -2896,6 +2897,7 @@ def test_bucket_create_naming_good_long_250(): @attr(method='put') @attr(operation='create w/251 byte name') @attr(assertion='fails with subdomain') +@attr('fails_on_aws') # InvalidBucketNameThe specified bucket is not valid.... def test_bucket_create_naming_good_long_251(): _test_bucket_create_naming_good_long(251) @@ -2906,6 +2908,7 @@ def test_bucket_create_naming_good_long_251(): @attr(method='put') @attr(operation='create w/252 byte name') @attr(assertion='fails with subdomain') +@attr('fails_on_aws') # InvalidBucketNameThe specified bucket is not valid.... def test_bucket_create_naming_good_long_252(): _test_bucket_create_naming_good_long(252) @@ -2945,6 +2948,7 @@ def test_bucket_create_naming_good_long_255(): @attr(method='get') @attr(operation='list w/251 byte name') @attr(assertion='fails with subdomain') +@attr('fails_on_aws') # InvalidBucketNameThe specified bucket is not valid.... 
def test_bucket_list_long_name(): prefix = get_new_bucket_name() length = 251 @@ -2985,6 +2989,7 @@ def test_bucket_create_naming_bad_punctuation(): @attr(method='put') @attr(operation='create w/underscore in name') @attr(assertion='succeeds') +@attr('fails_on_aws') # InvalidBucketNameThe specified bucket is not valid.... def test_bucket_create_naming_dns_underscore(): check_good_bucket_name('foo_bar') @@ -2995,6 +3000,7 @@ def test_bucket_create_naming_dns_underscore(): @attr(method='put') @attr(operation='create w/100 byte name') @attr(assertion='fails with subdomain') +@attr('fails_on_aws') # InvalidBucketNameThe specified bucket is not valid.... def test_bucket_create_naming_dns_long(): prefix = get_prefix() assert len(prefix) < 50 @@ -3008,6 +3014,7 @@ def test_bucket_create_naming_dns_long(): @attr(method='put') @attr(operation='create w/dash at end of name') @attr(assertion='fails with subdomain') +@attr('fails_on_aws') # InvalidBucketNameThe specified bucket is not valid.... def test_bucket_create_naming_dns_dash_at_end(): check_good_bucket_name('foo-') @@ -3018,6 +3025,7 @@ def test_bucket_create_naming_dns_dash_at_end(): @attr(method='put') @attr(operation='create w/.. in name') @attr(assertion='fails with subdomain') +@attr('fails_on_aws') # InvalidBucketNameThe specified bucket is not valid.... def test_bucket_create_naming_dns_dot_dot(): check_good_bucket_name('foo..bar') @@ -3028,6 +3036,7 @@ def test_bucket_create_naming_dns_dot_dot(): @attr(method='put') @attr(operation='create w/.- in name') @attr(assertion='fails with subdomain') +@attr('fails_on_aws') # InvalidBucketNameThe specified bucket is not valid.... def test_bucket_create_naming_dns_dot_dash(): check_good_bucket_name('foo.-bar') @@ -3038,6 +3047,7 @@ def test_bucket_create_naming_dns_dot_dash(): @attr(method='put') @attr(operation='create w/-. in name') @attr(assertion='fails with subdomain') +@attr('fails_on_aws') # InvalidBucketNameThe specified bucket is not valid.... 
def test_bucket_create_naming_dns_dash_dot(): check_good_bucket_name('foo-.bar') @@ -3120,6 +3130,7 @@ def test_bucket_acl_default(): @attr(method='get') @attr(operation='public-read acl') @attr(assertion='read back expected defaults') +@attr('fails_on_aws') # IllegalLocationConstraintExceptionThe unspecified location constraint is incompatible for the region specific endpoint this request was sent to. def test_bucket_acl_canned_during_create(): name = get_new_bucket_name() bucket = targets.main.default.connection.create_bucket(name, policy = 'public-read') @@ -3548,6 +3559,7 @@ def test_object_acl_canned_bucketownerfullcontrol(): @attr(method='put') @attr(operation='set write-acp') @attr(assertion='does not modify owner') +@attr('fails_on_aws') # InvalidArgumentInvalid idCanonicalUser/ID${ALTUSER} def test_object_acl_full_control_verify_owner(): bucket = get_new_bucket(targets.main.default) bucket.set_acl('public-read-write') @@ -3638,6 +3650,7 @@ def _build_bucket_acl_xml(permission, bucket=None): @attr(method='ACLs') @attr(operation='set acl FULL_CONTROL (xml)') @attr(assertion='reads back correctly') +@attr('fails_on_aws') # InvalidArgumentInvalid idCanonicalUser/ID${USER} def test_bucket_acl_xml_fullcontrol(): _build_bucket_acl_xml('FULL_CONTROL') @@ -3646,6 +3659,7 @@ def test_bucket_acl_xml_fullcontrol(): @attr(method='ACLs') @attr(operation='set acl WRITE (xml)') @attr(assertion='reads back correctly') +@attr('fails_on_aws') # InvalidArgumentInvalid idCanonicalUser/ID${USER} def test_bucket_acl_xml_write(): _build_bucket_acl_xml('WRITE') @@ -3654,6 +3668,7 @@ def test_bucket_acl_xml_write(): @attr(method='ACLs') @attr(operation='set acl WRITE_ACP (xml)') @attr(assertion='reads back correctly') +@attr('fails_on_aws') # InvalidArgumentInvalid idCanonicalUser/ID${USER} def test_bucket_acl_xml_writeacp(): _build_bucket_acl_xml('WRITE_ACP') @@ -3662,6 +3677,7 @@ def test_bucket_acl_xml_writeacp(): @attr(method='ACLs') @attr(operation='set acl READ (xml)') 
@attr(assertion='reads back correctly') +@attr('fails_on_aws') # InvalidArgumentInvalid idCanonicalUser/ID${USER} def test_bucket_acl_xml_read(): _build_bucket_acl_xml('READ') @@ -3670,6 +3686,7 @@ def test_bucket_acl_xml_read(): @attr(method='ACLs') @attr(operation='set acl READ_ACP (xml)') @attr(assertion='reads back correctly') +@attr('fails_on_aws') # InvalidArgumentInvalid idCanonicalUser/ID${USER} def test_bucket_acl_xml_readacp(): _build_bucket_acl_xml('READ_ACP') @@ -3708,6 +3725,7 @@ def _build_object_acl_xml(permission): @attr(method='ACLs') @attr(operation='set acl FULL_CONTROL (xml)') @attr(assertion='reads back correctly') +@attr('fails_on_aws') # InvalidArgumentInvalid idCanonicalUser/ID${USER} def test_object_acl_xml(): _build_object_acl_xml('FULL_CONTROL') @@ -3716,6 +3734,7 @@ def test_object_acl_xml(): @attr(method='ACLs') @attr(operation='set acl WRITE (xml)') @attr(assertion='reads back correctly') +@attr('fails_on_aws') # InvalidArgumentInvalid idCanonicalUser/ID${USER} def test_object_acl_xml_write(): _build_object_acl_xml('WRITE') @@ -3724,6 +3743,7 @@ def test_object_acl_xml_write(): @attr(method='ACLs') @attr(operation='set acl WRITE_ACP (xml)') @attr(assertion='reads back correctly') +@attr('fails_on_aws') # InvalidArgumentInvalid idCanonicalUser/ID${USER} def test_object_acl_xml_writeacp(): _build_object_acl_xml('WRITE_ACP') @@ -3732,6 +3752,7 @@ def test_object_acl_xml_writeacp(): @attr(method='ACLs') @attr(operation='set acl READ (xml)') @attr(assertion='reads back correctly') +@attr('fails_on_aws') # InvalidArgumentInvalid idCanonicalUser/ID${USER} def test_object_acl_xml_read(): _build_object_acl_xml('READ') @@ -3740,6 +3761,7 @@ def test_object_acl_xml_read(): @attr(method='ACLs') @attr(operation='set acl READ_ACP (xml)') @attr(assertion='reads back correctly') +@attr('fails_on_aws') # InvalidArgumentInvalid idCanonicalUser/ID${USER} def test_object_acl_xml_readacp(): _build_object_acl_xml('READ_ACP') @@ -3848,6 +3870,7 @@ def 
_check_bucket_acl_grant_cant_writeacp(bucket): @attr(method='ACLs') @attr(operation='set acl w/userid FULL_CONTROL') @attr(assertion='can read/write data/acls') +@attr('fails_on_aws') # InvalidArgumentInvalid idCanonicalUser/ID${USER} def test_bucket_acl_grant_userid_fullcontrol(): bucket = _bucket_acl_grant_userid('FULL_CONTROL') @@ -3871,6 +3894,7 @@ def test_bucket_acl_grant_userid_fullcontrol(): @attr(method='ACLs') @attr(operation='set acl w/userid READ') @attr(assertion='can read data, no other r/w') +@attr('fails_on_aws') # InvalidArgumentInvalid idCanonicalUser/ID${ALTUSER} def test_bucket_acl_grant_userid_read(): bucket = _bucket_acl_grant_userid('READ') @@ -3888,6 +3912,7 @@ def test_bucket_acl_grant_userid_read(): @attr(method='ACLs') @attr(operation='set acl w/userid READ_ACP') @attr(assertion='can read acl, no other r/w') +@attr('fails_on_aws') # InvalidArgumentInvalid idCanonicalUser/ID${ALTUSER} def test_bucket_acl_grant_userid_readacp(): bucket = _bucket_acl_grant_userid('READ_ACP') @@ -3905,6 +3930,7 @@ def test_bucket_acl_grant_userid_readacp(): @attr(method='ACLs') @attr(operation='set acl w/userid WRITE') @attr(assertion='can write data, no other r/w') +@attr('fails_on_aws') # InvalidArgumentInvalid idCanonicalUser/ID${ALTUSER} def test_bucket_acl_grant_userid_write(): bucket = _bucket_acl_grant_userid('WRITE') @@ -3922,6 +3948,7 @@ def test_bucket_acl_grant_userid_write(): @attr(method='ACLs') @attr(operation='set acl w/userid WRITE_ACP') @attr(assertion='can write acls, no other r/w') +@attr('fails_on_aws') # InvalidArgumentInvalid idCanonicalUser/ID${ALTUSER} def test_bucket_acl_grant_userid_writeacp(): bucket = _bucket_acl_grant_userid('WRITE_ACP') @@ -4005,6 +4032,7 @@ def _get_acl_header(user=None, perms=None): @attr(operation='add all grants to user through headers') @attr(assertion='adds all grants individually to second user') @attr('fails_on_dho') +@attr('fails_on_aws') # InvalidArgumentInvalid idCanonicalUser/ID${ALTUSER} def 
test_object_header_acl_grants(): bucket = get_new_bucket() headers = _get_acl_header() @@ -4064,6 +4092,7 @@ def test_object_header_acl_grants(): @attr(operation='add all grants to user through headers') @attr(assertion='adds all grants individually to second user') @attr('fails_on_dho') +@attr('fails_on_aws') # InvalidArgumentInvalid idCanonicalUser/ID${ALTUSER} def test_bucket_header_acl_grants(): headers = _get_acl_header() bucket = get_new_bucket(targets.main.default, get_prefix(), headers) @@ -4128,6 +4157,7 @@ def test_bucket_header_acl_grants(): @attr(method='ACLs') @attr(operation='add second FULL_CONTROL user') @attr(assertion='works for S3, fails for DHO') +@attr('fails_on_aws') # AmbiguousGrantByEmailAddressThe e-mail address you provided is associated with more than one account. Please retry your request using a different identification method or after resolving the ambiguity. def test_bucket_acl_grant_email(): bucket = get_new_bucket() # add alt user From eb8acbaf920ca00606f849aa1191b21b349ef3db Mon Sep 17 00:00:00 2001 From: "Robin H. Johnson" Date: Fri, 29 Jan 2016 03:50:06 +0000 Subject: [PATCH 19/27] Fix whitespace. Signed-off-by: Robin H. 
Johnson --- s3tests/functional/test_s3_website.py | 57 +++++++++++++-------------- 1 file changed, 27 insertions(+), 30 deletions(-) diff --git a/s3tests/functional/test_s3_website.py b/s3tests/functional/test_s3_website.py index 09d190b..fe7fdd4 100644 --- a/s3tests/functional/test_s3_website.py +++ b/s3tests/functional/test_s3_website.py @@ -60,7 +60,7 @@ def get_website_url(**kwargs): if hostname is None and bucket is None: return '/' + path.lstrip('/') - + domain = config['main']['host'] if('s3website_domain' in config['main']): domain = config['main']['s3website_domain'] @@ -91,7 +91,7 @@ def _test_website_prep(bucket, xml_template, hardcoded_fields = {}, expect_fail= if not xml_template: bucket.delete_website_configuration() return f - + config_xmlnew = make_website_config(xml_fragment) config_xmlold = '' @@ -385,7 +385,7 @@ def test_website_private_bucket_list_empty_missingerrordoc(): res = _website_request(bucket.name, '') _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied', content=_website_expected_default_html(Code='AccessDenied')) - + bucket.delete() @attr(resource='bucket') @@ -980,43 +980,43 @@ ROUTING_RULES = { ROUTING_RULES_TESTS = [ dict(xml=dict(RoutingRules=ROUTING_RULES['empty']), url='', location=None, code=200), - dict(xml=dict(RoutingRules=ROUTING_RULES['empty']), url='/', location=None, code=200), - dict(xml=dict(RoutingRules=ROUTING_RULES['empty']), url='/x', location=None, code=404), + dict(xml=dict(RoutingRules=ROUTING_RULES['empty']), url='/', location=None, code=200), + dict(xml=dict(RoutingRules=ROUTING_RULES['empty']), url='/x', location=None, code=404), - dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1']), url='/', location=None, code=200), - dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1']), url='/x', location=None, code=404), - dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1']), url='/docs/', location=dict(proto='http',bucket='{bucket_name}',path='/documents/'), code=301), 
- dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1']), url='/docs/x', location=dict(proto='http',bucket='{bucket_name}',path='/documents/x'), code=301), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1']), url='/', location=None, code=200), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1']), url='/x', location=None, code=404), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1']), url='/docs/', location=dict(proto='http',bucket='{bucket_name}',path='/documents/'), code=301), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1']), url='/docs/x', location=dict(proto='http',bucket='{bucket_name}',path='/documents/x'), code=301), - dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=https']), url='/', location=None, code=200), - dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=https']), url='/x', location=None, code=404), - dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=https']), url='/docs/', location=dict(proto='https',bucket='{bucket_name}',path='/documents/'), code=301), - dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=https']), url='/docs/x', location=dict(proto='https',bucket='{bucket_name}',path='/documents/x'), code=301), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=https']), url='/', location=None, code=200), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=https']), url='/x', location=None, code=404), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=https']), url='/docs/', location=dict(proto='https',bucket='{bucket_name}',path='/documents/'), code=301), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=https']), url='/docs/x', location=dict(proto='https',bucket='{bucket_name}',path='/documents/x'), code=301), - dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=https+Hostname=xyzzy']), url='/', location=None, code=200), - 
dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=https+Hostname=xyzzy']), url='/x', location=None, code=404), - dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=https+Hostname=xyzzy']), url='/docs/', location=dict(proto='https',hostname='xyzzy',path='/documents/'), code=301), - dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=https+Hostname=xyzzy']), url='/docs/x', location=dict(proto='https',hostname='xyzzy',path='/documents/x'), code=301), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=https+Hostname=xyzzy']), url='/', location=None, code=200), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=https+Hostname=xyzzy']), url='/x', location=None, code=404), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=https+Hostname=xyzzy']), url='/docs/', location=dict(proto='https',hostname='xyzzy',path='/documents/'), code=301), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=https+Hostname=xyzzy']), url='/docs/x', location=dict(proto='https',hostname='xyzzy',path='/documents/x'), code=301), - dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample2']), url='/images/', location=dict(proto='http',bucket='{bucket_name}',path='/folderdeleted.html'), code=301), - dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample2']), url='/images/x', location=dict(proto='http',bucket='{bucket_name}',path='/folderdeleted.html'), code=301), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample2']), url='/images/', location=dict(proto='http',bucket='{bucket_name}',path='/folderdeleted.html'), code=301), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample2']), url='/images/x', location=dict(proto='http',bucket='{bucket_name}',path='/folderdeleted.html'), code=301), - dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample3']), url='/x', location=dict(proto='http',hostname='ec2-11-22-333-44.compute-1.amazonaws.com',path='/report-404/x'), code=301), - 
dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample3']), url='/images/x', location=dict(proto='http',hostname='ec2-11-22-333-44.compute-1.amazonaws.com',path='/report-404/images/x'), code=301), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample3']), url='/x', location=dict(proto='http',hostname='ec2-11-22-333-44.compute-1.amazonaws.com',path='/report-404/x'), code=301), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample3']), url='/images/x', location=dict(proto='http',hostname='ec2-11-22-333-44.compute-1.amazonaws.com',path='/report-404/images/x'), code=301), - dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample3+KeyPrefixEquals']), url='/x', location=None, code=404), - dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample3+KeyPrefixEquals']), url='/images/x', location=dict(proto='http',hostname='ec2-11-22-333-44.compute-1.amazonaws.com',path='/report-404/x'), code=301), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample3+KeyPrefixEquals']), url='/x', location=None, code=404), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample3+KeyPrefixEquals']), url='/images/x', location=dict(proto='http',hostname='ec2-11-22-333-44.compute-1.amazonaws.com',path='/report-404/x'), code=301), ] ROUTING_ERROR_PROTOCOL = dict(code=400, reason='Bad Request', errorcode='InvalidRequest', bodyregex=r'Invalid protocol, protocol can be http or https. If not defined the protocol will be selected automatically.') ROUTING_RULES_TESTS_ERRORS = [ # Invalid protocol, protocol can be http or https. If not defined the protocol will be selected automatically. 
- dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=http2']), url='/', location=None, code=400, error=ROUTING_ERROR_PROTOCOL), - dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=http2']), url='/x', location=None, code=400, error=ROUTING_ERROR_PROTOCOL), - dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=http2']), url='/docs/', location=None, code=400, error=ROUTING_ERROR_PROTOCOL), - dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=http2']), url='/docs/x', location=None, code=400, error=ROUTING_ERROR_PROTOCOL), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=http2']), url='/', location=None, code=400, error=ROUTING_ERROR_PROTOCOL), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=http2']), url='/x', location=None, code=400, error=ROUTING_ERROR_PROTOCOL), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=http2']), url='/docs/', location=None, code=400, error=ROUTING_ERROR_PROTOCOL), + dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=http2']), url='/docs/x', location=None, code=400, error=ROUTING_ERROR_PROTOCOL), ] VALID_AMZ_REDIRECT = set([301,302,303,304,305,307,308]) @@ -1111,6 +1111,3 @@ def test_routing_generator(): if 'xml' in t and 'RoutingRules' in t['xml'] and len(t['xml']['RoutingRules']) > 0: t['xml']['RoutingRules'] = common.trim_xml(t['xml']['RoutingRules']) yield routing_check, t - - - From e917e5e41ae61fc888f6823ee48c399783e8ffcb Mon Sep 17 00:00:00 2001 From: "Robin H. Johnson" Date: Fri, 29 Jan 2016 03:49:54 +0000 Subject: [PATCH 20/27] s3website: Allow tests to be skipped if website support disabled in RGW. Signed-off-by: Robin H. 
Johnson --- s3tests/functional/test_s3_website.py | 103 +++++++++++++++++--------- 1 file changed, 68 insertions(+), 35 deletions(-) diff --git a/s3tests/functional/test_s3_website.py b/s3tests/functional/test_s3_website.py index fe7fdd4..8efd024 100644 --- a/s3tests/functional/test_s3_website.py +++ b/s3tests/functional/test_s3_website.py @@ -6,13 +6,14 @@ import string import random from pprint import pprint import time +import boto.exception from urlparse import urlparse from nose.tools import eq_ as eq, ok_ as ok from nose.plugins.attrib import attr from nose.tools import timed -from boto.exception import S3ResponseError +from nose.plugins.skip import SkipTest from .. import common @@ -37,6 +38,38 @@ WEBSITE_CONFIGS_XMLFRAG = { 'RedirectAll+Protocol': '${RedirectAllRequestsTo_HostName}${RedirectAllRequestsTo_Protocol}${RoutingRules}', } +CAN_WEBSITE = None + +def check_can_test_website(): + global CAN_WEBSITE + # This is a bit expensive, so we cache this + if CAN_WEBSITE is None: + bucket = get_new_bucket() + try: + wsconf = bucket.get_website_configuration() + CAN_WEBSITE = True + except boto.exception.S3ResponseError as e: + if e.status == 404 and e.reason == 'Not Found' and e.error_code == 'NoSuchWebsiteConfiguration': + CAN_WEBSITE = True + elif e.status == 405 and e.reason == 'Method Not Allowed' and e.error_code == 'MethodNotAllowed': + # rgw_enable_static_website is false + CAN_WEBSITE = False + elif e.status == 403 and e.reason == 'SignatureDoesNotMatch': + # This is older versions that do not support the website code + CAN_WEBSITE = False + else: + raise RuntimeError("Unknown response in checking if WebsiteConf is supported", e) + finally: + bucket.delete() + + if CAN_WEBSITE is True: + return True + elif CAN_WEBSITE is False: + raise SkipTest + else: + raise RuntimeError("Unknown cached response in checking if WebsiteConf is supported") + + def make_website_config(xml_fragment): """ Take the tedious stuff out of the config @@ -97,7 +130,7 @@ def 
_test_website_prep(bucket, xml_template, hardcoded_fields = {}, expect_fail= config_xmlold = '' try: config_xmlold = common.normalize_xml(bucket.get_website_configuration_xml(), pretty_print=True) - except S3ResponseError as e: + except boto.exception.S3ResponseError as e: if str(e.status) == str(404) \ and True: #and ('NoSuchWebsiteConfiguration' in e.body or 'NoSuchWebsiteConfiguration' in e.code): @@ -108,7 +141,7 @@ def _test_website_prep(bucket, xml_template, hardcoded_fields = {}, expect_fail= try: bucket.set_website_configuration_xml(common.trim_xml(config_xmlnew)) config_xmlnew = common.normalize_xml(config_xmlnew, pretty_print=True) - except S3ResponseError as e: + except boto.exception.S3ResponseError as e: if expect_fail is not None: if isinstance(expect_fail, dict): pass @@ -213,7 +246,7 @@ def _website_request(bucket_name, path, connect_hostname=None, method='GET', tim @attr(assertion='non-existant bucket via website endpoint should give NoSuchBucket, exposing security risk') @attr('s3website') @attr('fails_on_rgw') -@nose.with_setup(setup=None, teardown=common.teardown) +@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_nonexistant_bucket_s3(): bucket_name = get_new_bucket_name() res = _website_request(bucket_name, '') @@ -226,7 +259,7 @@ def test_website_nonexistant_bucket_s3(): @attr(assertion='non-existant bucket via website endpoint should give NoSuchBucket') @attr('s3website') @attr('fails_on_s3') -@nose.with_setup(setup=None, teardown=common.teardown) +@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_nonexistant_bucket_rgw(): bucket_name = get_new_bucket_name() res = _website_request(bucket_name, '') @@ -239,7 +272,7 @@ def test_website_nonexistant_bucket_rgw(): @attr(operation='list') @attr(assertion='non-empty public buckets via s3website return page for /, where page is public') @attr('s3website') -@nose.with_setup(setup=None, teardown=common.teardown) 
+@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) @timed(10) def test_website_public_bucket_list_public_index(): bucket = get_new_bucket() @@ -266,7 +299,7 @@ def test_website_public_bucket_list_public_index(): @attr(operation='list') @attr(assertion='non-empty private buckets via s3website return page for /, where page is private') @attr('s3website') -@nose.with_setup(setup=None, teardown=common.teardown) +@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_private_bucket_list_public_index(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) @@ -295,7 +328,7 @@ def test_website_private_bucket_list_public_index(): @attr(operation='list') @attr(assertion='empty private buckets via s3website return a 403 for /') @attr('s3website') -@nose.with_setup(setup=None, teardown=common.teardown) +@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_private_bucket_list_empty(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) @@ -311,7 +344,7 @@ def test_website_private_bucket_list_empty(): @attr(operation='list') @attr(assertion='empty public buckets via s3website return a 404 for /') @attr('s3website') -@nose.with_setup(setup=None, teardown=common.teardown) +@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_public_bucket_list_empty(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) @@ -326,7 +359,7 @@ def test_website_public_bucket_list_empty(): @attr(operation='list') @attr(assertion='non-empty public buckets via s3website return page for /, where page is private') @attr('s3website') -@nose.with_setup(setup=None, teardown=common.teardown) +@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_public_bucket_list_private_index(): bucket = get_new_bucket() f = 
_test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) @@ -351,7 +384,7 @@ def test_website_public_bucket_list_private_index(): @attr(operation='list') @attr(assertion='non-empty private buckets via s3website return page for /, where page is private') @attr('s3website') -@nose.with_setup(setup=None, teardown=common.teardown) +@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_private_bucket_list_private_index(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) @@ -377,7 +410,7 @@ def test_website_private_bucket_list_private_index(): @attr(operation='list') @attr(assertion='empty private buckets via s3website return a 403 for /, missing errordoc') @attr('s3website') -@nose.with_setup(setup=None, teardown=common.teardown) +@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_private_bucket_list_empty_missingerrordoc(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) @@ -393,7 +426,7 @@ def test_website_private_bucket_list_empty_missingerrordoc(): @attr(operation='list') @attr(assertion='empty public buckets via s3website return a 404 for /, missing errordoc') @attr('s3website') -@nose.with_setup(setup=None, teardown=common.teardown) +@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_public_bucket_list_empty_missingerrordoc(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) @@ -408,7 +441,7 @@ def test_website_public_bucket_list_empty_missingerrordoc(): @attr(operation='list') @attr(assertion='non-empty public buckets via s3website return page for /, where page is private, missing errordoc') @attr('s3website') -@nose.with_setup(setup=None, teardown=common.teardown) +@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def 
test_website_public_bucket_list_private_index_missingerrordoc(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) @@ -432,7 +465,7 @@ def test_website_public_bucket_list_private_index_missingerrordoc(): @attr(operation='list') @attr(assertion='non-empty private buckets via s3website return page for /, where page is private, missing errordoc') @attr('s3website') -@nose.with_setup(setup=None, teardown=common.teardown) +@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_private_bucket_list_private_index_missingerrordoc(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) @@ -457,7 +490,7 @@ def test_website_private_bucket_list_private_index_missingerrordoc(): @attr(operation='list') @attr(assertion='empty private buckets via s3website return a 403 for /, blocked errordoc') @attr('s3website') -@nose.with_setup(setup=None, teardown=common.teardown) +@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_private_bucket_list_empty_blockederrordoc(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) @@ -484,7 +517,7 @@ def test_website_private_bucket_list_empty_blockederrordoc(): @attr(operation='list') @attr(assertion='empty public buckets via s3website return a 404 for /, blocked errordoc') @attr('s3website') -@nose.with_setup(setup=None, teardown=common.teardown) +@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_public_bucket_list_empty_blockederrordoc(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) @@ -510,7 +543,7 @@ def test_website_public_bucket_list_empty_blockederrordoc(): @attr(operation='list') @attr(assertion='non-empty public buckets via s3website return page for /, where page is private, blocked errordoc') @attr('s3website') 
-@nose.with_setup(setup=None, teardown=common.teardown) +@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_public_bucket_list_private_index_blockederrordoc(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) @@ -542,7 +575,7 @@ def test_website_public_bucket_list_private_index_blockederrordoc(): @attr(operation='list') @attr(assertion='non-empty private buckets via s3website return page for /, where page is private, blocked errordoc') @attr('s3website') -@nose.with_setup(setup=None, teardown=common.teardown) +@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_private_bucket_list_private_index_blockederrordoc(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) @@ -575,7 +608,7 @@ def test_website_private_bucket_list_private_index_blockederrordoc(): @attr(operation='list') @attr(assertion='empty private buckets via s3website return a 403 for /, good errordoc') @attr('s3website') -@nose.with_setup(setup=None, teardown=common.teardown) +@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_private_bucket_list_empty_gooderrordoc(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) @@ -599,7 +632,7 @@ def test_website_private_bucket_list_empty_gooderrordoc(): @attr(operation='list') @attr(assertion='empty public buckets via s3website return a 404 for /, good errordoc') @attr('s3website') -@nose.with_setup(setup=None, teardown=common.teardown) +@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_public_bucket_list_empty_gooderrordoc(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) @@ -623,7 +656,7 @@ def test_website_public_bucket_list_empty_gooderrordoc(): @attr(operation='list') @attr(assertion='non-empty 
public buckets via s3website return page for /, where page is private') @attr('s3website') -@nose.with_setup(setup=None, teardown=common.teardown) +@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_public_bucket_list_private_index_gooderrordoc(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) @@ -652,7 +685,7 @@ def test_website_public_bucket_list_private_index_gooderrordoc(): @attr(operation='list') @attr(assertion='non-empty private buckets via s3website return page for /, where page is private') @attr('s3website') -@nose.with_setup(setup=None, teardown=common.teardown) +@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_private_bucket_list_private_index_gooderrordoc(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) @@ -682,7 +715,7 @@ def test_website_private_bucket_list_private_index_gooderrordoc(): @attr(operation='list') @attr(assertion='RedirectAllRequestsTo without protocol should TODO') @attr('s3website') -@nose.with_setup(setup=None, teardown=common.teardown) +@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_bucket_private_redirectall_base(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['RedirectAll']) @@ -699,7 +732,7 @@ def test_website_bucket_private_redirectall_base(): @attr(operation='list') @attr(assertion='RedirectAllRequestsTo without protocol should TODO') @attr('s3website') -@nose.with_setup(setup=None, teardown=common.teardown) +@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_bucket_private_redirectall_path(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['RedirectAll']) @@ -718,7 +751,7 @@ def test_website_bucket_private_redirectall_path(): @attr(operation='list') @attr(assertion='RedirectAllRequestsTo 
without protocol should TODO') @attr('s3website') -@nose.with_setup(setup=None, teardown=common.teardown) +@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_bucket_private_redirectall_path_upgrade(): bucket = get_new_bucket() x = string.Template(WEBSITE_CONFIGS_XMLFRAG['RedirectAll+Protocol']).safe_substitute(RedirectAllRequestsTo_Protocol='https') @@ -740,7 +773,7 @@ def test_website_bucket_private_redirectall_path_upgrade(): @attr(assertion='x-amz-website-redirect-location should not fire without websiteconf') @attr('s3website') @attr('x-amz-website-redirect-location') -#@nose.with_setup(setup=None, teardown=common.teardown) +@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_xredirect_nonwebsite(): bucket = get_new_bucket() #f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['RedirectAll']) @@ -773,7 +806,7 @@ def test_website_xredirect_nonwebsite(): @attr(assertion='x-amz-website-redirect-location should fire websiteconf, relative path, public key') @attr('s3website') @attr('x-amz-website-redirect-location') -@nose.with_setup(setup=None, teardown=common.teardown) +@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_xredirect_public_relative(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) @@ -800,7 +833,7 @@ def test_website_xredirect_public_relative(): @attr(assertion='x-amz-website-redirect-location should fire websiteconf, absolute, public key') @attr('s3website') @attr('x-amz-website-redirect-location') -@nose.with_setup(setup=None, teardown=common.teardown) +@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_xredirect_public_abs(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) @@ -827,7 +860,7 @@ def test_website_xredirect_public_abs(): @attr(assertion='x-amz-website-redirect-location should fire 
websiteconf, relative path, private key') @attr('s3website') @attr('x-amz-website-redirect-location') -@nose.with_setup(setup=None, teardown=common.teardown) +@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_xredirect_private_relative(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) @@ -854,7 +887,7 @@ def test_website_xredirect_private_relative(): @attr(assertion='x-amz-website-redirect-location should fire websiteconf, absolute, private key') @attr('s3website') @attr('x-amz-website-redirect-location') -@nose.with_setup(setup=None, teardown=common.teardown) +@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_website_xredirect_private_abs(): bucket = get_new_bucket() f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) @@ -1038,6 +1071,7 @@ for redirect_code in VALID_AMZ_REDIRECT: # we should check that we can return that too on ceph def routing_setup(): + check_can_test_website() kwargs = {'obj':[]} bucket = get_new_bucket() kwargs['bucket'] = bucket @@ -1069,9 +1103,8 @@ def routing_teardown(**kwargs): for o in reversed(kwargs['obj']): print('Deleting', str(o)) o.delete() - - -@common.with_setup_kwargs(setup=routing_setup, teardown=routing_teardown) + +@common.with_setup_kwargs(setup=routing_setup, teardown=routing_teardown) #@timed(10) def routing_check(*args, **kwargs): bucket = kwargs['bucket'] @@ -1105,7 +1138,7 @@ def routing_check(*args, **kwargs): @attr('RoutingRules') @attr('s3website') -@nose.with_setup(setup=None, teardown=common.teardown) +@nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_routing_generator(): for t in ROUTING_RULES_TESTS: if 'xml' in t and 'RoutingRules' in t['xml'] and len(t['xml']['RoutingRules']) > 0: From 9efbe4ef813c8b9439685c9d29d86ab473c045f1 Mon Sep 17 00:00:00 2001 From: "Robin H. 
Johnson" Date: Fri, 29 Jan 2016 04:01:45 +0000 Subject: [PATCH 21/27] website: Support both NoSuchWebsiteConfiguration and NoSuchKey for fetching the website config subresource. Signed-off-by: Robin H. Johnson --- s3tests/functional/test_s3_website.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/s3tests/functional/test_s3_website.py b/s3tests/functional/test_s3_website.py index 8efd024..9fb47ae 100644 --- a/s3tests/functional/test_s3_website.py +++ b/s3tests/functional/test_s3_website.py @@ -49,7 +49,7 @@ def check_can_test_website(): wsconf = bucket.get_website_configuration() CAN_WEBSITE = True except boto.exception.S3ResponseError as e: - if e.status == 404 and e.reason == 'Not Found' and e.error_code == 'NoSuchWebsiteConfiguration': + if e.status == 404 and e.reason == 'Not Found' and e.error_code in ['NoSuchWebsiteConfiguration', 'NoSuchKey']: CAN_WEBSITE = True elif e.status == 405 and e.reason == 'Method Not Allowed' and e.error_code == 'MethodNotAllowed': # rgw_enable_static_website is false @@ -132,8 +132,8 @@ def _test_website_prep(bucket, xml_template, hardcoded_fields = {}, expect_fail= config_xmlold = common.normalize_xml(bucket.get_website_configuration_xml(), pretty_print=True) except boto.exception.S3ResponseError as e: if str(e.status) == str(404) \ - and True: - #and ('NoSuchWebsiteConfiguration' in e.body or 'NoSuchWebsiteConfiguration' in e.code): + and ('NoSuchWebsiteConfiguration' in e.body or 'NoSuchWebsiteConfiguration' in e.code or + 'NoSuchKey' in e.body or 'NoSuchKey' in e.code): pass else: raise e From 17f98b00baea0f9195b20a5b1a400598f016ac0e Mon Sep 17 00:00:00 2001 From: "Robin H. Johnson" Date: Fri, 29 Jan 2016 04:02:08 +0000 Subject: [PATCH 22/27] website: Use better test annotation for routingrules tests. Signed-off-by: Robin H. 
Johnson --- s3tests/functional/test_s3_website.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/s3tests/functional/test_s3_website.py b/s3tests/functional/test_s3_website.py index 9fb47ae..a70e426 100644 --- a/s3tests/functional/test_s3_website.py +++ b/s3tests/functional/test_s3_website.py @@ -1136,7 +1136,7 @@ def routing_check(*args, **kwargs): else: assert(False) -@attr('RoutingRules') +@attr('s3website_RoutingRules') @attr('s3website') @nose.with_setup(setup=check_can_test_website, teardown=common.teardown) def test_routing_generator(): From 6ce34d3c6a5fbc36c518977e5ef75b325910057a Mon Sep 17 00:00:00 2001 From: "Robin H. Johnson" Date: Fri, 29 Jan 2016 05:31:15 +0000 Subject: [PATCH 23/27] website: RGW does not have custom 404 message descriptions per s3 error codes yet. Signed-off-by: Robin H. Johnson --- s3tests/functional/test_s3_website.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/s3tests/functional/test_s3_website.py b/s3tests/functional/test_s3_website.py index a70e426..574dbd1 100644 --- a/s3tests/functional/test_s3_website.py +++ b/s3tests/functional/test_s3_website.py @@ -790,10 +790,9 @@ def test_website_xredirect_nonwebsite(): res = _website_request(bucket.name, '/page') body = res.read() print(body) - # RGW returns "302 Found" per RFC2616 - # S3 returns 302 Moved Temporarily per RFC1945 - #_website_expected_redirect_response(res, 302, ['Found', 'Moved Temporarily'], new_url) - expected_content = _website_expected_default_html(Code='NoSuchWebsiteConfiguration', BucketName=bucket.name, Message='The specified bucket does not have a website configuration') + expected_content = _website_expected_default_html(Code='NoSuchWebsiteConfiguration', BucketName=bucket.name) + # TODO: RGW does not have custom error messages for different 404s yet + #expected_content = _website_expected_default_html(Code='NoSuchWebsiteConfiguration', BucketName=bucket.name, Message='The specified bucket does not have a website 
configuration') print(expected_content) _website_expected_error_response(res, bucket.name, 404, 'Not Found', 'NoSuchWebsiteConfiguration', content=expected_content, body=body) From f028e1e926bddac3396e7316f3c483a4f4ea161d Mon Sep 17 00:00:00 2001 From: "Robin H. Johnson" Date: Sat, 30 Jan 2016 17:14:15 +0000 Subject: [PATCH 24/27] website: include Forbidden in 403 test per https://github.com/dreamhost/s3-tests/commit/802c8a3ee985a90e35b21ffab5f670c2eeecf8a9#commitcomment-15776266. Signed-off-by: Robin H. Johnson --- s3tests/functional/test_s3_website.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/s3tests/functional/test_s3_website.py b/s3tests/functional/test_s3_website.py index 574dbd1..f55c478 100644 --- a/s3tests/functional/test_s3_website.py +++ b/s3tests/functional/test_s3_website.py @@ -54,7 +54,7 @@ def check_can_test_website(): elif e.status == 405 and e.reason == 'Method Not Allowed' and e.error_code == 'MethodNotAllowed': # rgw_enable_static_website is false CAN_WEBSITE = False - elif e.status == 403 and e.reason == 'SignatureDoesNotMatch': + elif e.status == 403 and e.reason == 'SignatureDoesNotMatch' and e.error_code == 'Forbidden': # This is older versions that do not support the website code CAN_WEBSITE = False else: From 94fa405270970066c7eebbb95dada7b9d640301c Mon Sep 17 00:00:00 2001 From: "Robin H. Johnson" Date: Sat, 30 Jan 2016 17:38:45 +0000 Subject: [PATCH 25/27] Refactor _make_bucket_request. s3tests.functional._make_bucket_request is now a special case of _make_request, as requested by https://github.com/ceph/s3-tests/pull/92#commitcomment-15775972 Signed-off-by: Robin H. 
Johnson --- s3tests/functional/__init__.py | 56 +++++++++++++++++++--------------- 1 file changed, 31 insertions(+), 25 deletions(-) diff --git a/s3tests/functional/__init__.py b/s3tests/functional/__init__.py index 7a5eb57..4bfc580 100644 --- a/s3tests/functional/__init__.py +++ b/s3tests/functional/__init__.py @@ -405,7 +405,9 @@ def _make_request(method, bucket, key, body=None, authenticated=False, response_ """ issue a request for a specified method, on a specified , with a specified (optional) body (encrypted per the connection), and - return the response (status, reason) + return the response (status, reason). + + If key is None, then this will be treated as a bucket-level request. """ if response_headers is None: response_headers = {} @@ -416,44 +418,48 @@ def _make_request(method, bucket, key, body=None, authenticated=False, response_ request_headers['Host'] = conn.calling_format.build_host(conn.server_name(), bucket.name) if authenticated: - url = key.generate_url(expires_in, method=method, response_headers=response_headers, headers=request_headers) + urlobj = None + if key is not None: + urlobj = key + elif bucket is not None: + urlobj = bucket + else: + raise RuntimeError('Unable to find bucket name') + url = urlobj.generate_url(expires_in, method=method, response_headers=response_headers, headers=request_headers) o = urlparse(url) path = o.path + '?' 
+ o.query else: - if path_style: - path = '/{bucket}/{obj}'.format(bucket=key.bucket.name, obj=key.name) + bucketobj = None + if key is not None: + path = '/{obj}'.format(obj=key.name) + bucketobj = key.bucket + elif bucket is not None: + path = '/' + bucketobj = bucket else: - path = '/{obj}'.format(bucket=key.bucket.name, obj=key.name) + raise RuntimeError('Unable to find bucket name') + if path_style: + path = '/{bucket}'.format(bucket=bucketobj.name) + path return _make_raw_request(host=s3.main.host, port=s3.main.port, method=method, path=path, body=body, request_headers=request_headers, secure=s3.main.is_secure, timeout=timeout) def _make_bucket_request(method, bucket, body=None, authenticated=False, response_headers=None, request_headers=None, expires_in=100000, path_style=True, timeout=None): """ - issue a request for a specified method, on a specified , + issue a request for a specified method, on a specified , with a specified (optional) body (encrypted per the connection), and return the response (status, reason) """ - if response_headers is None: - response_headers = {} - if request_headers is None: - request_headers = {} - if not path_style: - conn = bucket.connection - request_headers['Host'] = conn.calling_format.build_host(conn.server_name(), bucket.name) - - if authenticated: - url = bucket.generate_url(expires_in, method=method, response_headers=response_headers, headers=request_headers) - o = urlparse(url) - path = o.path + '?' 
+ o.query - else: - if path_style: - path = '/{bucket}'.format(bucket=bucket.name) - else: - path = '/' - - return _make_raw_request(host=s3.main.host, port=s3.main.port, method=method, path=path, body=body, request_headers=request_headers, secure=s3.main.is_secure, timeout=timeout) + return _make_request(method=method, bucket=bucket, key=None, body=body, authenticated=authenticated, response_headers=response_headers, request_headers=request_headers, expires_in=expires_in, path_style=path_style, timeout=timeout) def _make_raw_request(host, port, method, path, body=None, request_headers=None, secure=False, timeout=None): + """ + issue a request to a specific host & port, for a specified method, on a + specified path with a specified (optional) body (encrypted per the + connection), and return the response (status, reason). + + This allows construction of special cases not covered by the bucket/key to + URL mapping of _make_request/_make_bucket_request. + """ if secure: class_ = HTTPSConnection else: From 8da96c83afee37169627eb545b12ec69bcc2ca85 Mon Sep 17 00:00:00 2001 From: "Robin H. Johnson" Date: Sun, 27 Mar 2016 02:36:46 +0000 Subject: [PATCH 26/27] functional._make_request: go back to simple version of httplib.request. The httplib.request function automatically constructs the following headers: content-length, host, accept-encoding Until such time as we need to override those values in incompatible ways, simply use it. Fixes: https://github.com/ceph/s3-tests/pull/92#issuecomment-185970260 Signed-off-by: Robin H. Johnson --- s3tests/functional/__init__.py | 19 ++++++------------- 1 file changed, 6 insertions(+), 13 deletions(-) diff --git a/s3tests/functional/__init__.py b/s3tests/functional/__init__.py index 4bfc580..8fb4845 100644 --- a/s3tests/functional/__init__.py +++ b/s3tests/functional/__init__.py @@ -408,11 +408,10 @@ def _make_request(method, bucket, key, body=None, authenticated=False, response_ return the response (status, reason). 
If key is None, then this will be treated as a bucket-level request. + + If the request or response headers are None, then default values will be + provided by later methods. """ - if response_headers is None: - response_headers = {} - if request_headers is None: - request_headers = {} if not path_style: conn = bucket.connection request_headers['Host'] = conn.calling_format.build_host(conn.server_name(), bucket.name) @@ -468,17 +467,11 @@ def _make_raw_request(host, port, method, path, body=None, request_headers=None, if request_headers is None: request_headers = {} - skip_host=('Host' in request_headers) - skip_accept_encoding = False c = class_(host, port, strict=True, timeout=timeout) - # We do the request manually, so we can muck with headers - #c.request(method, path, body=body, headers=request_headers) - c.connect() - c.putrequest(method, path, skip_host, skip_accept_encoding) - for k,v in request_headers.items(): - c.putheader(k,v) - c.endheaders(message_body=body) + # TODO: We might have to modify this in future if we need to interact with + # how httplib.request handles Accept-Encoding and Host. + c.request(method, path, body=body, headers=request_headers) res = c.getresponse() #c.close() From 811e7f6d7e5157f7bb5c9d6bf9aeb2a4fc59d0ff Mon Sep 17 00:00:00 2001 From: "Robin H. Johnson" Date: Thu, 21 Apr 2016 15:10:15 -0700 Subject: [PATCH 27/27] s3website: Refactor strings & aid debugging Refactor duplicated static sites template content, and include a debug copy of the WebsiteConf XML in every bucket for ease of debug with the test generator. Signed-off-by: Robin H. 
Johnson --- s3tests/functional/test_s3_website.py | 59 ++++++++++++++++----------- 1 file changed, 36 insertions(+), 23 deletions(-) diff --git a/s3tests/functional/test_s3_website.py b/s3tests/functional/test_s3_website.py index f55c478..aa837e8 100644 --- a/s3tests/functional/test_s3_website.py +++ b/s3tests/functional/test_s3_website.py @@ -37,6 +37,8 @@ WEBSITE_CONFIGS_XMLFRAG = { 'RedirectAll': '${RedirectAllRequestsTo_HostName}${RoutingRules}', 'RedirectAll+Protocol': '${RedirectAllRequestsTo_HostName}${RedirectAllRequestsTo_Protocol}${RoutingRules}', } +INDEXDOC_TEMPLATE = '<html><h1>IndexDoc</h1><body>{random}</body></html>' +ERRORDOC_TEMPLATE = '<html><h1>ErrorDoc</h1><body>{random}</body></html>' CAN_WEBSITE = None @@ -279,7 +281,7 @@ def test_website_public_bucket_list_public_index(): f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) bucket.make_public() indexhtml = bucket.new_key(f['IndexDocument_Suffix']) - indexstring = choose_bucket_prefix(template='{random}', max_len=256) + indexstring = choose_bucket_prefix(template=INDEXDOC_TEMPLATE, max_len=256) indexhtml.set_contents_from_string(indexstring) indexhtml.make_public() #time.sleep(1) @@ -305,7 +307,7 @@ def test_website_private_bucket_list_public_index(): f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) bucket.set_canned_acl('private') indexhtml = bucket.new_key(f['IndexDocument_Suffix']) - indexstring = choose_bucket_prefix(template='{random}', max_len=256) + indexstring = choose_bucket_prefix(template=INDEXDOC_TEMPLATE, max_len=256) indexhtml.set_contents_from_string(indexstring) indexhtml.make_public() #time.sleep(1) @@ -365,7 +367,7 @@ def test_website_public_bucket_list_private_index(): f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) bucket.make_public() indexhtml = bucket.new_key(f['IndexDocument_Suffix']) - indexstring = choose_bucket_prefix(template='{random}', max_len=256) + indexstring = choose_bucket_prefix(template=INDEXDOC_TEMPLATE, max_len=256) indexhtml.set_contents_from_string(indexstring) indexhtml.set_canned_acl('private') #time.sleep(1) @@ -390,7 +392,7 @@ def test_website_private_bucket_list_private_index(): f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDoc']) bucket.set_canned_acl('private') indexhtml = bucket.new_key(f['IndexDocument_Suffix']) - indexstring = choose_bucket_prefix(template='{random}', max_len=256) + indexstring = choose_bucket_prefix(template=INDEXDOC_TEMPLATE, max_len=256) indexhtml.set_contents_from_string(indexstring) indexhtml.set_canned_acl('private') ##time.sleep(1) @@ -447,7 +449,7 @@ def test_website_public_bucket_list_private_index_missingerrordoc(): f =
_test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) bucket.make_public() indexhtml = bucket.new_key(f['IndexDocument_Suffix']) - indexstring = choose_bucket_prefix(template='{random}', max_len=256) + indexstring = choose_bucket_prefix(template=INDEXDOC_TEMPLATE, max_len=256) indexhtml.set_contents_from_string(indexstring) indexhtml.set_canned_acl('private') #time.sleep(1) @@ -471,7 +473,7 @@ def test_website_private_bucket_list_private_index_missingerrordoc(): f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) bucket.set_canned_acl('private') indexhtml = bucket.new_key(f['IndexDocument_Suffix']) - indexstring = choose_bucket_prefix(template='{random}', max_len=256) + indexstring = choose_bucket_prefix(template=INDEXDOC_TEMPLATE, max_len=256) indexhtml.set_contents_from_string(indexstring) indexhtml.set_canned_acl('private') #time.sleep(1) @@ -496,7 +498,7 @@ def test_website_private_bucket_list_empty_blockederrordoc(): f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) bucket.set_canned_acl('private') errorhtml = bucket.new_key(f['ErrorDocument_Key']) - errorstring = choose_bucket_prefix(template='{random}', max_len=256) + errorstring = choose_bucket_prefix(template=ERRORDOC_TEMPLATE, max_len=256) errorhtml.set_contents_from_string(errorstring) errorhtml.set_canned_acl('private') #time.sleep(1) @@ -523,7 +525,7 @@ def test_website_public_bucket_list_empty_blockederrordoc(): f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) bucket.make_public() errorhtml = bucket.new_key(f['ErrorDocument_Key']) - errorstring = choose_bucket_prefix(template='{random}', max_len=256) + errorstring = choose_bucket_prefix(template=ERRORDOC_TEMPLATE, max_len=256) errorhtml.set_contents_from_string(errorstring) errorhtml.set_canned_acl('private') while bucket.get_key(f['ErrorDocument_Key']) is None: @@ -549,11 +551,11 @@ def test_website_public_bucket_list_private_index_blockederrordoc(): f = 
_test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) bucket.make_public() indexhtml = bucket.new_key(f['IndexDocument_Suffix']) - indexstring = choose_bucket_prefix(template='{random}', max_len=256) + indexstring = choose_bucket_prefix(template=INDEXDOC_TEMPLATE, max_len=256) indexhtml.set_contents_from_string(indexstring) indexhtml.set_canned_acl('private') errorhtml = bucket.new_key(f['ErrorDocument_Key']) - errorstring = choose_bucket_prefix(template='{random}', max_len=256) + errorstring = choose_bucket_prefix(template=ERRORDOC_TEMPLATE, max_len=256) errorhtml.set_contents_from_string(errorstring) errorhtml.set_canned_acl('private') #time.sleep(1) @@ -581,11 +583,11 @@ def test_website_private_bucket_list_private_index_blockederrordoc(): f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) bucket.set_canned_acl('private') indexhtml = bucket.new_key(f['IndexDocument_Suffix']) - indexstring = choose_bucket_prefix(template='{random}', max_len=256) + indexstring = choose_bucket_prefix(template=INDEXDOC_TEMPLATE, max_len=256) indexhtml.set_contents_from_string(indexstring) indexhtml.set_canned_acl('private') errorhtml = bucket.new_key(f['ErrorDocument_Key']) - errorstring = choose_bucket_prefix(template='{random}', max_len=256) + errorstring = choose_bucket_prefix(template=ERRORDOC_TEMPLATE, max_len=256) errorhtml.set_contents_from_string(errorstring) errorhtml.set_canned_acl('private') #time.sleep(1) @@ -614,9 +616,8 @@ def test_website_private_bucket_list_empty_gooderrordoc(): f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) bucket.set_canned_acl('private') errorhtml = bucket.new_key(f['ErrorDocument_Key']) - errorstring = choose_bucket_prefix(template='{random}', max_len=256) - errorhtml.set_contents_from_string(errorstring) - errorhtml.set_canned_acl('public-read') + errorstring = choose_bucket_prefix(template=ERRORDOC_TEMPLATE, max_len=256) + errorhtml.set_contents_from_string(errorstring, 
policy='public-read') #time.sleep(1) while bucket.get_key(f['ErrorDocument_Key']) is None: time.sleep(SLEEP_INTERVAL) @@ -638,7 +639,7 @@ def test_website_public_bucket_list_empty_gooderrordoc(): f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) bucket.make_public() errorhtml = bucket.new_key(f['ErrorDocument_Key']) - errorstring = choose_bucket_prefix(template='{random}', max_len=256) + errorstring = choose_bucket_prefix(template=ERRORDOC_TEMPLATE, max_len=256) errorhtml.set_contents_from_string(errorstring) errorhtml.set_canned_acl('public-read') #time.sleep(1) @@ -662,11 +663,11 @@ def test_website_public_bucket_list_private_index_gooderrordoc(): f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) bucket.make_public() indexhtml = bucket.new_key(f['IndexDocument_Suffix']) - indexstring = choose_bucket_prefix(template='{random}', max_len=256) + indexstring = choose_bucket_prefix(template=INDEXDOC_TEMPLATE, max_len=256) indexhtml.set_contents_from_string(indexstring) indexhtml.set_canned_acl('private') errorhtml = bucket.new_key(f['ErrorDocument_Key']) - errorstring = choose_bucket_prefix(template='{random}', max_len=256) + errorstring = choose_bucket_prefix(template=ERRORDOC_TEMPLATE, max_len=256) errorhtml.set_contents_from_string(errorstring) errorhtml.set_canned_acl('public-read') #time.sleep(1) @@ -691,11 +692,11 @@ def test_website_private_bucket_list_private_index_gooderrordoc(): f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc']) bucket.set_canned_acl('private') indexhtml = bucket.new_key(f['IndexDocument_Suffix']) - indexstring = choose_bucket_prefix(template='{random}', max_len=256) + indexstring = choose_bucket_prefix(template=INDEXDOC_TEMPLATE, max_len=256) indexhtml.set_contents_from_string(indexstring) indexhtml.set_canned_acl('private') errorhtml = bucket.new_key(f['ErrorDocument_Key']) - errorstring = choose_bucket_prefix(template='{random}', max_len=256) + errorstring = 
choose_bucket_prefix(template=ERRORDOC_TEMPLATE, max_len=256) errorhtml.set_contents_from_string(errorstring) errorhtml.set_canned_acl('public-read') #time.sleep(1) @@ -1010,6 +1011,10 @@ ROUTING_RULES = { """, } +for k in ROUTING_RULES.keys(): + if len(ROUTING_RULES[k]) > 0: + ROUTING_RULES[k] = "<!-- %s -->\n%s" % (k, ROUTING_RULES[k]) + ROUTING_RULES_TESTS = [ dict(xml=dict(RoutingRules=ROUTING_RULES['empty']), url='', location=None, code=200), dict(xml=dict(RoutingRules=ROUTING_RULES['empty']), url='/', location=None, code=200), @@ -1043,7 +1048,7 @@ ROUTING_RULES_TESTS = [ ROUTING_ERROR_PROTOCOL = dict(code=400, reason='Bad Request', errorcode='InvalidRequest', bodyregex=r'Invalid protocol, protocol can be http or https. If not defined the protocol will be selected automatically.') -ROUTING_RULES_TESTS_ERRORS = [ +ROUTING_RULES_TESTS_ERRORS = [ # TODO: Unused! # Invalid protocol, protocol can be http or https. If not defined the protocol will be selected automatically. dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=http2']), url='/', location=None, code=400, error=ROUTING_ERROR_PROTOCOL), dict(xml=dict(RoutingRules=ROUTING_RULES['AmazonExample1+Protocol=http2']), url='/x', location=None, code=400, error=ROUTING_ERROR_PROTOCOL), @@ -1079,16 +1084,20 @@ def routing_setup(): f = _test_website_prep(bucket, '') kwargs.update(f) bucket.set_canned_acl('public-read') + + k = bucket.new_key('debug-ws.xml') + kwargs['obj'].append(k) + k.set_contents_from_string('', policy='public-read') k = bucket.new_key(f['IndexDocument_Suffix']) kwargs['obj'].append(k) - s = choose_bucket_prefix(template='<html><h1>Index</h1><body>{random}</body></html>', max_len=64) + s = choose_bucket_prefix(template=INDEXDOC_TEMPLATE, max_len=64) k.set_contents_from_string(s) k.set_canned_acl('public-read') k = bucket.new_key(f['ErrorDocument_Key']) kwargs['obj'].append(k) - s = choose_bucket_prefix(template='<html><h1>Error</h1><body>{random}</body></html>', max_len=64) + s = choose_bucket_prefix(template=ERRORDOC_TEMPLATE, max_len=64) k.set_contents_from_string(s) k.set_canned_acl('public-read') @@ -1112,6 +1121,10 @@ def routing_check(*args, **kwargs): pprint(args) xml_fields = kwargs.copy() xml_fields.update(args['xml']) + + k = bucket.get_key('debug-ws.xml') + k.set_contents_from_string(str(args)+str(kwargs), policy='public-read') + pprint(xml_fields) f = _test_website_prep(bucket, WEBSITE_CONFIGS_XMLFRAG['IndexDocErrorDoc'], hardcoded_fields=xml_fields) #print(f)