Mirror of https://github.com/ceph/s3-tests.git (synced 2024-11-25 03:47:22 +00:00)
Merge remote-tracking branch 'dreamhost/wip-static-website'
Signed-off-by: Yehuda Sadeh <yehuda@redhat.com>

Conflicts:
	s3tests/functional/test_s3.py
commit 14d58a3bda
5 changed files with 1409 additions and 63 deletions
@@ -9,3 +9,4 @@ requests ==0.14.0
 pytz >=2011k
 ordereddict
 httplib2
+lxml
@@ -5,6 +5,11 @@ import os
 import random
 import string
 import yaml
+import re
+from lxml import etree

+from doctest import Example
+from lxml.doctestcompare import LXMLOutputChecker
+
 s3 = bunch.Bunch()
 config = bunch.Bunch()
@@ -181,3 +186,116 @@ def get_new_bucket(connection=None):

 def teardown():
     nuke_prefixed_buckets()
+
+def with_setup_kwargs(setup, teardown=None):
+    """Decorator to add setup and/or teardown methods to a test function::
+
+      @with_setup_args(setup, teardown)
+      def test_something():
+          " ... "
+
+    The setup function should return (kwargs) which will be passed to
+    test function, and teardown function.
+
+    Note that `with_setup_kwargs` is useful *only* for test functions, not for test
+    methods or inside of TestCase subclasses.
+    """
+    def decorate(func):
+        kwargs = {}
+
+        def test_wrapped(*args, **kwargs2):
+            k2 = kwargs.copy()
+            k2.update(kwargs2)
+            k2['testname'] = func.__name__
+            func(*args, **k2)
+
+        test_wrapped.__name__ = func.__name__
+
+        def setup_wrapped():
+            k = setup()
+            kwargs.update(k)
+            if hasattr(func, 'setup'):
+                func.setup()
+        test_wrapped.setup = setup_wrapped
+
+        if teardown:
+            def teardown_wrapped():
+                if hasattr(func, 'teardown'):
+                    func.teardown()
+                teardown(**kwargs)
+
+            test_wrapped.teardown = teardown_wrapped
+        else:
+            if hasattr(func, 'teardown'):
+                test_wrapped.teardown = func.teardown()
+        return test_wrapped
+    return decorate
+
+# Demo case for the above, when you run test_gen():
+# _test_gen will run twice,
+# with the following stderr printing
+# setup_func {'b': 2}
+# testcase ('1',) {'b': 2, 'testname': '_test_gen'}
+# teardown_func {'b': 2}
+# setup_func {'b': 2}
+# testcase () {'b': 2, 'testname': '_test_gen'}
+# teardown_func {'b': 2}
+#
+#def setup_func():
+#    kwargs = {'b': 2}
+#    print("setup_func", kwargs, file=sys.stderr)
+#    return kwargs
+#
+#def teardown_func(**kwargs):
+#    print("teardown_func", kwargs, file=sys.stderr)
+#
+#@with_setup_kwargs(setup=setup_func, teardown=teardown_func)
+#def _test_gen(*args, **kwargs):
+#    print("testcase", args, kwargs, file=sys.stderr)
+#
+#def test_gen():
+#    yield _test_gen, '1'
+#    yield _test_gen
+
+def trim_xml(xml_str):
+    p = etree.XMLParser(remove_blank_text=True)
+    elem = etree.XML(xml_str, parser=p)
+    return etree.tostring(elem)
+
+def normalize_xml(xml, pretty_print=True):
+    if xml is None:
+        return xml
+
+    root = etree.fromstring(xml.encode(encoding='ascii'))
+
+    for element in root.iter('*'):
+        if element.text is not None and not element.text.strip():
+            element.text = None
+        if element.text is not None:
+            element.text = element.text.strip().replace("\n", "").replace("\r", "")
+        if element.tail is not None and not element.tail.strip():
+            element.tail = None
+        if element.tail is not None:
+            element.tail = element.tail.strip().replace("\n", "").replace("\r", "")
+
+    # Sort the elements
+    for parent in root.xpath('//*[./*]'): # Search for parent elements
+        parent[:] = sorted(parent,key=lambda x: x.tag)
+
+    xmlstr = etree.tostring(root, encoding="utf-8", xml_declaration=True, pretty_print=pretty_print)
+    # there are two different DTD URIs
+    xmlstr = re.sub(r'xmlns="[^"]+"', 'xmlns="s3"', xmlstr)
+    xmlstr = re.sub(r'xmlns=\'[^\']+\'', 'xmlns="s3"', xmlstr)
+    for uri in ['http://doc.s3.amazonaws.com/doc/2006-03-01/', 'http://s3.amazonaws.com/doc/2006-03-01/']:
+        xmlstr = xmlstr.replace(uri, 'URI-DTD')
+    #xmlstr = re.sub(r'>\s+', '>', xmlstr, count=0, flags=re.MULTILINE)
+    return xmlstr
+
+def assert_xml_equal(got, want):
+    assert want is not None, 'Wanted XML cannot be None'
+    if got is None:
+        raise AssertionError('Got input to validate was None')
+    checker = LXMLOutputChecker()
+    if not checker.check_output(want, got, 0):
+        message = checker.output_difference(Example("", want), got, 0)
+        raise AssertionError(message)
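To make the intent of the new XML helpers concrete, here is a small usage sketch (not part of the commit; the two payloads below are invented examples): two logically equivalent website-configuration documents that differ only in namespace URI, whitespace, and child order normalize to the same canonical form, so assert_xml_equal() passes.

got = ('<WebsiteConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/">'
       '<IndexDocument><Suffix>index.html</Suffix></IndexDocument>'
       '<ErrorDocument><Key>error.html</Key></ErrorDocument>'
       '</WebsiteConfiguration>')
want = """
<WebsiteConfiguration xmlns="http://doc.s3.amazonaws.com/doc/2006-03-01/">
    <ErrorDocument><Key>error.html</Key></ErrorDocument>
    <IndexDocument><Suffix>index.html</Suffix></IndexDocument>
</WebsiteConfiguration>
"""
# normalize_xml() drops insignificant whitespace, sorts sibling elements, and
# rewrites both known namespace URIs, so the comparison ignores those details.
assert_xml_equal(normalize_xml(got), normalize_xml(want))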
@@ -1,3 +1,5 @@
+from __future__ import print_function
+import sys
 import ConfigParser
 import boto.exception
 import boto.s3.connection
@@ -6,6 +8,8 @@ import itertools
 import os
 import random
 import string
+from httplib import HTTPConnection, HTTPSConnection
+from urlparse import urlparse

 from .utils import region_sync_meta

@@ -55,15 +59,15 @@ def choose_bucket_prefix(template, max_len=30):


 def nuke_prefixed_buckets_on_conn(prefix, name, conn):
-    print 'Cleaning buckets from connection {name} prefix {prefix!r}.'.format(
+    print('Cleaning buckets from connection {name} prefix {prefix!r}.'.format(
         name=name,
         prefix=prefix,
-        )
+        ))

     for bucket in conn.get_all_buckets():
-        print 'prefix=',prefix
+        print('prefix=',prefix)
         if bucket.name.startswith(prefix):
-            print 'Cleaning bucket {bucket}'.format(bucket=bucket)
+            print('Cleaning bucket {bucket}'.format(bucket=bucket))
             success = False
             for i in xrange(2):
                 try:
@@ -81,17 +85,17 @@ def nuke_prefixed_buckets_on_conn(prefix, name, conn):
                            raise e
                        keys = bucket.list();
                    for key in keys:
-                        print 'Cleaning bucket {bucket} key {key}'.format(
+                        print('Cleaning bucket {bucket} key {key}'.format(
                            bucket=bucket,
                            key=key,
-                            )
+                            ))
                        # key.set_canned_acl('private')
                        bucket.delete_key(key.name, version_id = key.version_id)
                    bucket.delete()
                    success = True
                except boto.exception.S3ResponseError as e:
                    if e.error_code != 'AccessDenied':
-                        print 'GOT UNWANTED ERROR', e.error_code
+                        print('GOT UNWANTED ERROR', e.error_code)
                        raise
                    # seems like we don't have permissions set appropriately, we'll
                    # modify permissions and retry
@@ -107,26 +111,26 @@ def nuke_prefixed_buckets(prefix):
     # If no regions are specified, use the simple method
     if targets.main.master == None:
         for name, conn in s3.items():
-            print 'Deleting buckets on {name}'.format(name=name)
+            print('Deleting buckets on {name}'.format(name=name))
             nuke_prefixed_buckets_on_conn(prefix, name, conn)
     else:
         # First, delete all buckets on the master connection
         for name, conn in s3.items():
             if conn == targets.main.master.connection:
-                print 'Deleting buckets on {name} (master)'.format(name=name)
+                print('Deleting buckets on {name} (master)'.format(name=name))
                 nuke_prefixed_buckets_on_conn(prefix, name, conn)

         # Then sync to propagate deletes to secondaries
         region_sync_meta(targets.main, targets.main.master.connection)
-        print 'region-sync in nuke_prefixed_buckets'
+        print('region-sync in nuke_prefixed_buckets')

         # Now delete remaining buckets on any other connection
         for name, conn in s3.items():
             if conn != targets.main.master.connection:
-                print 'Deleting buckets on {name} (non-master)'.format(name=name)
+                print('Deleting buckets on {name} (non-master)'.format(name=name))
                 nuke_prefixed_buckets_on_conn(prefix, name, conn)

-    print 'Done with cleanup of test buckets.'
+    print('Done with cleanup of test buckets.')

 class TargetConfig:
     def __init__(self, cfg, section):
@@ -310,6 +314,10 @@ def setup():
                 'user_id',
                 'display_name',
                 'email',
+                's3website_domain',
+                'host',
+                'port',
+                'is_secure',
                 ]:
             try:
                 config[name][var] = cfg.get(section, var)
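For context (not part of the diff), these keys are read from the usual s3-tests INI configuration via ConfigParser. A sketch of what a connection section might contain, with placeholder values and a section name that depends on the local setup:

[s3 main]
host = localhost
port = 8000
is_secure = no
s3website_domain = s3-website.localhost
user_id = 0123456789abcdef
display_name = tester
email = tester@example.com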
@@ -392,3 +400,83 @@ def get_new_bucket(target=None, name=None, headers=None):
     # ignore that as astronomically unlikely
     bucket = connection.create_bucket(name, location=target.conf.api_name, headers=headers)
     return bucket
+
+def _make_request(method, bucket, key, body=None, authenticated=False, response_headers=None, request_headers=None, expires_in=100000, path_style=True, timeout=None):
+    """
+    issue a request for a specified method, on a specified <bucket,key>,
+    with a specified (optional) body (encrypted per the connection), and
+    return the response (status, reason).
+
+    If key is None, then this will be treated as a bucket-level request.
+
+    If the request or response headers are None, then default values will be
+    provided by later methods.
+    """
+    if not path_style:
+        conn = bucket.connection
+        request_headers['Host'] = conn.calling_format.build_host(conn.server_name(), bucket.name)
+
+    if authenticated:
+        urlobj = None
+        if key is not None:
+            urlobj = key
+        elif bucket is not None:
+            urlobj = bucket
+        else:
+            raise RuntimeError('Unable to find bucket name')
+        url = urlobj.generate_url(expires_in, method=method, response_headers=response_headers, headers=request_headers)
+        o = urlparse(url)
+        path = o.path + '?' + o.query
+    else:
+        bucketobj = None
+        if key is not None:
+            path = '/{obj}'.format(obj=key.name)
+            bucketobj = key.bucket
+        elif bucket is not None:
+            path = '/'
+            bucketobj = bucket
+        else:
+            raise RuntimeError('Unable to find bucket name')
+        if path_style:
+            path = '/{bucket}'.format(bucket=bucketobj.name) + path
+
+    return _make_raw_request(host=s3.main.host, port=s3.main.port, method=method, path=path, body=body, request_headers=request_headers, secure=s3.main.is_secure, timeout=timeout)
+
+def _make_bucket_request(method, bucket, body=None, authenticated=False, response_headers=None, request_headers=None, expires_in=100000, path_style=True, timeout=None):
+    """
+    issue a request for a specified method, on a specified <bucket>,
+    with a specified (optional) body (encrypted per the connection), and
+    return the response (status, reason)
+    """
+    return _make_request(method=method, bucket=bucket, key=None, body=body, authenticated=authenticated, response_headers=response_headers, request_headers=request_headers, expires_in=expires_in, path_style=path_style, timeout=timeout)
+
+def _make_raw_request(host, port, method, path, body=None, request_headers=None, secure=False, timeout=None):
+    """
+    issue a request to a specific host & port, for a specified method, on a
+    specified path with a specified (optional) body (encrypted per the
+    connection), and return the response (status, reason).
+
+    This allows construction of special cases not covered by the bucket/key to
+    URL mapping of _make_request/_make_bucket_request.
+    """
+    if secure:
+        class_ = HTTPSConnection
+    else:
+        class_ = HTTPConnection
+
+    if request_headers is None:
+        request_headers = {}
+
+    c = class_(host, port, strict=True, timeout=timeout)
+
+    # TODO: We might have to modify this in future if we need to interact with
+    # how httplib.request handles Accept-Encoding and Host.
+    c.request(method, path, body=body, headers=request_headers)
+
+    res = c.getresponse()
+    #c.close()
+
+    print(res.status, res.reason)
+    return res
+
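A rough usage sketch (not part of the commit) of how a test might drive these helpers, assuming a bucket created with the existing get_new_bucket() helper:

# Anonymous, path-style GET on a freshly created test bucket.
bucket = get_new_bucket()
res = _make_bucket_request('GET', bucket, authenticated=False)
print(res.status, res.reason)

# _make_raw_request() can hit paths with no bucket/key mapping at all,
# e.g. a hand-built subresource query string (illustrative only).
res = _make_raw_request(host=s3.main.host, port=s3.main.port, method='GET',
                        path='/{bucket}?website'.format(bucket=bucket.name),
                        secure=s3.main.is_secure)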
@@ -54,6 +54,8 @@ from . import (
     config,
     get_prefix,
     is_slow_backend,
+    _make_request,
+    _make_bucket_request,
     )


@@ -2569,57 +2571,6 @@ def _setup_bucket_request(bucket_acl=None):

     return bucket

-def _make_request(method, bucket, key, body=None, authenticated=False, response_headers=None, expires_in=100000):
-    """
-    issue a request for a specified method, on a specified <bucket,key>,
-    with a specified (optional) body (encrypted per the connection), and
-    return the response (status, reason)
-    """
-    if authenticated:
-        url = key.generate_url(expires_in, method=method, response_headers=response_headers)
-        o = urlparse(url)
-        path = o.path + '?' + o.query
-    else:
-        path = '/{bucket}/{obj}'.format(bucket=key.bucket.name, obj=key.name)
-
-    if s3.main.is_secure:
-        class_ = HTTPSConnection
-    else:
-        class_ = HTTPConnection
-
-    c = class_(s3.main.host, s3.main.port, strict=True)
-    c.request(method, path, body=body)
-    res = c.getresponse()
-
-    print res.status, res.reason
-    return res
-
-def _make_bucket_request(method, bucket, body=None, authenticated=False, expires_in=100000):
-    """
-    issue a request for a specified method, on a specified <bucket,key>,
-    with a specified (optional) body (encrypted per the connection), and
-    return the response (status, reason)
-    """
-    if authenticated:
-        url = bucket.generate_url(expires_in, method=method)
-        o = urlparse(url)
-        path = o.path + '?' + o.query
-    else:
-        path = '/{bucket}'.format(bucket=bucket.name)
-
-    if s3.main.is_secure:
-        class_ = HTTPSConnection
-    else:
-        class_ = HTTPConnection
-
-    c = class_(s3.main.host, s3.main.port, strict=True)
-    c.request(method, path, body=body)
-    res = c.getresponse()
-
-    print res.status, res.reason
-    return res
-
-
 @attr(resource='object')
 @attr(method='get')
 @attr(operation='publically readable bucket')
@@ -3052,6 +3003,7 @@ def _test_bucket_create_naming_good_long(length):
 @attr(method='put')
 @attr(operation='create w/250 byte name')
 @attr(assertion='fails with subdomain')
+@attr('fails_on_aws') # <Error><Code>InvalidBucketName</Code><Message>The specified bucket is not valid.</Message>...</Error>
 def test_bucket_create_naming_good_long_250():
     _test_bucket_create_naming_good_long(250)

@@ -3062,6 +3014,7 @@ def test_bucket_create_naming_good_long_250():
 @attr(method='put')
 @attr(operation='create w/251 byte name')
 @attr(assertion='fails with subdomain')
+@attr('fails_on_aws') # <Error><Code>InvalidBucketName</Code><Message>The specified bucket is not valid.</Message>...</Error>
 def test_bucket_create_naming_good_long_251():
     _test_bucket_create_naming_good_long(251)

@@ -3072,6 +3025,7 @@ def test_bucket_create_naming_good_long_251():
 @attr(method='put')
 @attr(operation='create w/252 byte name')
 @attr(assertion='fails with subdomain')
+@attr('fails_on_aws') # <Error><Code>InvalidBucketName</Code><Message>The specified bucket is not valid.</Message>...</Error>
 def test_bucket_create_naming_good_long_252():
     _test_bucket_create_naming_good_long(252)

@@ -3111,6 +3065,7 @@ def test_bucket_create_naming_good_long_255():
 @attr(method='get')
 @attr(operation='list w/251 byte name')
 @attr(assertion='fails with subdomain')
+@attr('fails_on_aws') # <Error><Code>InvalidBucketName</Code><Message>The specified bucket is not valid.</Message>...</Error>
 def test_bucket_list_long_name():
     prefix = get_new_bucket_name()
     length = 251
@@ -3151,6 +3106,7 @@ def test_bucket_create_naming_bad_punctuation():
 @attr(method='put')
 @attr(operation='create w/underscore in name')
 @attr(assertion='succeeds')
+@attr('fails_on_aws') # <Error><Code>InvalidBucketName</Code><Message>The specified bucket is not valid.</Message>...</Error>
 def test_bucket_create_naming_dns_underscore():
     check_good_bucket_name('foo_bar')

@@ -3161,6 +3117,7 @@ def test_bucket_create_naming_dns_underscore():
 @attr(method='put')
 @attr(operation='create w/100 byte name')
 @attr(assertion='fails with subdomain')
+@attr('fails_on_aws') # <Error><Code>InvalidBucketName</Code><Message>The specified bucket is not valid.</Message>...</Error>
 def test_bucket_create_naming_dns_long():
     prefix = get_prefix()
     assert len(prefix) < 50
@@ -3174,6 +3131,7 @@ def test_bucket_create_naming_dns_long():
 @attr(method='put')
 @attr(operation='create w/dash at end of name')
 @attr(assertion='fails with subdomain')
+@attr('fails_on_aws') # <Error><Code>InvalidBucketName</Code><Message>The specified bucket is not valid.</Message>...</Error>
 def test_bucket_create_naming_dns_dash_at_end():
     check_good_bucket_name('foo-')

@@ -3184,6 +3142,7 @@ def test_bucket_create_naming_dns_dash_at_end():
 @attr(method='put')
 @attr(operation='create w/.. in name')
 @attr(assertion='fails with subdomain')
+@attr('fails_on_aws') # <Error><Code>InvalidBucketName</Code><Message>The specified bucket is not valid.</Message>...</Error>
 def test_bucket_create_naming_dns_dot_dot():
     check_good_bucket_name('foo..bar')

@@ -3194,6 +3153,7 @@ def test_bucket_create_naming_dns_dot_dot():
 @attr(method='put')
 @attr(operation='create w/.- in name')
 @attr(assertion='fails with subdomain')
+@attr('fails_on_aws') # <Error><Code>InvalidBucketName</Code><Message>The specified bucket is not valid.</Message>...</Error>
 def test_bucket_create_naming_dns_dot_dash():
     check_good_bucket_name('foo.-bar')

@@ -3204,6 +3164,7 @@ def test_bucket_create_naming_dns_dot_dash():
 @attr(method='put')
 @attr(operation='create w/-. in name')
 @attr(assertion='fails with subdomain')
+@attr('fails_on_aws') # <Error><Code>InvalidBucketName</Code><Message>The specified bucket is not valid.</Message>...</Error>
 def test_bucket_create_naming_dns_dash_dot():
     check_good_bucket_name('foo-.bar')

@@ -3286,6 +3247,7 @@ def test_bucket_acl_default():
 @attr(method='get')
 @attr(operation='public-read acl')
 @attr(assertion='read back expected defaults')
+@attr('fails_on_aws') # <Error><Code>IllegalLocationConstraintException</Code><Message>The unspecified location constraint is incompatible for the region specific endpoint this request was sent to.</Message>
 def test_bucket_acl_canned_during_create():
     name = get_new_bucket_name()
     bucket = targets.main.default.connection.create_bucket(name, policy = 'public-read')
@@ -3714,6 +3676,7 @@ def test_object_acl_canned_bucketownerfullcontrol():
 @attr(method='put')
 @attr(operation='set write-acp')
 @attr(assertion='does not modify owner')
+@attr('fails_on_aws') # <Error><Code>InvalidArgument</Code><Message>Invalid id</Message><ArgumentName>CanonicalUser/ID</ArgumentName><ArgumentValue>${ALTUSER}</ArgumentValue>
 def test_object_acl_full_control_verify_owner():
     bucket = get_new_bucket(targets.main.default)
     bucket.set_acl('public-read-write')
@@ -3804,6 +3767,7 @@ def _build_bucket_acl_xml(permission, bucket=None):
 @attr(method='ACLs')
 @attr(operation='set acl FULL_CONTROL (xml)')
 @attr(assertion='reads back correctly')
+@attr('fails_on_aws') # <Error><Code>InvalidArgument</Code><Message>Invalid id</Message><ArgumentName>CanonicalUser/ID</ArgumentName><ArgumentValue>${USER}</ArgumentValue>
 def test_bucket_acl_xml_fullcontrol():
     _build_bucket_acl_xml('FULL_CONTROL')

@@ -3812,6 +3776,7 @@ def test_bucket_acl_xml_fullcontrol():
 @attr(method='ACLs')
 @attr(operation='set acl WRITE (xml)')
 @attr(assertion='reads back correctly')
+@attr('fails_on_aws') # <Error><Code>InvalidArgument</Code><Message>Invalid id</Message><ArgumentName>CanonicalUser/ID</ArgumentName><ArgumentValue>${USER}</ArgumentValue>
 def test_bucket_acl_xml_write():
     _build_bucket_acl_xml('WRITE')

@@ -3820,6 +3785,7 @@ def test_bucket_acl_xml_write():
 @attr(method='ACLs')
 @attr(operation='set acl WRITE_ACP (xml)')
 @attr(assertion='reads back correctly')
+@attr('fails_on_aws') # <Error><Code>InvalidArgument</Code><Message>Invalid id</Message><ArgumentName>CanonicalUser/ID</ArgumentName><ArgumentValue>${USER}</ArgumentValue>
 def test_bucket_acl_xml_writeacp():
     _build_bucket_acl_xml('WRITE_ACP')

@@ -3828,6 +3794,7 @@ def test_bucket_acl_xml_writeacp():
 @attr(method='ACLs')
 @attr(operation='set acl READ (xml)')
 @attr(assertion='reads back correctly')
+@attr('fails_on_aws') # <Error><Code>InvalidArgument</Code><Message>Invalid id</Message><ArgumentName>CanonicalUser/ID</ArgumentName><ArgumentValue>${USER}</ArgumentValue>
 def test_bucket_acl_xml_read():
     _build_bucket_acl_xml('READ')

@@ -3836,6 +3803,7 @@ def test_bucket_acl_xml_read():
 @attr(method='ACLs')
 @attr(operation='set acl READ_ACP (xml)')
 @attr(assertion='reads back correctly')
+@attr('fails_on_aws') # <Error><Code>InvalidArgument</Code><Message>Invalid id</Message><ArgumentName>CanonicalUser/ID</ArgumentName><ArgumentValue>${USER}</ArgumentValue>
 def test_bucket_acl_xml_readacp():
     _build_bucket_acl_xml('READ_ACP')

@@ -3874,6 +3842,7 @@ def _build_object_acl_xml(permission):
 @attr(method='ACLs')
 @attr(operation='set acl FULL_CONTROL (xml)')
 @attr(assertion='reads back correctly')
+@attr('fails_on_aws') # <Error><Code>InvalidArgument</Code><Message>Invalid id</Message><ArgumentName>CanonicalUser/ID</ArgumentName><ArgumentValue>${USER}</ArgumentValue>
 def test_object_acl_xml():
     _build_object_acl_xml('FULL_CONTROL')

@@ -3882,6 +3851,7 @@ def test_object_acl_xml():
 @attr(method='ACLs')
 @attr(operation='set acl WRITE (xml)')
 @attr(assertion='reads back correctly')
+@attr('fails_on_aws') # <Error><Code>InvalidArgument</Code><Message>Invalid id</Message><ArgumentName>CanonicalUser/ID</ArgumentName><ArgumentValue>${USER}</ArgumentValue>
 def test_object_acl_xml_write():
     _build_object_acl_xml('WRITE')

@@ -3890,6 +3860,7 @@ def test_object_acl_xml_write():
 @attr(method='ACLs')
 @attr(operation='set acl WRITE_ACP (xml)')
 @attr(assertion='reads back correctly')
+@attr('fails_on_aws') # <Error><Code>InvalidArgument</Code><Message>Invalid id</Message><ArgumentName>CanonicalUser/ID</ArgumentName><ArgumentValue>${USER}</ArgumentValue>
 def test_object_acl_xml_writeacp():
     _build_object_acl_xml('WRITE_ACP')

@@ -3898,6 +3869,7 @@ def test_object_acl_xml_writeacp():
 @attr(method='ACLs')
 @attr(operation='set acl READ (xml)')
 @attr(assertion='reads back correctly')
+@attr('fails_on_aws') # <Error><Code>InvalidArgument</Code><Message>Invalid id</Message><ArgumentName>CanonicalUser/ID</ArgumentName><ArgumentValue>${USER}</ArgumentValue>
 def test_object_acl_xml_read():
     _build_object_acl_xml('READ')

@@ -3906,6 +3878,7 @@ def test_object_acl_xml_read():
 @attr(method='ACLs')
 @attr(operation='set acl READ_ACP (xml)')
 @attr(assertion='reads back correctly')
+@attr('fails_on_aws') # <Error><Code>InvalidArgument</Code><Message>Invalid id</Message><ArgumentName>CanonicalUser/ID</ArgumentName><ArgumentValue>${USER}</ArgumentValue>
 def test_object_acl_xml_readacp():
     _build_object_acl_xml('READ_ACP')

@@ -4014,6 +3987,7 @@ def _check_bucket_acl_grant_cant_writeacp(bucket):
 @attr(method='ACLs')
 @attr(operation='set acl w/userid FULL_CONTROL')
 @attr(assertion='can read/write data/acls')
+@attr('fails_on_aws') # <Error><Code>InvalidArgument</Code><Message>Invalid id</Message><ArgumentName>CanonicalUser/ID</ArgumentName><ArgumentValue>${USER}</ArgumentValue>
 def test_bucket_acl_grant_userid_fullcontrol():
     bucket = _bucket_acl_grant_userid('FULL_CONTROL')

@@ -4037,6 +4011,7 @@ def test_bucket_acl_grant_userid_fullcontrol():
 @attr(method='ACLs')
 @attr(operation='set acl w/userid READ')
 @attr(assertion='can read data, no other r/w')
+@attr('fails_on_aws') # <Error><Code>InvalidArgument</Code><Message>Invalid id</Message><ArgumentName>CanonicalUser/ID</ArgumentName><ArgumentValue>${ALTUSER}</ArgumentValue>
 def test_bucket_acl_grant_userid_read():
     bucket = _bucket_acl_grant_userid('READ')

@@ -4054,6 +4029,7 @@ def test_bucket_acl_grant_userid_read():
 @attr(method='ACLs')
 @attr(operation='set acl w/userid READ_ACP')
 @attr(assertion='can read acl, no other r/w')
+@attr('fails_on_aws') # <Error><Code>InvalidArgument</Code><Message>Invalid id</Message><ArgumentName>CanonicalUser/ID</ArgumentName><ArgumentValue>${ALTUSER}</ArgumentValue>
 def test_bucket_acl_grant_userid_readacp():
     bucket = _bucket_acl_grant_userid('READ_ACP')

@@ -4071,6 +4047,7 @@ def test_bucket_acl_grant_userid_readacp():
 @attr(method='ACLs')
 @attr(operation='set acl w/userid WRITE')
 @attr(assertion='can write data, no other r/w')
+@attr('fails_on_aws') # <Error><Code>InvalidArgument</Code><Message>Invalid id</Message><ArgumentName>CanonicalUser/ID</ArgumentName><ArgumentValue>${ALTUSER}</ArgumentValue>
 def test_bucket_acl_grant_userid_write():
     bucket = _bucket_acl_grant_userid('WRITE')

@@ -4088,6 +4065,7 @@ def test_bucket_acl_grant_userid_write():
 @attr(method='ACLs')
 @attr(operation='set acl w/userid WRITE_ACP')
 @attr(assertion='can write acls, no other r/w')
+@attr('fails_on_aws') # <Error><Code>InvalidArgument</Code><Message>Invalid id</Message><ArgumentName>CanonicalUser/ID</ArgumentName><ArgumentValue>${ALTUSER}</ArgumentValue>
 def test_bucket_acl_grant_userid_writeacp():
     bucket = _bucket_acl_grant_userid('WRITE_ACP')

@@ -4171,6 +4149,7 @@ def _get_acl_header(user=None, perms=None):
 @attr(operation='add all grants to user through headers')
 @attr(assertion='adds all grants individually to second user')
 @attr('fails_on_dho')
+@attr('fails_on_aws') # <Error><Code>InvalidArgument</Code><Message>Invalid id</Message><ArgumentName>CanonicalUser/ID</ArgumentName><ArgumentValue>${ALTUSER}</ArgumentValue>
 def test_object_header_acl_grants():
     bucket = get_new_bucket()
     headers = _get_acl_header()
@@ -4230,6 +4209,7 @@ def test_object_header_acl_grants():
 @attr(operation='add all grants to user through headers')
 @attr(assertion='adds all grants individually to second user')
 @attr('fails_on_dho')
+@attr('fails_on_aws') # <Error><Code>InvalidArgument</Code><Message>Invalid id</Message><ArgumentName>CanonicalUser/ID</ArgumentName><ArgumentValue>${ALTUSER}</ArgumentValue>
 def test_bucket_header_acl_grants():
     headers = _get_acl_header()
     bucket = get_new_bucket(targets.main.default, get_prefix(), headers)
@@ -4294,6 +4274,7 @@ def test_bucket_header_acl_grants():
 @attr(method='ACLs')
 @attr(operation='add second FULL_CONTROL user')
 @attr(assertion='works for S3, fails for DHO')
+@attr('fails_on_aws') # <Error><Code>AmbiguousGrantByEmailAddress</Code><Message>The e-mail address you provided is associated with more than one account. Please retry your request using a different identification method or after resolving the ambiguity.</Message>
 def test_bucket_acl_grant_email():
     bucket = get_new_bucket()
     # add alt user
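The added fails_on_aws and fails_on_dho markers only tag tests that are known not to pass against the corresponding backend; a run against AWS would typically exclude them with nose's attribute filter, for example nosetests -a '!fails_on_aws' (the usual s3-tests invocation pattern, not something introduced by this commit).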
s3tests/functional/test_s3_website.py — new file, 1158 lines (file diff suppressed because it is too large)