Mirror of https://github.com/ceph/s3-tests.git (synced 2024-11-21 11:51:06 +00:00)

Revert "Passing sse-kms keys from configuration instead of hard coding in tests"

parent 65f3441636
commit c8eb31060a
2 changed files with 140 additions and 123 deletions
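The commit being reverted made the SSE-KMS tests read their key ids from the [s3 main] section of the test configuration (through helpers such as get_main_kms_keyid()); this revert puts the hard-coded 'testkey-1'/'testkey-2' values back inside the tests. As a rough, hedged sketch of the configuration lookup being backed out (standalone, outside the s3-tests harness; the section and option names come from the first hunk below, and the modern configparser module stands in for the Python 2 ConfigParser the repository used at the time):

    # Sketch only -- not the repository's code. Mirrors the cfg.get('s3 main', ...)
    # lookup visible in the setup() hunk; 'testkey-1'/'testkey-2' are the hard-coded
    # fallbacks the tests return to after this revert.
    import configparser

    def load_kms_keyids(path='s3tests.conf'):
        cfg = configparser.ConfigParser()
        cfg.read(path)
        try:
            main_keyid = cfg.get('s3 main', 'kms_keyid')
        except (configparser.NoSectionError, configparser.NoOptionError):
            main_keyid = 'testkey-1'
        try:
            secondary_keyid = cfg.get('s3 main', 'kms_keyid2')
        except (configparser.NoSectionError, configparser.NoOptionError):
            secondary_keyid = 'testkey-2'
        return main_keyid, secondary_keyid

The hunks below against the harness's setup() and helper functions reduce that lookup to the main key only (with a None fallback) and remove the secondary-key helper.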
@@ -169,11 +169,8 @@ def setup():
     try:
         config.main_kms_keyid = cfg.get('s3 main',"kms_keyid")
     except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
-        config.main_kms_keyid = 'testkey-1'
-    try:
-        config.main_kms_keyid2 = cfg.get('s3 main',"kms_keyid2")
-    except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
-        config.main_kms_keyid2 = 'testkey-2'
+        config.main_kms_keyid = None
+        pass
 
     try:
         config.main_api_name = cfg.get('s3 main',"api_name")
@@ -299,7 +296,7 @@ def get_new_bucket_resource(name=None):
     Always recreates a bucket from scratch. This is useful to also
     reset ACLs and such.
     """
-    s3 = boto3.resource('s3',
+    s3 = boto3.resource('s3',
                         aws_access_key_id=config.main_access_key,
                         aws_secret_access_key=config.main_secret_key,
                         endpoint_url=config.default_endpoint,
@@ -359,9 +356,6 @@ def get_main_api_name():
 def get_main_kms_keyid():
     return config.main_kms_keyid
 
-def get_secondary_kms_keyid():
-    return config.main_kms_keyid2
-
 def get_alt_aws_access_key():
     return config.alt_access_key
 
@@ -68,7 +68,6 @@ from . import (
     get_buckets_list,
     get_objects_list,
     get_main_kms_keyid,
-    get_secondary_kms_keyid,
     nuke_prefixed_buckets,
     )
 
@@ -86,7 +85,7 @@ def _bucket_is_empty(bucket):
 @attr(assertion='empty buckets return no contents')
 def test_bucket_list_empty():
     bucket = get_new_bucket_resource()
-    is_empty = _bucket_is_empty(bucket)
+    is_empty = _bucket_is_empty(bucket)
     eq(is_empty, True)
 
 @attr(resource='bucket')
@@ -97,9 +96,9 @@ def test_bucket_list_distinct():
     bucket1 = get_new_bucket_resource()
     bucket2 = get_new_bucket_resource()
     obj = bucket1.put_object(Body='str', Key='asdf')
-    is_empty = _bucket_is_empty(bucket2)
+    is_empty = _bucket_is_empty(bucket2)
     eq(is_empty, True)
 
 
 def _create_objects(bucket=None, bucket_name=None, keys=[]):
     """
     Populate a (specified or new) bucket with objects with
@@ -192,8 +191,8 @@ def test_basic_key_count():
     response1 = client.list_objects_v2(Bucket=bucket_name)
     eq(response1['KeyCount'], 5)
 
 
 
 @attr(resource='bucket')
 @attr(method='get')
 @attr(operation='list')
@ -1497,7 +1496,7 @@ def _compare_dates(datetime1, datetime2):
|
|||
"""
|
||||
changes ms from datetime1 to 0, compares it to datetime2
|
||||
"""
|
||||
# both times are in datetime format but datetime1 has
|
||||
# both times are in datetime format but datetime1 has
|
||||
# microseconds and datetime2 does not
|
||||
datetime1 = datetime1.replace(microsecond=0)
|
||||
eq(datetime1, datetime2)
|
||||
|
@ -1605,7 +1604,7 @@ def test_bucket_list_return_data_versioning():
|
|||
@attr(operation='list all objects (anonymous)')
|
||||
@attr(assertion='succeeds')
|
||||
def test_bucket_list_objects_anonymous():
|
||||
bucket_name = get_new_bucket()
|
||||
bucket_name = get_new_bucket()
|
||||
client = get_client()
|
||||
client.put_bucket_acl(Bucket=bucket_name, ACL='public-read')
|
||||
|
||||
|
@ -1618,7 +1617,7 @@ def test_bucket_list_objects_anonymous():
|
|||
@attr(assertion='succeeds')
|
||||
@attr('list-objects-v2')
|
||||
def test_bucket_listv2_objects_anonymous():
|
||||
bucket_name = get_new_bucket()
|
||||
bucket_name = get_new_bucket()
|
||||
client = get_client()
|
||||
client.put_bucket_acl(Bucket=bucket_name, ACL='public-read')
|
||||
|
||||
|
@ -1630,7 +1629,7 @@ def test_bucket_listv2_objects_anonymous():
|
|||
@attr(operation='list all objects (anonymous)')
|
||||
@attr(assertion='fails')
|
||||
def test_bucket_list_objects_anonymous_fail():
|
||||
bucket_name = get_new_bucket()
|
||||
bucket_name = get_new_bucket()
|
||||
|
||||
unauthenticated_client = get_unauthenticated_client()
|
||||
e = assert_raises(ClientError, unauthenticated_client.list_objects, Bucket=bucket_name)
|
||||
|
@ -1645,7 +1644,7 @@ def test_bucket_list_objects_anonymous_fail():
|
|||
@attr(assertion='fails')
|
||||
@attr('list-objects-v2')
|
||||
def test_bucket_listv2_objects_anonymous_fail():
|
||||
bucket_name = get_new_bucket()
|
||||
bucket_name = get_new_bucket()
|
||||
|
||||
unauthenticated_client = get_unauthenticated_client()
|
||||
e = assert_raises(ClientError, unauthenticated_client.list_objects_v2, Bucket=bucket_name)
|
||||
|
@ -1659,7 +1658,7 @@ def test_bucket_listv2_objects_anonymous_fail():
|
|||
@attr(operation='non-existant bucket')
|
||||
@attr(assertion='fails 404')
|
||||
def test_bucket_notexist():
|
||||
bucket_name = get_new_bucket_name()
|
||||
bucket_name = get_new_bucket_name()
|
||||
client = get_client()
|
||||
|
||||
e = assert_raises(ClientError, client.list_objects, Bucket=bucket_name)
|
||||
|
@ -1674,7 +1673,7 @@ def test_bucket_notexist():
|
|||
@attr(assertion='fails 404')
|
||||
@attr('list-objects-v2')
|
||||
def test_bucketv2_notexist():
|
||||
bucket_name = get_new_bucket_name()
|
||||
bucket_name = get_new_bucket_name()
|
||||
client = get_client()
|
||||
|
||||
e = assert_raises(ClientError, client.list_objects_v2, Bucket=bucket_name)
|
||||
|
@ -1688,7 +1687,7 @@ def test_bucketv2_notexist():
|
|||
@attr(operation='non-existant bucket')
|
||||
@attr(assertion='fails 404')
|
||||
def test_bucket_delete_notexist():
|
||||
bucket_name = get_new_bucket_name()
|
||||
bucket_name = get_new_bucket_name()
|
||||
client = get_client()
|
||||
|
||||
e = assert_raises(ClientError, client.delete_bucket, Bucket=bucket_name)
|
||||
|
@ -1798,7 +1797,7 @@ def test_object_read_notexist():
|
|||
http_response = None
|
||||
|
||||
def get_http_response(**kwargs):
|
||||
global http_response
|
||||
global http_response
|
||||
http_response = kwargs['http_response'].__dict__
|
||||
|
||||
@attr(resource='object')
|
||||
|
@ -1835,16 +1834,16 @@ def test_multi_object_delete():
|
|||
client = get_client()
|
||||
response = client.list_objects(Bucket=bucket_name)
|
||||
eq(len(response['Contents']), 3)
|
||||
|
||||
|
||||
objs_dict = _make_objs_dict(key_names=key_names)
|
||||
response = client.delete_objects(Bucket=bucket_name, Delete=objs_dict)
|
||||
response = client.delete_objects(Bucket=bucket_name, Delete=objs_dict)
|
||||
|
||||
eq(len(response['Deleted']), 3)
|
||||
assert 'Errors' not in response
|
||||
response = client.list_objects(Bucket=bucket_name)
|
||||
assert 'Contents' not in response
|
||||
|
||||
response = client.delete_objects(Bucket=bucket_name, Delete=objs_dict)
|
||||
response = client.delete_objects(Bucket=bucket_name, Delete=objs_dict)
|
||||
eq(len(response['Deleted']), 3)
|
||||
assert 'Errors' not in response
|
||||
response = client.list_objects(Bucket=bucket_name)
|
||||
|
@ -1861,16 +1860,16 @@ def test_multi_objectv2_delete():
|
|||
client = get_client()
|
||||
response = client.list_objects_v2(Bucket=bucket_name)
|
||||
eq(len(response['Contents']), 3)
|
||||
|
||||
|
||||
objs_dict = _make_objs_dict(key_names=key_names)
|
||||
response = client.delete_objects(Bucket=bucket_name, Delete=objs_dict)
|
||||
response = client.delete_objects(Bucket=bucket_name, Delete=objs_dict)
|
||||
|
||||
eq(len(response['Deleted']), 3)
|
||||
assert 'Errors' not in response
|
||||
response = client.list_objects_v2(Bucket=bucket_name)
|
||||
assert 'Contents' not in response
|
||||
|
||||
response = client.delete_objects(Bucket=bucket_name, Delete=objs_dict)
|
||||
response = client.delete_objects(Bucket=bucket_name, Delete=objs_dict)
|
||||
eq(len(response['Deleted']), 3)
|
||||
assert 'Errors' not in response
|
||||
response = client.list_objects_v2(Bucket=bucket_name)
|
||||
|
@ -3313,7 +3312,7 @@ def test_get_object_ifmodifiedsince_failed():
|
|||
client.put_object(Bucket=bucket_name, Key='foo', Body='bar')
|
||||
response = client.get_object(Bucket=bucket_name, Key='foo')
|
||||
last_modified = str(response['LastModified'])
|
||||
|
||||
|
||||
last_modified = last_modified.split('+')[0]
|
||||
mtime = datetime.datetime.strptime(last_modified, '%Y-%m-%d %H:%M:%S')
|
||||
|
||||
|
@ -3549,7 +3548,7 @@ def _setup_bucket_object_acl(bucket_acl, object_acl):
|
|||
client.create_bucket(ACL=bucket_acl, Bucket=bucket_name)
|
||||
client.put_object(ACL=object_acl, Bucket=bucket_name, Key='foo')
|
||||
|
||||
return bucket_name
|
||||
return bucket_name
|
||||
|
||||
def _setup_bucket_acl(bucket_acl=None):
|
||||
"""
|
||||
|
@ -4074,9 +4073,9 @@ def test_bucket_list_long_name():
|
|||
name=name,
|
||||
)
|
||||
bucket = get_new_bucket_resource(name=bucket_name)
|
||||
is_empty = _bucket_is_empty(bucket)
|
||||
is_empty = _bucket_is_empty(bucket)
|
||||
eq(is_empty, True)
|
||||
|
||||
|
||||
# AWS does not enforce all documented bucket restrictions.
|
||||
# http://docs.amazonwebservices.com/AmazonS3/2006-03-01/dev/index.html?BucketRestrictions.html
|
||||
@attr('fails_on_aws')
|
||||
|
@ -4221,7 +4220,7 @@ def test_bucket_get_location():
|
|||
if location_constraint == "":
|
||||
location_constraint = None
|
||||
eq(response['LocationConstraint'], location_constraint)
|
||||
|
||||
|
||||
@attr(resource='bucket')
|
||||
@attr(method='put')
|
||||
@attr(operation='re-create by non-owner')
|
||||
|
@ -4274,7 +4273,7 @@ def test_bucket_acl_default():
|
|||
|
||||
display_name = get_main_display_name()
|
||||
user_id = get_main_user_id()
|
||||
|
||||
|
||||
eq(response['Owner']['DisplayName'], display_name)
|
||||
eq(response['Owner']['ID'], user_id)
|
||||
|
||||
|
@ -4306,7 +4305,7 @@ def test_bucket_acl_canned_during_create():
|
|||
|
||||
display_name = get_main_display_name()
|
||||
user_id = get_main_user_id()
|
||||
|
||||
|
||||
grants = response['Grants']
|
||||
check_grants(
|
||||
grants,
|
||||
|
@ -4342,7 +4341,7 @@ def test_bucket_acl_canned():
|
|||
|
||||
display_name = get_main_display_name()
|
||||
user_id = get_main_user_id()
|
||||
|
||||
|
||||
grants = response['Grants']
|
||||
check_grants(
|
||||
grants,
|
||||
|
@ -4396,6 +4395,7 @@ def test_bucket_acl_canned_publicreadwrite():
|
|||
|
||||
display_name = get_main_display_name()
|
||||
user_id = get_main_user_id()
|
||||
|
||||
grants = response['Grants']
|
||||
check_grants(
|
||||
grants,
|
||||
|
@ -4439,7 +4439,7 @@ def test_bucket_acl_canned_authenticatedread():
|
|||
|
||||
display_name = get_main_display_name()
|
||||
user_id = get_main_user_id()
|
||||
|
||||
|
||||
grants = response['Grants']
|
||||
check_grants(
|
||||
grants,
|
||||
|
@ -4477,7 +4477,7 @@ def test_object_acl_default():
|
|||
display_name = get_main_display_name()
|
||||
user_id = get_main_user_id()
|
||||
|
||||
|
||||
|
||||
grants = response['Grants']
|
||||
check_grants(
|
||||
grants,
|
||||
|
@ -4507,7 +4507,7 @@ def test_object_acl_canned_during_create():
|
|||
display_name = get_main_display_name()
|
||||
user_id = get_main_user_id()
|
||||
|
||||
|
||||
|
||||
grants = response['Grants']
|
||||
check_grants(
|
||||
grants,
|
||||
|
@ -4680,7 +4680,7 @@ def test_object_acl_canned_bucketownerread():
|
|||
alt_client = get_alt_client()
|
||||
|
||||
main_client.create_bucket(Bucket=bucket_name, ACL='public-read-write')
|
||||
|
||||
|
||||
alt_client.put_object(Bucket=bucket_name, Key='foo', Body='bar')
|
||||
|
||||
bucket_acl_response = main_client.get_bucket_acl(Bucket=bucket_name)
|
||||
|
@ -4726,7 +4726,7 @@ def test_object_acl_canned_bucketownerfullcontrol():
|
|||
alt_client = get_alt_client()
|
||||
|
||||
main_client.create_bucket(Bucket=bucket_name, ACL='public-read-write')
|
||||
|
||||
|
||||
alt_client.put_object(Bucket=bucket_name, Key='foo', Body='bar')
|
||||
|
||||
bucket_acl_response = main_client.get_bucket_acl(Bucket=bucket_name)
|
||||
|
@ -4773,7 +4773,7 @@ def test_object_acl_full_control_verify_owner():
|
|||
alt_client = get_alt_client()
|
||||
|
||||
main_client.create_bucket(Bucket=bucket_name, ACL='public-read-write')
|
||||
|
||||
|
||||
main_client.put_object(Bucket=bucket_name, Key='foo', Body='bar')
|
||||
|
||||
alt_user_id = get_alt_user_id()
|
||||
|
@ -4785,7 +4785,7 @@ def test_object_acl_full_control_verify_owner():
|
|||
grant = { 'Grants': [{'Grantee': {'ID': alt_user_id, 'Type': 'CanonicalUser' }, 'Permission': 'FULL_CONTROL'}], 'Owner': {'DisplayName': main_display_name, 'ID': main_user_id}}
|
||||
|
||||
main_client.put_object_acl(Bucket=bucket_name, Key='foo', AccessControlPolicy=grant)
|
||||
|
||||
|
||||
grant = { 'Grants': [{'Grantee': {'ID': alt_user_id, 'Type': 'CanonicalUser' }, 'Permission': 'READ_ACP'}], 'Owner': {'DisplayName': main_display_name, 'ID': main_user_id}}
|
||||
|
||||
alt_client.put_object_acl(Bucket=bucket_name, Key='foo', AccessControlPolicy=grant)
|
||||
|
@ -4800,7 +4800,7 @@ def add_obj_user_grant(bucket_name, key, grant):
|
|||
owned by the main user, not the alt user
|
||||
A grant is a dictionary in the form of:
|
||||
{u'Grantee': {u'Type': 'type', u'DisplayName': 'name', u'ID': 'id'}, u'Permission': 'PERM'}
|
||||
|
||||
|
||||
"""
|
||||
client = get_client()
|
||||
main_user_id = get_main_user_id()
|
||||
|
@ -4825,7 +4825,7 @@ def test_object_acl_full_control_verify_attributes():
|
|||
alt_client = get_alt_client()
|
||||
|
||||
main_client.create_bucket(Bucket=bucket_name, ACL='public-read-write')
|
||||
|
||||
|
||||
header = {'x-amz-foo': 'bar'}
|
||||
# lambda to add any header
|
||||
add_header = (lambda **kwargs: kwargs['params']['headers'].update(header))
|
||||
|
@ -4883,7 +4883,7 @@ def add_bucket_user_grant(bucket_name, grant):
|
|||
|
||||
def _check_object_acl(permission):
|
||||
"""
|
||||
Sets the permission on an object then checks to see
|
||||
Sets the permission on an object then checks to see
|
||||
if it was set
|
||||
"""
|
||||
bucket_name = get_new_bucket()
|
||||
|
@ -5267,7 +5267,7 @@ def test_object_header_acl_grants():
|
|||
client.put_object(Bucket=bucket_name, Key='foo_key', Body='bar')
|
||||
|
||||
response = client.get_object_acl(Bucket=bucket_name, Key='foo_key')
|
||||
|
||||
|
||||
grants = response['Grants']
|
||||
check_grants(
|
||||
grants,
|
||||
|
@ -5337,7 +5337,7 @@ def test_bucket_header_acl_grants():
|
|||
client.create_bucket(Bucket=bucket_name)
|
||||
|
||||
response = client.get_bucket_acl(Bucket=bucket_name)
|
||||
|
||||
|
||||
grants = response['Grants']
|
||||
alt_user_id = get_alt_user_id()
|
||||
alt_display_name = get_alt_display_name()
|
||||
|
@ -5394,7 +5394,7 @@ def test_bucket_header_acl_grants():
|
|||
|
||||
# set bucket acl to public-read-write so that teardown can work
|
||||
alt_client.put_bucket_acl(Bucket=bucket_name, ACL='public-read-write')
|
||||
|
||||
|
||||
|
||||
# This test will fail on DH Objects. DHO allows multiple users with one account, which
|
||||
# would violate the uniqueness requirement of a user's email. As such, DHO users are
|
||||
|
@ -5422,7 +5422,7 @@ def test_bucket_acl_grant_email():
|
|||
client.put_bucket_acl(Bucket=bucket_name, AccessControlPolicy = grant)
|
||||
|
||||
response = client.get_bucket_acl(Bucket=bucket_name)
|
||||
|
||||
|
||||
grants = response['Grants']
|
||||
check_grants(
|
||||
grants,
|
||||
|
@ -5563,7 +5563,7 @@ def test_access_bucket_private_object_private():
|
|||
|
||||
# acled object write fail
|
||||
check_access_denied(alt_client.put_object, Bucket=bucket_name, Key=key1, Body='barcontent')
|
||||
# NOTE: The above put's causes the connection to go bad, therefore the client can't be used
|
||||
# NOTE: The above put's causes the connection to go bad, therefore the client can't be used
|
||||
# anymore. This can be solved either by:
|
||||
# 1) putting an empty string ('') in the 'Body' field of those put_object calls
|
||||
# 2) getting a new client hence the creation of alt_client{2,3} for the tests below
|
||||
|
@ -5595,7 +5595,7 @@ def test_access_bucket_private_objectv2_private():
|
|||
|
||||
# acled object write fail
|
||||
check_access_denied(alt_client.put_object, Bucket=bucket_name, Key=key1, Body='barcontent')
|
||||
# NOTE: The above put's causes the connection to go bad, therefore the client can't be used
|
||||
# NOTE: The above put's causes the connection to go bad, therefore the client can't be used
|
||||
# anymore. This can be solved either by:
|
||||
# 1) putting an empty string ('') in the 'Body' field of those put_object calls
|
||||
# 2) getting a new client hence the creation of alt_client{2,3} for the tests below
|
||||
|
@ -6255,7 +6255,7 @@ def test_object_copy_key_not_found():
|
|||
e = assert_raises(ClientError, client.copy, copy_source, bucket_name, 'bar321foo')
|
||||
status = _get_status(e.response)
|
||||
eq(status, 404)
|
||||
|
||||
|
||||
@attr(resource='object')
|
||||
@attr(method='put')
|
||||
@attr(operation='copy object to/from versioned bucket')
|
||||
|
@ -6528,7 +6528,7 @@ def _multipart_copy(src_bucket_name, src_key, dest_bucket_name, dest_key, size,
|
|||
for start_offset in range(0, size, part_size):
|
||||
end_offset = min(start_offset + part_size - 1, size - 1)
|
||||
part_num = i+1
|
||||
copy_source_range = 'bytes={start}-{end}'.format(start=start_offset, end=end_offset)
|
||||
copy_source_range = 'bytes={start}-{end}'.format(start=start_offset, end=end_offset)
|
||||
response = client.upload_part_copy(Bucket=dest_bucket_name, Key=dest_key, CopySource=copy_source, PartNumber=part_num, UploadId=upload_id, CopySourceRange=copy_source_range)
|
||||
parts.append({'ETag': response['CopyPartResult'][u'ETag'], 'PartNumber': part_num})
|
||||
i = i+1
|
||||
|
@ -6588,7 +6588,7 @@ def test_multipart_copy_invalid_range():
|
|||
upload_id = response['UploadId']
|
||||
|
||||
copy_source = {'Bucket': src_bucket_name, 'Key': src_key}
|
||||
copy_source_range = 'bytes={start}-{end}'.format(start=0, end=21)
|
||||
copy_source_range = 'bytes={start}-{end}'.format(start=0, end=21)
|
||||
|
||||
e = assert_raises(ClientError, client.upload_part_copy,Bucket=src_bucket_name, Key='dest', UploadId=upload_id, CopySource=copy_source, CopySourceRange=copy_source_range, PartNumber=1)
|
||||
status, error_code = _get_status_and_error_code(e.response)
|
||||
|
@ -6614,7 +6614,7 @@ def test_multipart_copy_without_range():
|
|||
|
||||
copy_source = {'Bucket': src_bucket_name, 'Key': src_key}
|
||||
part_num = 1
|
||||
copy_source_range = 'bytes={start}-{end}'.format(start=0, end=9)
|
||||
copy_source_range = 'bytes={start}-{end}'.format(start=0, end=9)
|
||||
|
||||
response = client.upload_part_copy(Bucket=dest_bucket_name, Key=dest_key, CopySource=copy_source, PartNumber=part_num, UploadId=upload_id)
|
||||
|
||||
|
@ -6624,7 +6624,7 @@ def test_multipart_copy_without_range():
|
|||
response = client.get_object(Bucket=dest_bucket_name, Key=dest_key)
|
||||
eq(response['ContentLength'], 10)
|
||||
_check_key_content(src_key, src_bucket_name, dest_key, dest_bucket_name)
|
||||
|
||||
|
||||
@attr(resource='object')
|
||||
@attr(method='put')
|
||||
@attr(operation='check multipart copies with single small part')
|
||||
|
@ -6819,7 +6819,7 @@ def test_multipart_upload_multiple_sizes():
|
|||
objlen = 10*1024*1024
|
||||
(upload_id, data, parts) = _multipart_upload(bucket_name=bucket_name, key=key, size=objlen)
|
||||
client.complete_multipart_upload(Bucket=bucket_name, Key=key, UploadId=upload_id, MultipartUpload={'Parts': parts})
|
||||
|
||||
|
||||
@attr(assertion='successful')
|
||||
def test_multipart_copy_multiple_sizes():
|
||||
src_key = 'foo'
|
||||
|
@ -6833,27 +6833,27 @@ def test_multipart_copy_multiple_sizes():
|
|||
(upload_id, parts) = _multipart_copy(src_bucket_name, src_key, dest_bucket_name, dest_key, size)
|
||||
client.complete_multipart_upload(Bucket=dest_bucket_name, Key=dest_key, UploadId=upload_id, MultipartUpload={'Parts': parts})
|
||||
_check_key_content(src_key, src_bucket_name, dest_key, dest_bucket_name)
|
||||
|
||||
|
||||
size = 5*1024*1024+100*1024
|
||||
(upload_id, parts) = _multipart_copy(src_bucket_name, src_key, dest_bucket_name, dest_key, size)
|
||||
client.complete_multipart_upload(Bucket=dest_bucket_name, Key=dest_key, UploadId=upload_id, MultipartUpload={'Parts': parts})
|
||||
_check_key_content(src_key, src_bucket_name, dest_key, dest_bucket_name)
|
||||
|
||||
|
||||
size = 5*1024*1024+600*1024
|
||||
(upload_id, parts) = _multipart_copy(src_bucket_name, src_key, dest_bucket_name, dest_key, size)
|
||||
client.complete_multipart_upload(Bucket=dest_bucket_name, Key=dest_key, UploadId=upload_id, MultipartUpload={'Parts': parts})
|
||||
_check_key_content(src_key, src_bucket_name, dest_key, dest_bucket_name)
|
||||
|
||||
|
||||
size = 10*1024*1024+100*1024
|
||||
(upload_id, parts) = _multipart_copy(src_bucket_name, src_key, dest_bucket_name, dest_key, size)
|
||||
client.complete_multipart_upload(Bucket=dest_bucket_name, Key=dest_key, UploadId=upload_id, MultipartUpload={'Parts': parts})
|
||||
_check_key_content(src_key, src_bucket_name, dest_key, dest_bucket_name)
|
||||
|
||||
|
||||
size = 10*1024*1024+600*1024
|
||||
(upload_id, parts) = _multipart_copy(src_bucket_name, src_key, dest_bucket_name, dest_key, size)
|
||||
client.complete_multipart_upload(Bucket=dest_bucket_name, Key=dest_key, UploadId=upload_id, MultipartUpload={'Parts': parts})
|
||||
_check_key_content(src_key, src_bucket_name, dest_key, dest_bucket_name)
|
||||
|
||||
|
||||
size = 10*1024*1024
|
||||
(upload_id, parts) = _multipart_copy(src_bucket_name, src_key, dest_bucket_name, dest_key, size)
|
||||
client.complete_multipart_upload(Bucket=dest_bucket_name, Key=dest_key, UploadId=upload_id, MultipartUpload={'Parts': parts})
|
||||
|
@ -6881,7 +6881,7 @@ def gen_rand_string(size, chars=string.ascii_uppercase + string.digits):
|
|||
def _do_test_multipart_upload_contents(bucket_name, key, num_parts):
|
||||
payload=gen_rand_string(5)*1024*1024
|
||||
client = get_client()
|
||||
|
||||
|
||||
response = client.create_multipart_upload(Bucket=bucket_name, Key=key)
|
||||
upload_id = response['UploadId']
|
||||
|
||||
|
@ -6928,7 +6928,7 @@ def test_multipart_upload_overwrite_existing_object():
|
|||
num_parts=2
|
||||
client.put_object(Bucket=bucket_name, Key=key, Body=payload)
|
||||
|
||||
|
||||
|
||||
response = client.create_multipart_upload(Bucket=bucket_name, Key=key)
|
||||
upload_id = response['UploadId']
|
||||
|
||||
|
@ -7127,7 +7127,7 @@ def test_set_cors():
|
|||
|
||||
cors_config ={
|
||||
'CORSRules': [
|
||||
{'AllowedMethods': allowed_methods,
|
||||
{'AllowedMethods': allowed_methods,
|
||||
'AllowedOrigins': allowed_origins,
|
||||
},
|
||||
]
|
||||
|
@ -7153,7 +7153,7 @@ def _cors_request_and_check(func, url, headers, expect_status, expect_allow_orig
|
|||
|
||||
assert r.headers.get('access-control-allow-origin', None) == expect_allow_origin
|
||||
assert r.headers.get('access-control-allow-methods', None) == expect_allow_methods
|
||||
|
||||
|
||||
@attr(resource='bucket')
|
||||
@attr(method='get')
|
||||
@attr(operation='check cors response when origin header set')
|
||||
|
@ -7164,16 +7164,16 @@ def test_cors_origin_response():
|
|||
|
||||
cors_config ={
|
||||
'CORSRules': [
|
||||
{'AllowedMethods': ['GET'],
|
||||
{'AllowedMethods': ['GET'],
|
||||
'AllowedOrigins': ['*suffix'],
|
||||
},
|
||||
{'AllowedMethods': ['GET'],
|
||||
{'AllowedMethods': ['GET'],
|
||||
'AllowedOrigins': ['start*end'],
|
||||
},
|
||||
{'AllowedMethods': ['GET'],
|
||||
{'AllowedMethods': ['GET'],
|
||||
'AllowedOrigins': ['prefix*'],
|
||||
},
|
||||
{'AllowedMethods': ['PUT'],
|
||||
{'AllowedMethods': ['PUT'],
|
||||
'AllowedOrigins': ['*.put'],
|
||||
}
|
||||
]
|
||||
|
@ -7243,7 +7243,7 @@ def test_cors_origin_wildcard():
|
|||
|
||||
cors_config ={
|
||||
'CORSRules': [
|
||||
{'AllowedMethods': ['GET'],
|
||||
{'AllowedMethods': ['GET'],
|
||||
'AllowedOrigins': ['*'],
|
||||
},
|
||||
]
|
||||
|
@ -7272,7 +7272,7 @@ def test_cors_header_option():
|
|||
|
||||
cors_config ={
|
||||
'CORSRules': [
|
||||
{'AllowedMethods': ['GET'],
|
||||
{'AllowedMethods': ['GET'],
|
||||
'AllowedOrigins': ['*'],
|
||||
'ExposeHeaders': ['x-amz-meta-header1'],
|
||||
},
|
||||
|
@ -7705,7 +7705,7 @@ def test_multipart_resend_first_finishes_last():
|
|||
|
||||
# clear parts
|
||||
parts[:] = []
|
||||
|
||||
|
||||
# ok, now for the actual test
|
||||
fp_b = FakeWriteFile(file_size, 'B')
|
||||
def upload_fp_b():
|
||||
|
@ -7886,7 +7886,7 @@ def create_multiple_versions(client, bucket_name, key, num_versions, version_ids
|
|||
version_ids.append(version_id)
|
||||
|
||||
if check_versions:
|
||||
check_obj_versions(client, bucket_name, key, version_ids, contents)
|
||||
check_obj_versions(client, bucket_name, key, version_ids, contents)
|
||||
|
||||
return (version_ids, contents)
|
||||
|
||||
|
@ -8106,13 +8106,13 @@ def overwrite_suspended_versioning_obj(client, bucket_name, key, version_ids, co
|
|||
version_ids.pop(i)
|
||||
contents.pop(i)
|
||||
i += 1
|
||||
|
||||
|
||||
# add new content with 'null' version id to the end
|
||||
contents.append(content)
|
||||
version_ids.append('null')
|
||||
|
||||
return (version_ids, contents)
|
||||
|
||||
|
||||
|
||||
@attr(resource='object')
|
||||
@attr(method='create')
|
||||
|
@ -8219,7 +8219,7 @@ def test_versioning_obj_create_overwrite_multipart():
|
|||
version_ids.append(version['VersionId'])
|
||||
|
||||
version_ids.reverse()
|
||||
check_obj_versions(client, bucket_name, key, version_ids, contents)
|
||||
check_obj_versions(client, bucket_name, key, version_ids, contents)
|
||||
|
||||
for idx in xrange(num_versions):
|
||||
remove_obj_version(client, bucket_name, key, version_ids, contents, idx)
|
||||
|
@ -8310,7 +8310,7 @@ def test_versioning_copy_obj_version():
|
|||
response = client.get_object(Bucket=bucket_name, Key=new_key_name)
|
||||
body = _get_body(response)
|
||||
eq(body, contents[i])
|
||||
|
||||
|
||||
another_bucket_name = get_new_bucket()
|
||||
|
||||
for i in xrange(num_versions):
|
||||
|
@ -8320,7 +8320,7 @@ def test_versioning_copy_obj_version():
|
|||
response = client.get_object(Bucket=another_bucket_name, Key=new_key_name)
|
||||
body = _get_body(response)
|
||||
eq(body, contents[i])
|
||||
|
||||
|
||||
new_key_name = 'new_key'
|
||||
copy_source = {'Bucket': bucket_name, 'Key': key}
|
||||
client.copy_object(Bucket=another_bucket_name, CopySource=copy_source, Key=new_key_name)
|
||||
|
@ -8453,7 +8453,7 @@ def test_versioned_object_acl():
|
|||
|
||||
display_name = get_main_display_name()
|
||||
user_id = get_main_user_id()
|
||||
|
||||
|
||||
eq(response['Owner']['DisplayName'], display_name)
|
||||
eq(response['Owner']['ID'], user_id)
|
||||
|
||||
|
@ -8526,7 +8526,7 @@ def test_versioned_object_acl_no_version_specified():
|
|||
|
||||
display_name = get_main_display_name()
|
||||
user_id = get_main_user_id()
|
||||
|
||||
|
||||
eq(response['Owner']['DisplayName'], display_name)
|
||||
eq(response['Owner']['ID'], user_id)
|
||||
|
||||
|
@ -8805,7 +8805,7 @@ def test_lifecycle_id_too_long():
|
|||
status, error_code = _get_status_and_error_code(e.response)
|
||||
eq(status, 400)
|
||||
eq(error_code, 'InvalidArgument')
|
||||
|
||||
|
||||
@attr(resource='bucket')
|
||||
@attr(method='put')
|
||||
@attr(operation='same id')
|
||||
|
@ -8822,7 +8822,7 @@ def test_lifecycle_same_id():
|
|||
status, error_code = _get_status_and_error_code(e.response)
|
||||
eq(status, 400)
|
||||
eq(error_code, 'InvalidArgument')
|
||||
|
||||
|
||||
@attr(resource='bucket')
|
||||
@attr(method='put')
|
||||
@attr(operation='invalid status in lifecycle rule')
|
||||
|
@ -8838,7 +8838,7 @@ def test_lifecycle_invalid_status():
|
|||
status, error_code = _get_status_and_error_code(e.response)
|
||||
eq(status, 400)
|
||||
eq(error_code, 'MalformedXML')
|
||||
|
||||
|
||||
rules=[{'ID': 'rule1', 'Expiration': {'Days': 2}, 'Prefix': 'test1/', 'Status':'disabled'}]
|
||||
lifecycle = {'Rules': rules}
|
||||
|
||||
|
@ -9540,7 +9540,7 @@ def test_encryption_sse_c_multipart_upload():
|
|||
}
|
||||
resend_parts = []
|
||||
|
||||
(upload_id, data, parts) = _multipart_upload_enc(client, bucket_name, key, objlen,
|
||||
(upload_id, data, parts) = _multipart_upload_enc(client, bucket_name, key, objlen,
|
||||
part_size=5*1024*1024, init_headers=enc_headers, part_headers=enc_headers, metadata=metadata, resend_parts=resend_parts)
|
||||
|
||||
lf = (lambda **kwargs: kwargs['params']['headers'].update(enc_headers))
|
||||
|
@ -9595,7 +9595,7 @@ def test_encryption_sse_c_multipart_invalid_chunks_1():
|
|||
}
|
||||
resend_parts = []
|
||||
|
||||
e = assert_raises(ClientError, _multipart_upload_enc, client=client, bucket_name=bucket_name,
|
||||
e = assert_raises(ClientError, _multipart_upload_enc, client=client, bucket_name=bucket_name,
|
||||
key=key, size=objlen, part_size=5*1024*1024, init_headers=init_headers, part_headers=part_headers, metadata=metadata, resend_parts=resend_parts)
|
||||
status, error_code = _get_status_and_error_code(e.response)
|
||||
eq(status, 400)
|
||||
|
@ -9627,7 +9627,7 @@ def test_encryption_sse_c_multipart_invalid_chunks_2():
|
|||
}
|
||||
resend_parts = []
|
||||
|
||||
e = assert_raises(ClientError, _multipart_upload_enc, client=client, bucket_name=bucket_name,
|
||||
e = assert_raises(ClientError, _multipart_upload_enc, client=client, bucket_name=bucket_name,
|
||||
key=key, size=objlen, part_size=5*1024*1024, init_headers=init_headers, part_headers=part_headers, metadata=metadata, resend_parts=resend_parts)
|
||||
status, error_code = _get_status_and_error_code(e.response)
|
||||
eq(status, 400)
|
||||
|
@ -9657,7 +9657,7 @@ def test_encryption_sse_c_multipart_bad_download():
|
|||
}
|
||||
resend_parts = []
|
||||
|
||||
(upload_id, data, parts) = _multipart_upload_enc(client, bucket_name, key, objlen,
|
||||
(upload_id, data, parts) = _multipart_upload_enc(client, bucket_name, key, objlen,
|
||||
part_size=5*1024*1024, init_headers=put_headers, part_headers=put_headers, metadata=metadata, resend_parts=resend_parts)
|
||||
|
||||
lf = (lambda **kwargs: kwargs['params']['headers'].update(put_headers))
|
||||
|
@@ -9765,22 +9765,52 @@ def _test_sse_kms_customer_write(file_size, key_id = 'testkey-1'):
     eq(body, data)
 
 
 @attr(resource='object')
+@attr(method='put')
+@attr(operation='Test SSE-KMS encrypted transfer 1 byte')
+@attr(assertion='success')
+@attr('encryption')
+def test_sse_kms_transfer_1b():
+    _test_sse_kms_customer_write(1)
+
+
+@attr(resource='object')
+@attr(method='put')
+@attr(operation='Test SSE-KMS encrypted transfer 1KB')
+@attr(assertion='success')
+@attr('encryption')
+def test_sse_kms_transfer_1kb():
+    _test_sse_kms_customer_write(1024)
+
+
+@attr(resource='object')
+@attr(method='put')
+@attr(operation='Test SSE-KMS encrypted transfer 1MB')
+@attr(assertion='success')
+@attr('encryption')
+def test_sse_kms_transfer_1MB():
+    _test_sse_kms_customer_write(1024*1024)
+
+
+@attr(resource='object')
+@attr(method='put')
+@attr(operation='Test SSE-KMS encrypted transfer 13 bytes')
+@attr(assertion='success')
+@attr('encryption')
+def test_sse_kms_transfer_13b():
+    _test_sse_kms_customer_write(13)
+
+@attr(resource='object')
 @attr(method='head')
 @attr(operation='Test SSE-KMS encrypted does perform head properly')
 @attr(assertion='success')
 @attr('encryption')
 def test_sse_kms_method_head():
-    kms_keyid = get_main_kms_keyid()
     bucket_name = get_new_bucket()
     client = get_client()
     sse_kms_client_headers = {
         'x-amz-server-side-encryption': 'aws:kms',
-        'x-amz-server-side-encryption-aws-kms-key-id': kms_keyid
+        'x-amz-server-side-encryption-aws-kms-key-id': 'testkey-1'
     }
     data = 'A'*1000
     key = 'testobj'
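For readers unfamiliar with the pattern used throughout these tests: SSE-KMS is requested by injecting the raw x-amz-server-side-encryption headers with a botocore 'before-call' event handler, exactly as the hunks above and below do. A minimal, hedged sketch outside the test harness (the endpoint, credentials, bucket and the 'testkey-1' key id are assumed to exist on the gateway under test):

    # Illustrative sketch, not part of the diff. Registers the same kind of
    # 'before-call' handler the tests use to attach SSE-KMS headers to a request.
    import boto3

    client = boto3.client('s3',
                          endpoint_url='http://localhost:8000',   # assumed RGW endpoint
                          aws_access_key_id='ACCESSKEY',          # placeholder credentials
                          aws_secret_access_key='SECRETKEY')

    sse_kms_headers = {
        'x-amz-server-side-encryption': 'aws:kms',
        'x-amz-server-side-encryption-aws-kms-key-id': 'testkey-1',
    }
    add_sse_headers = lambda **kwargs: kwargs['params']['headers'].update(sse_kms_headers)
    client.meta.events.register('before-call.s3.PutObject', add_sse_headers)

    client.put_object(Bucket='my-bucket', Key='testobj', Body='A' * 1000)
    response = client.head_object(Bucket='my-bucket', Key='testobj')
    assert response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'] == 'aws:kms'

boto3 also exposes this natively as put_object(..., ServerSideEncryption='aws:kms', SSEKMSKeyId=...); the tests inject headers instead, which lets them send deliberately mismatched values, as the invalid_chunks tests further down do.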
@@ -9791,7 +9821,7 @@ def test_sse_kms_method_head():
 
     response = client.head_object(Bucket=bucket_name, Key=key)
     eq(response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption'], 'aws:kms')
-    eq(response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption-aws-kms-key-id'], kms_keyid)
+    eq(response['ResponseMetadata']['HTTPHeaders']['x-amz-server-side-encryption-aws-kms-key-id'], 'testkey-1')
 
     lf = (lambda **kwargs: kwargs['params']['headers'].update(sse_kms_client_headers))
     client.meta.events.register('before-call.s3.HeadObject', lf)
@@ -9805,12 +9835,11 @@ def test_sse_kms_method_head():
 @attr(assertion='operation success')
 @attr('encryption')
 def test_sse_kms_present():
-    kms_keyid = get_main_kms_keyid()
     bucket_name = get_new_bucket()
     client = get_client()
     sse_kms_client_headers = {
         'x-amz-server-side-encryption': 'aws:kms',
-        'x-amz-server-side-encryption-aws-kms-key-id': kms_keyid
+        'x-amz-server-side-encryption-aws-kms-key-id': 'testkey-1'
     }
     data = 'A'*100
     key = 'testobj'
@@ -9870,7 +9899,6 @@ def test_sse_kms_not_declared():
 @attr(assertion='successful')
 @attr('encryption')
 def test_sse_kms_multipart_upload():
-    kms_keyid = get_main_kms_keyid()
     bucket_name = get_new_bucket()
     client = get_client()
     key = "multipart_enc"
@@ -9879,12 +9907,12 @@ def test_sse_kms_multipart_upload():
     metadata = {'foo': 'bar'}
     enc_headers = {
         'x-amz-server-side-encryption': 'aws:kms',
-        'x-amz-server-side-encryption-aws-kms-key-id': kms_keyid,
+        'x-amz-server-side-encryption-aws-kms-key-id': 'testkey-2',
         'Content-Type': content_type
     }
     resend_parts = []
 
-    (upload_id, data, parts) = _multipart_upload_enc(client, bucket_name, key, objlen,
+    (upload_id, data, parts) = _multipart_upload_enc(client, bucket_name, key, objlen,
             part_size=5*1024*1024, init_headers=enc_headers, part_headers=enc_headers, metadata=metadata, resend_parts=resend_parts)
 
     lf = (lambda **kwargs: kwargs['params']['headers'].update(enc_headers))
@@ -9920,8 +9948,6 @@ def test_sse_kms_multipart_upload():
 @attr(assertion='successful')
 @attr('encryption')
 def test_sse_kms_multipart_invalid_chunks_1():
-    kms_keyid = get_main_kms_keyid()
-    kms_keyid2 = get_secondary_kms_keyid()
     bucket_name = get_new_bucket()
     client = get_client()
     key = "multipart_enc"
@@ -9930,17 +9956,17 @@ def test_sse_kms_multipart_invalid_chunks_1():
     metadata = {'foo': 'bar'}
     init_headers = {
         'x-amz-server-side-encryption': 'aws:kms',
-        'x-amz-server-side-encryption-aws-kms-key-id': kms_keyid,
+        'x-amz-server-side-encryption-aws-kms-key-id': 'testkey-1',
         'Content-Type': content_type
     }
     part_headers = {
         'x-amz-server-side-encryption': 'aws:kms',
-        'x-amz-server-side-encryption-aws-kms-key-id': kms_keyid2
+        'x-amz-server-side-encryption-aws-kms-key-id': 'testkey-2'
     }
     resend_parts = []
 
-    _multipart_upload_enc(client, bucket_name, key, objlen, part_size=5*1024*1024,
-            init_headers=init_headers, part_headers=part_headers, metadata=metadata,
+    _multipart_upload_enc(client, bucket_name, key, objlen, part_size=5*1024*1024,
+            init_headers=init_headers, part_headers=part_headers, metadata=metadata,
             resend_parts=resend_parts)
 
 
@@ -9950,7 +9976,6 @@ def test_sse_kms_multipart_invalid_chunks_1():
 @attr(assertion='successful')
 @attr('encryption')
 def test_sse_kms_multipart_invalid_chunks_2():
-    kms_keyid = get_main_kms_keyid()
     bucket_name = get_new_bucket()
     client = get_client()
     key = "multipart_enc"
@@ -9959,7 +9984,7 @@ def test_sse_kms_multipart_invalid_chunks_2():
     metadata = {'foo': 'bar'}
     init_headers = {
         'x-amz-server-side-encryption': 'aws:kms',
-        'x-amz-server-side-encryption-aws-kms-key-id': kms_keyid,
+        'x-amz-server-side-encryption-aws-kms-key-id': 'testkey-1',
         'Content-Type': content_type
     }
     part_headers = {
@@ -9968,18 +9993,16 @@ def test_sse_kms_multipart_invalid_chunks_2():
     }
     resend_parts = []
 
-    _multipart_upload_enc(client, bucket_name, key, objlen, part_size=5*1024*1024,
-            init_headers=init_headers, part_headers=part_headers, metadata=metadata,
+    _multipart_upload_enc(client, bucket_name, key, objlen, part_size=5*1024*1024,
+            init_headers=init_headers, part_headers=part_headers, metadata=metadata,
             resend_parts=resend_parts)
 
-
 @attr(resource='object')
 @attr(method='post')
 @attr(operation='authenticated KMS browser based upload via POST request')
 @attr(assertion='succeeds and returns written data')
 @attr('encryption')
 def test_sse_kms_post_object_authenticated_request():
-    kms_keyid = get_main_kms_keyid()
     bucket_name = get_new_bucket()
     client = get_client()
 
@@ -10011,7 +10034,7 @@ def test_sse_kms_post_object_authenticated_request():
     ("acl" , "private"),("signature" , signature),("policy" , policy),\
     ("Content-Type" , "text/plain"),
     ('x-amz-server-side-encryption', 'aws:kms'), \
-    ('x-amz-server-side-encryption-aws-kms-key-id', kms_keyid), \
+    ('x-amz-server-side-encryption-aws-kms-key-id', 'testkey-1'), \
     ('file', ('bar'))])
 
     r = requests.post(url, files = payload)
@@ -10026,7 +10049,7 @@ def test_sse_kms_post_object_authenticated_request():
 @attr(operation='Test SSE-KMS encrypted transfer 1 byte')
 @attr(assertion='success')
 @attr('encryption')
-def test_sse_kms_transfer_1b():
+def test_sse_kms_barb_transfer_1b():
     kms_keyid = get_main_kms_keyid()
     if kms_keyid is None:
         raise SkipTest
@@ -10038,7 +10061,7 @@ def test_sse_kms_transfer_1b():
 @attr(operation='Test SSE-KMS encrypted transfer 1KB')
 @attr(assertion='success')
 @attr('encryption')
-def test_sse_kms_transfer_1kb():
+def test_sse_kms_barb_transfer_1kb():
     kms_keyid = get_main_kms_keyid()
     if kms_keyid is None:
         raise SkipTest
@@ -10050,7 +10073,7 @@ def test_sse_kms_transfer_1kb():
 @attr(operation='Test SSE-KMS encrypted transfer 1MB')
 @attr(assertion='success')
 @attr('encryption')
-def test_sse_kms_transfer_1MB():
+def test_sse_kms_barb_transfer_1MB():
     kms_keyid = get_main_kms_keyid()
     if kms_keyid is None:
         raise SkipTest
@@ -10062,7 +10085,7 @@ def test_sse_kms_transfer_1MB():
 @attr(operation='Test SSE-KMS encrypted transfer 13 bytes')
 @attr(assertion='success')
 @attr('encryption')
-def test_sse_kms_transfer_13b():
+def test_sse_kms_barb_transfer_13b():
     kms_keyid = get_main_kms_keyid()
     if kms_keyid is None:
         raise SkipTest
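The four *_barb_* variants produced by this rename keep the configuration lookup and simply skip when no key id is configured. A hedged sketch of one complete variant as it would read after the revert (the key_id forwarding and the nose SkipTest import are assumptions based on the surrounding context, not lines shown in this diff):

    # Sketch of a renamed, configuration-driven test; assumes the nose runner
    # used by s3-tests at the time (pytest.skip() would be the modern equivalent).
    from nose.plugins.skip import SkipTest

    def test_sse_kms_barb_transfer_1b():
        kms_keyid = get_main_kms_keyid()   # None when [s3 main] has no kms_keyid
        if kms_keyid is None:
            raise SkipTest                 # no real KMS/Barbican key to test against
        _test_sse_kms_customer_write(1, key_id=kms_keyid)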
@ -10460,7 +10483,7 @@ def test_bucket_policy_set_condition_operator_end_with_IfExists():
|
|||
response = client.get_object(Bucket=bucket_name, Key=key)
|
||||
eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
|
||||
|
||||
# the 'referer' headers need to be removed for this one
|
||||
# the 'referer' headers need to be removed for this one
|
||||
#response = client.get_object(Bucket=bucket_name, Key=key)
|
||||
#eq(response['ResponseMetadata']['HTTPStatusCode'], 200)
|
||||
|
||||
|
@ -10763,7 +10786,7 @@ def test_post_object_tags_authenticated_request():
|
|||
|
||||
signature = base64.b64encode(hmac.new(aws_secret_access_key, policy, sha).digest())
|
||||
|
||||
payload = OrderedDict([
|
||||
payload = OrderedDict([
|
||||
("key" , "foo.txt"),
|
||||
("AWSAccessKeyId" , aws_access_key_id),\
|
||||
("acl" , "private"),("signature" , signature),("policy" , policy),\
|
||||
|
@ -10908,7 +10931,7 @@ def test_versioning_bucket_atomic_upload_return_version_id():
|
|||
response = client.list_object_versions(Bucket=bucket_name)
|
||||
versions = response['Versions']
|
||||
for version in versions:
|
||||
eq(version['VersionId'], version_id)
|
||||
eq(version['VersionId'], version_id)
|
||||
|
||||
|
||||
# for versioning-default-bucket, no version-id should return.
|
||||
|
@ -10948,7 +10971,7 @@ def test_versioning_bucket_multipart_upload_return_version_id():
|
|||
response = client.list_object_versions(Bucket=bucket_name)
|
||||
versions = response['Versions']
|
||||
for version in versions:
|
||||
eq(version['VersionId'], version_id)
|
||||
eq(version['VersionId'], version_id)
|
||||
|
||||
# for versioning-default-bucket, no version-id should return.
|
||||
bucket_name = get_new_bucket()
|
||||
|
@ -11203,7 +11226,7 @@ def test_bucket_policy_put_obj_copy_source():
|
|||
response = alt_client.get_object(Bucket=bucket_name2, Key='new_foo')
|
||||
body = _get_body(response)
|
||||
eq(body, 'public/foo')
|
||||
|
||||
|
||||
copy_source = {'Bucket': bucket_name, 'Key': 'public/bar'}
|
||||
alt_client.copy_object(Bucket=bucket_name2, CopySource=copy_source, Key='new_foo2')
|
||||
|
||||
|
@ -11263,7 +11286,7 @@ def test_bucket_policy_put_obj_copy_source_meta():
|
|||
del kwargs['params']['headers']["x-amz-metadata-directive"]
|
||||
|
||||
alt_client.meta.events.register('before-call.s3.CopyObject', remove_header)
|
||||
|
||||
|
||||
copy_source = {'Bucket': src_bucket_name, 'Key': 'public/bar'}
|
||||
check_access_denied(alt_client.copy_object, Bucket=bucket_name, CopySource=copy_source, Key='new_foo2', Metadata={"foo": "bar"})
|
||||
|
||||
|
@ -12148,7 +12171,7 @@ def test_object_lock_get_obj_metadata():
|
|||
eq(response['ObjectLockMode'], retention['Mode'])
|
||||
eq(response['ObjectLockRetainUntilDate'], retention['RetainUntilDate'])
|
||||
eq(response['ObjectLockLegalHoldStatus'], legal_hold['Status'])
|
||||
|
||||
|
||||
client.put_object_legal_hold(Bucket=bucket_name, Key=key, LegalHold={'Status':'OFF'})
|
||||
client.delete_object(Bucket=bucket_name, Key=key, VersionId=response['VersionId'], BypassGovernanceRetention=True)
|
||||
|
||||
|
|