forked from TrueCloudLab/s3-tests
Port functional tests from python 2 to python 3

Add fails_on_rgw to tests not passing. Some tests from the master branch do not pass on the rgw yet. Others are waiting on rgw tracker issues to be resolved.

Signed-off-by: Ali Maredia <amaredia@redhat.com>
This commit is contained in:
parent 92f056532b
commit be9935ba1a
30 changed files with 597 additions and 561 deletions
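Most of the diff below is the same handful of mechanical python 2 -> python 3 rewrites applied file by file. As orientation, a minimal runnable sketch (my example, not taken from the commit) of the recurring patterns:

s3 = {'main': 'conn-a', 'alt': 'conn-b'}

# py2: for name, conn in s3.iteritems(): print 'cleaning %s' % name
for name, conn in list(s3.items()):   # iteritems() is gone; list() snapshots the view
    print('cleaning %s' % name)       # print is a function in python 3

# py2: for _ in xrange(3): ...
for _ in range(3):                    # xrange was renamed to range
    pass

# py2: if s3.has_key('main'): ...
if 'main' in s3:                      # dict.has_key() was removed in favor of `in`
    pass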
bootstrap (40 changed lines)

@@ -4,56 +4,52 @@ set -e
 virtualenv="virtualenv"
 declare -a packages
 if [ -f /etc/debian_version ]; then
-    packages=(debianutils python-pip python-virtualenv python-dev libevent-dev libffi-dev libxml2-dev libxslt-dev zlib1g-dev)
+    packages=(debianutils python3-pip python3-virtualenv python3-dev libevent-dev libffi-dev libxml2-dev libxslt-dev zlib1g-dev)
     for package in ${packages[@]}; do
         if [ "$(dpkg --status -- $package 2>/dev/null|sed -n 's/^Status: //p')" != "install ok installed" ]; then
             # add a space after old values
             missing="${missing:+$missing }$package"
         fi
     done

     if [ -n "$missing" ]; then
         echo "$0: missing required DEB packages. Installing via sudo." 1>&2
         sudo apt-get -y install $missing
     fi
-else
-    packages=(which libevent-devel libffi-devel libxml2-devel libxslt-devel zlib-devel)
-    if [ -f /etc/fedora-release ]; then
-        packages+=(python2-pip python2-virtualenv python2-devel)
-    elif [ -f /etc/redhat-release ]; then
-        unset ${GREP_OPTIONS}
-        eval $(cat /etc/os-release | grep VERSION_ID)
-        if [ ${VERSION_ID:0:1} -lt 8 ]; then
-            packages+=(python-virtualenv python-devel)
-        else
-            packages+=(python2-virtualenv python2-devel)
-            virtualenv="virtualenv-2"
-        fi
-    fi
+elif [ -f /etc/redhat-release ]; then
+    packages=(which python3-virtualenv python36-devel libevent-devel libffi-devel libxml2-devel libxslt-devel zlib-devel)

     for package in ${packages[@]}; do
+        # When the package is python36-devel we change it to python3-devel on Fedora
+        if [[ ${package} == "python36-devel" && -f /etc/fedora-release ]]; then
+            package=python36
+        fi
         if [ "$(rpm -qa $package 2>/dev/null)" == "" ]; then
             missing="${missing:+$missing }$package"
         fi
     done

     if [ -n "$missing" ]; then
-        echo "$0: missing required RPM packages. Installing via sudo." 1>&2
+        echo "$0: Missing required RPM packages: ${missing}." 1>&2
         sudo yum -y install $missing
     fi
+else
+    echo "s3-tests can only be run on Red Hat, Centos, Fedora, Ubunutu, or Debian platforms"
+    exit 1
 fi

-${virtualenv} --python=$(which python2) --no-site-packages --distribute virtualenv
+# s3-tests only works on python 3.6 not newer versions of python3
+${virtualenv} --python=$(which python3.6) --no-site-packages --distribute virtualenv

 # avoid pip bugs
-./virtualenv/bin/pip install --upgrade pip
+./virtualenv/bin/pip3 install --upgrade pip

 # slightly old version of setuptools; newer fails w/ requests 0.14.0
-./virtualenv/bin/pip install setuptools==32.3.1
+./virtualenv/bin/pip3 install setuptools==32.3.1

-./virtualenv/bin/pip install -r requirements.txt
+./virtualenv/bin/pip3 install -r requirements.txt

 # forbid setuptools from using the network because it'll try to use
 # easy_install, and we really wanted pip; next line will fail if pip
 # requirements.txt does not match setup.py requirements -- sucky but
 # good enough for now
-./virtualenv/bin/python setup.py develop
+./virtualenv/bin/python3 setup.py develop
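The new bootstrap comment pins the virtualenv to python 3.6 specifically. A hypothetical guard (not part of the commit) that a harness could run inside the virtualenv to confirm the interpreter matches that constraint:

import sys

# Assumes the "3.6 only" comment in bootstrap is a hard requirement.
if sys.version_info[:2] != (3, 6):
    sys.exit('s3-tests expects python 3.6, found %d.%d' % sys.version_info[:2])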
@@ -2,12 +2,11 @@ PyYAML
 nose >=1.0.0
 boto >=2.6.0
 boto3 >=1.0.0
-bunch >=1.0.0
+munch >=2.0.0
 # 0.14 switches to libev, that means bootstrap needs to change too
 gevent >=1.0
 isodate >=0.4.4
 requests >=0.14.0
 pytz >=2011k
-ordereddict
 httplib2
 lxml
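requirements.txt swaps bunch for munch, a maintained python 3 fork with the same dict-with-attribute-access behavior, and drops ordereddict, whose functionality is in the python 3 standard library. A small usage sketch, assuming only the basic Munch API:

import munch

config = munch.Munch()
config.host = 'localhost'           # attribute-style access...
config['port'] = 8000               # ...and dict-style access hit the same storage
print(config.port, config['host'])  # 8000 localhost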
@@ -57,7 +57,7 @@ def main():

 def calculate_stats(options, total, durations, min_time, max_time, errors,
                     success):
-    print 'Calculating statistics...'
+    print('Calculating statistics...')

     f = sys.stdin
     if options.input:
@@ -81,13 +81,13 @@ def calculate_stats(options, total, durations, min_time, max_time, errors,
         end = start + duration / float(NANOSECONDS)

         if options.verbose:
-            print "[{type}] POSIX time: {start:>18.2f} - {end:<18.2f} " \
+            print("[{type}] POSIX time: {start:>18.2f} - {end:<18.2f} " \
                 "{data:>11.2f} KB".format(
                     type=type_,
                     start=start,
                     end=end,
                     data=data_size / 1024.0, # convert to KB
-                )
+                ))

         # update time boundaries
         prev = min_time.setdefault(type_, start)
@@ -106,7 +106,7 @@ def calculate_stats(options, total, durations, min_time, max_time, errors,
     total[type_] = total.get(type_, 0) + data_size

 def print_results(total, durations, min_time, max_time, errors, success):
-    for type_ in total.keys():
+    for type_ in list(total.keys()):
         trans_success = success.get(type_, 0)
         trans_fail = errors.get(type_, 0)
         trans = trans_success + trans_fail
@@ -121,7 +121,7 @@ def print_results(total, durations, min_time, max_time, errors, success):
         trans_long = max(durations[type_]) / float(NANOSECONDS)
         trans_short = min(durations[type_]) / float(NANOSECONDS)

-        print OUTPUT_FORMAT.format(
+        print(OUTPUT_FORMAT.format(
             type=type_,
             trans_success=trans_success,
             trans_fail=trans_fail,
@@ -135,7 +135,7 @@ def print_results(total, durations, min_time, max_time, errors, success):
             conc=conc,
             trans_long=trans_long,
             trans_short=trans_short,
-        )
+        ))

 if __name__ == '__main__':
     main()
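Several loops in this commit wrap dict.keys()/items() in list(). In python 3 those methods return live views over the dict, and mutating the dict while iterating a view raises RuntimeError, so snapshotting into a list is the safe port of the py2 behavior. A minimal illustration (my example, not from the commit):

d = {'a': 1, 'b': 2}
# for k in d.keys(): d.pop(k)   # RuntimeError: dictionary changed size during iteration
for k in list(d.keys()):        # list() materializes a snapshot first
    d.pop(k)
print(d)  # {}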
@@ -1,5 +1,5 @@
 import boto.s3.connection
-import bunch
+import munch
 import itertools
 import os
 import random
@@ -11,8 +11,8 @@ from lxml import etree
 from doctest import Example
 from lxml.doctestcompare import LXMLOutputChecker

-s3 = bunch.Bunch()
-config = bunch.Bunch()
+s3 = munch.Munch()
+config = munch.Munch()
 prefix = ''

 bucket_counter = itertools.count(1)
@@ -51,10 +51,10 @@ def nuke_bucket(bucket):
     while deleted_cnt:
         deleted_cnt = 0
         for key in bucket.list():
-            print 'Cleaning bucket {bucket} key {key}'.format(
+            print('Cleaning bucket {bucket} key {key}'.format(
                 bucket=bucket,
                 key=key,
-                )
+                ))
             key.set_canned_acl('private')
             key.delete()
             deleted_cnt += 1
@@ -67,26 +67,26 @@ def nuke_bucket(bucket):
                 and e.body == ''):
             e.error_code = 'AccessDenied'
         if e.error_code != 'AccessDenied':
-            print 'GOT UNWANTED ERROR', e.error_code
+            print('GOT UNWANTED ERROR', e.error_code)
             raise
         # seems like we're not the owner of the bucket; ignore
         pass

 def nuke_prefixed_buckets():
-    for name, conn in s3.items():
-        print 'Cleaning buckets from connection {name}'.format(name=name)
+    for name, conn in list(s3.items()):
+        print('Cleaning buckets from connection {name}'.format(name=name))
         for bucket in conn.get_all_buckets():
             if bucket.name.startswith(prefix):
-                print 'Cleaning bucket {bucket}'.format(bucket=bucket)
+                print('Cleaning bucket {bucket}'.format(bucket=bucket))
                 nuke_bucket(bucket)

-    print 'Done with cleanup of test buckets.'
+    print('Done with cleanup of test buckets.')

 def read_config(fp):
-    config = bunch.Bunch()
+    config = munch.Munch()
     g = yaml.safe_load_all(fp)
     for new in g:
-        config.update(bunch.bunchify(new))
+        config.update(munch.Munchify(new))
     return config

 def connect(conf):
@@ -97,7 +97,7 @@ def connect(conf):
         access_key='aws_access_key_id',
         secret_key='aws_secret_access_key',
         )
-    kwargs = dict((mapping[k],v) for (k,v) in conf.iteritems() if k in mapping)
+    kwargs = dict((mapping[k],v) for (k,v) in conf.items() if k in mapping)
     #process calling_format argument
     calling_formats = dict(
         ordinary=boto.s3.connection.OrdinaryCallingFormat(),
@@ -105,7 +105,7 @@ def connect(conf):
         vhost=boto.s3.connection.VHostCallingFormat(),
         )
     kwargs['calling_format'] = calling_formats['ordinary']
-    if conf.has_key('calling_format'):
+    if 'calling_format' in conf:
         raw_calling_format = conf['calling_format']
         try:
             kwargs['calling_format'] = calling_formats[raw_calling_format]
@@ -146,7 +146,7 @@ def setup():
         raise RuntimeError("Empty Prefix! Aborting!")

     defaults = config.s3.defaults
-    for section in config.s3.keys():
+    for section in list(config.s3.keys()):
         if section == 'defaults':
             continue

@@ -258,9 +258,10 @@ def with_setup_kwargs(setup, teardown=None):
 # yield _test_gen

 def trim_xml(xml_str):
-    p = etree.XMLParser(remove_blank_text=True)
+    p = etree.XMLParser(encoding="utf-8", remove_blank_text=True)
+    xml_str = bytes(xml_str, "utf-8")
     elem = etree.XML(xml_str, parser=p)
-    return etree.tostring(elem)
+    return etree.tostring(elem, encoding="unicode")

 def normalize_xml(xml, pretty_print=True):
     if xml is None:
@@ -282,7 +283,7 @@ def normalize_xml(xml, pretty_print=True):
     for parent in root.xpath('//*[./*]'): # Search for parent elements
         parent[:] = sorted(parent,key=lambda x: x.tag)

-    xmlstr = etree.tostring(root, encoding="utf-8", xml_declaration=True, pretty_print=pretty_print)
+    xmlstr = etree.tostring(root, encoding="unicode", pretty_print=pretty_print)
     # there are two different DTD URIs
     xmlstr = re.sub(r'xmlns="[^"]+"', 'xmlns="s3"', xmlstr)
     xmlstr = re.sub(r'xmlns=\'[^\']+\'', 'xmlns="s3"', xmlstr)
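The trim_xml/normalize_xml changes follow lxml's python 3 rules: a parser constructed with an explicit encoding must be fed bytes, and tostring(encoding="unicode") returns str rather than bytes. A short round-trip sketch under those assumptions:

from lxml import etree

xml_str = '<a> <b>text</b> </a>'
parser = etree.XMLParser(encoding='utf-8', remove_blank_text=True)
elem = etree.XML(xml_str.encode('utf-8'), parser=parser)  # bytes in
print(etree.tostring(elem, encoding='unicode'))           # str out: <a><b>text</b></a>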
@@ -1,21 +1,20 @@
-from __future__ import print_function
 import sys
-import ConfigParser
+import configparser
 import boto.exception
 import boto.s3.connection
-import bunch
+import munch
 import itertools
 import os
 import random
 import string
-from httplib import HTTPConnection, HTTPSConnection
-from urlparse import urlparse
+from http.client import HTTPConnection, HTTPSConnection
+from urllib.parse import urlparse

 from .utils import region_sync_meta

-s3 = bunch.Bunch()
-config = bunch.Bunch()
-targets = bunch.Bunch()
+s3 = munch.Munch()
+config = munch.Munch()
+targets = munch.Munch()

 # this will be assigned by setup()
 prefix = None
@@ -69,7 +68,7 @@ def nuke_prefixed_buckets_on_conn(prefix, name, conn):
         if bucket.name.startswith(prefix):
             print('Cleaning bucket {bucket}'.format(bucket=bucket))
             success = False
-            for i in xrange(2):
+            for i in range(2):
                 try:
                     try:
                         iterator = iter(bucket.list_versions())
@@ -116,12 +115,12 @@ def nuke_prefixed_buckets_on_conn(prefix, name, conn):
 def nuke_prefixed_buckets(prefix):
     # If no regions are specified, use the simple method
     if targets.main.master == None:
-        for name, conn in s3.items():
+        for name, conn in list(s3.items()):
             print('Deleting buckets on {name}'.format(name=name))
             nuke_prefixed_buckets_on_conn(prefix, name, conn)
     else:
         # First, delete all buckets on the master connection
-        for name, conn in s3.items():
+        for name, conn in list(s3.items()):
             if conn == targets.main.master.connection:
                 print('Deleting buckets on {name} (master)'.format(name=name))
                 nuke_prefixed_buckets_on_conn(prefix, name, conn)
@@ -131,7 +130,7 @@ def nuke_prefixed_buckets(prefix):
         print('region-sync in nuke_prefixed_buckets')

         # Now delete remaining buckets on any other connection
-        for name, conn in s3.items():
+        for name, conn in list(s3.items()):
             if conn != targets.main.master.connection:
                 print('Deleting buckets on {name} (non-master)'.format(name=name))
                 nuke_prefixed_buckets_on_conn(prefix, name, conn)
@@ -149,46 +148,46 @@ class TargetConfig:
         self.sync_meta_wait = 0
         try:
             self.api_name = cfg.get(section, 'api_name')
-        except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
+        except (configparser.NoSectionError, configparser.NoOptionError):
             pass
         try:
             self.port = cfg.getint(section, 'port')
-        except ConfigParser.NoOptionError:
+        except configparser.NoOptionError:
             pass
         try:
             self.host=cfg.get(section, 'host')
-        except ConfigParser.NoOptionError:
+        except configparser.NoOptionError:
             raise RuntimeError(
                 'host not specified for section {s}'.format(s=section)
                 )
         try:
             self.is_master=cfg.getboolean(section, 'is_master')
-        except ConfigParser.NoOptionError:
+        except configparser.NoOptionError:
             pass

         try:
             self.is_secure=cfg.getboolean(section, 'is_secure')
-        except ConfigParser.NoOptionError:
+        except configparser.NoOptionError:
             pass

         try:
             raw_calling_format = cfg.get(section, 'calling_format')
-        except ConfigParser.NoOptionError:
+        except configparser.NoOptionError:
             raw_calling_format = 'ordinary'

         try:
             self.sync_agent_addr = cfg.get(section, 'sync_agent_addr')
-        except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
+        except (configparser.NoSectionError, configparser.NoOptionError):
             pass

         try:
             self.sync_agent_port = cfg.getint(section, 'sync_agent_port')
-        except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
+        except (configparser.NoSectionError, configparser.NoOptionError):
             pass

         try:
             self.sync_meta_wait = cfg.getint(section, 'sync_meta_wait')
-        except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
+        except (configparser.NoSectionError, configparser.NoOptionError):
             pass


@@ -208,7 +207,7 @@ class TargetConnection:

 class RegionsInfo:
     def __init__(self):
-        self.m = bunch.Bunch()
+        self.m = munch.Munch()
         self.master = None
         self.secondaries = []

@@ -226,21 +225,21 @@ class RegionsInfo:
         return self.m[name]
     def get(self):
         return self.m
-    def iteritems(self):
-        return self.m.iteritems()
+    def items(self):
+        return self.m.items()

 regions = RegionsInfo()


 class RegionsConn:
     def __init__(self):
-        self.m = bunch.Bunch()
+        self.m = munch.Munch()
         self.default = None
         self.master = None
         self.secondaries = []

-    def iteritems(self):
-        return self.m.iteritems()
+    def items(self):
+        return self.m.items()

     def set_default(self, conn):
         self.default = conn
@@ -260,7 +259,7 @@ _multiprocess_can_split_ = True

 def setup():

-    cfg = ConfigParser.RawConfigParser()
+    cfg = configparser.RawConfigParser()
     try:
         path = os.environ['S3TEST_CONF']
     except KeyError:
@@ -268,8 +267,7 @@ def setup():
             'To run tests, point environment '
             + 'variable S3TEST_CONF to a config file.',
             )
-    with file(path) as f:
-        cfg.readfp(f)
+    cfg.read(path)

     global prefix
     global targets
@@ -277,19 +275,19 @@ def setup():

     try:
         template = cfg.get('fixtures', 'bucket prefix')
-    except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
+    except (configparser.NoSectionError, configparser.NoOptionError):
         template = 'test-{random}-'
     prefix = choose_bucket_prefix(template=template)

     try:
         slow_backend = cfg.getboolean('fixtures', 'slow backend')
-    except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
+    except (configparser.NoSectionError, configparser.NoOptionError):
         slow_backend = False

     # pull the default_region out, if it exists
     try:
         default_region = cfg.get('fixtures', 'default_region')
-    except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
+    except (configparser.NoSectionError, configparser.NoOptionError):
         default_region = None

     s3.clear()
@@ -315,7 +313,7 @@ def setup():
         if len(regions.get()) == 0:
             regions.add("default", TargetConfig(cfg, section))

-        config[name] = bunch.Bunch()
+        config[name] = munch.Munch()
         for var in [
             'user_id',
             'display_name',
@@ -329,12 +327,12 @@ def setup():
             ]:
             try:
                 config[name][var] = cfg.get(section, var)
-            except ConfigParser.NoOptionError:
+            except configparser.NoOptionError:
                 pass

         targets[name] = RegionsConn()

-        for (k, conf) in regions.iteritems():
+        for (k, conf) in regions.items():
             conn = boto.s3.connection.S3Connection(
                 aws_access_key_id=cfg.get(section, 'access_key'),
                 aws_secret_access_key=cfg.get(section, 'secret_key'),
@@ -475,7 +473,7 @@ def _make_raw_request(host, port, method, path, body=None, request_headers=None,
     if request_headers is None:
         request_headers = {}

-    c = class_(host, port, strict=True, timeout=timeout)
+    c = class_(host, port=port, timeout=timeout)

     # TODO: We might have to modify this in future if we need to interact with
     # how httplib.request handles Accept-Encoding and Host.
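Besides the module rename, setup() drops the py2 file(path)/readfp() pair: configparser's read() accepts a path directly and opens the file itself. A minimal sketch of the py3 pattern used throughout this file (the section and option names here are illustrative):

import configparser

cfg = configparser.RawConfigParser()
cfg.read('s3tests.conf')  # takes a path (or list of paths); silently skips missing files
try:
    host = cfg.get('DEFAULT', 'host')
except (configparser.NoSectionError, configparser.NoOptionError):
    host = 'localhost'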
@@ -1,10 +1,9 @@
-from cStringIO import StringIO
+from io import StringIO
 import boto.connection
 import boto.exception
 import boto.s3.connection
 import boto.s3.acl
 import boto.utils
-import bunch
 import nose
 import operator
 import random
@@ -15,7 +14,7 @@ import os
 import re
 from email.utils import formatdate

-from urlparse import urlparse
+from urllib.parse import urlparse

 from boto.s3.connection import S3Connection

@@ -24,7 +23,7 @@ from nose.plugins.attrib import attr
 from nose.plugins.skip import SkipTest

 from .utils import assert_raises
-import AnonymousAuth
+from . import AnonymousAuth

 from email.header import decode_header
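One subtlety behind the cStringIO -> io.StringIO swap: io.StringIO accepts only str, so payloads that arrive as bytes need io.BytesIO instead. A two-line reminder (my note, not a claim about this file's call sites):

import io

io.StringIO('text payload').read()    # str in, str out
io.BytesIO(b'binary payload').read()  # bytes payloads need BytesIO
# io.StringIO(b'oops') would raise TypeError: initial_value must be str or None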
@@ -1,9 +1,8 @@
-from cStringIO import StringIO
+from io import StringIO
 import boto.exception
 import boto.s3.connection
 import boto.s3.acl
 import boto.s3.lifecycle
-import bunch
 import datetime
 import time
 import email.utils
@@ -16,7 +15,6 @@ import os
 import requests
 import base64
 import hmac
-import sha
 import pytz
 import json
 import httplib2
@@ -27,13 +25,13 @@ import random
 import re

 from collections import defaultdict
-from urlparse import urlparse
+from urllib.parse import urlparse

 from nose.tools import eq_ as eq
 from nose.plugins.attrib import attr
 from nose.plugins.skip import SkipTest

-import utils
+from . import utils
 from .utils import assert_raises

 from .policy import Policy, Statement, make_json_policy
@@ -117,7 +115,7 @@ def check_configure_versioning_retry(bucket, status, expected_string):

     read_status = None

-    for i in xrange(5):
+    for i in range(5):
         try:
             read_status = bucket.get_versioning_status()['Versioning']
         except KeyError:
@@ -330,26 +328,26 @@ def generate_lifecycle_body(rules):
     body = '<?xml version="1.0" encoding="UTF-8"?><LifecycleConfiguration>'
     for rule in rules:
         body += '<Rule><ID>%s</ID><Status>%s</Status>' % (rule['ID'], rule['Status'])
-        if 'Prefix' in rule.keys():
+        if 'Prefix' in list(rule.keys()):
             body += '<Prefix>%s</Prefix>' % rule['Prefix']
-        if 'Filter' in rule.keys():
+        if 'Filter' in list(rule.keys()):
             prefix_str= '' # AWS supports empty filters
-            if 'Prefix' in rule['Filter'].keys():
+            if 'Prefix' in list(rule['Filter'].keys()):
                 prefix_str = '<Prefix>%s</Prefix>' % rule['Filter']['Prefix']
             body += '<Filter>%s</Filter>' % prefix_str

-        if 'Expiration' in rule.keys():
-            if 'ExpiredObjectDeleteMarker' in rule['Expiration'].keys():
+        if 'Expiration' in list(rule.keys()):
+            if 'ExpiredObjectDeleteMarker' in list(rule['Expiration'].keys()):
                 body += '<Expiration><ExpiredObjectDeleteMarker>%s</ExpiredObjectDeleteMarker></Expiration>' \
                     % rule['Expiration']['ExpiredObjectDeleteMarker']
-            elif 'Date' in rule['Expiration'].keys():
+            elif 'Date' in list(rule['Expiration'].keys()):
                 body += '<Expiration><Date>%s</Date></Expiration>' % rule['Expiration']['Date']
             else:
                 body += '<Expiration><Days>%d</Days></Expiration>' % rule['Expiration']['Days']
-        if 'NoncurrentVersionExpiration' in rule.keys():
+        if 'NoncurrentVersionExpiration' in list(rule.keys()):
             body += '<NoncurrentVersionExpiration><NoncurrentDays>%d</NoncurrentDays></NoncurrentVersionExpiration>' % \
                 rule['NoncurrentVersionExpiration']['NoncurrentDays']
-        if 'NoncurrentVersionTransition' in rule.keys():
+        if 'NoncurrentVersionTransition' in list(rule.keys()):
             for t in rule['NoncurrentVersionTransition']:
                 body += '<NoncurrentVersionTransition>'
                 body += '<NoncurrentDays>%d</NoncurrentDays>' % \
@@ -357,7 +355,7 @@ def generate_lifecycle_body(rules):
                 body += '<StorageClass>%s</StorageClass>' % \
                     t['StorageClass']
                 body += '</NoncurrentVersionTransition>'
-        if 'AbortIncompleteMultipartUpload' in rule.keys():
+        if 'AbortIncompleteMultipartUpload' in list(rule.keys()):
             body += '<AbortIncompleteMultipartUpload><DaysAfterInitiation>%d</DaysAfterInitiation>' \
                 '</AbortIncompleteMultipartUpload>' % rule['AbortIncompleteMultipartUpload']['DaysAfterInitiation']
         body += '</Rule>'
@@ -491,11 +489,11 @@ def generate_random(size, part_size=5*1024*1024):
     chunk = 1024
     allowed = string.ascii_letters
     for x in range(0, size, part_size):
-        strpart = ''.join([allowed[random.randint(0, len(allowed) - 1)] for _ in xrange(chunk)])
+        strpart = ''.join([allowed[random.randint(0, len(allowed) - 1)] for _ in range(chunk)])
         s = ''
         left = size - x
         this_part_size = min(left, part_size)
-        for y in range(this_part_size / chunk):
+        for y in range(this_part_size // chunk):
             s = s + strpart
         if this_part_size > len(s):
             s = s + strpart[0:this_part_size - len(s)]
@@ -535,7 +533,7 @@ def _populate_key(bucket, keyname, size=7*1024*1024, storage_class=None):
     key = bucket.new_key(keyname)
     if storage_class:
         key.storage_class = storage_class
-    data_str = str(generate_random(size, size).next())
+    data_str = str(next(generate_random(size, size)))
     data = StringIO(data_str)
     key.set_contents_from_file(fp=data)
     return (key, data_str)
@@ -754,7 +752,7 @@ class FakeFile(object):
     """
     def __init__(self, char='A', interrupt=None):
         self.offset = 0
-        self.char = char
+        self.char = bytes(char, 'utf-8')
         self.interrupt = interrupt

     def seek(self, offset, whence=os.SEEK_SET):
@@ -801,7 +799,7 @@ class FakeFileVerifier(object):
         if self.char == None:
             self.char = data[0]
         self.size += size
-        eq(data, self.char*size)
+        eq(data.decode(), self.char*size)

 def _verify_atomic_key_data(key, size=-1, char=None):
     """
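The `/` -> `//` and `.next()` -> `next()` changes in generate_random and _populate_key are behavioral, not cosmetic: in python 3 `/` always yields a float (which range() rejects), and generator objects no longer have a .next() method. Briefly:

print(7 / 2)      # 3.5: true division in python 3
print(7 // 2)     # 3: floor division, valid as a range() bound
gen = (i for i in range(3))
print(next(gen))  # 0: the next() builtin replaces gen.next()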
@@ -1,4 +1,4 @@
-from __future__ import print_function
 import sys
 import collections
 import nose
@@ -8,7 +8,7 @@ from pprint import pprint
 import time
 import boto.exception

-from urlparse import urlparse
+from urllib.parse import urlparse

 from nose.tools import eq_ as eq, ok_ as ok
 from nose.plugins.attrib import attr
@@ -110,7 +110,7 @@ def get_website_url(**kwargs):

 def _test_website_populate_fragment(xml_fragment, fields):
     for k in ['RoutingRules']:
-        if k in fields.keys() and len(fields[k]) > 0:
+        if k in list(fields.keys()) and len(fields[k]) > 0:
             fields[k] = '<%s>%s</%s>' % (k, fields[k], k)
     f = {
         'IndexDocument_Suffix': choose_bucket_prefix(template='index-{random}.html', max_len=32),
@@ -185,7 +185,7 @@ def __website_expected_reponse_status(res, status, reason):

 def _website_expected_default_html(**kwargs):
     fields = []
-    for k in kwargs.keys():
+    for k in list(kwargs.keys()):
         # AmazonS3 seems to be inconsistent, some HTML errors include BucketName, but others do not.
         if k is 'BucketName':
             continue
@@ -217,6 +217,7 @@ def _website_expected_error_response(res, bucket_name, status, reason, code, con
         content = set([content])
     for f in content:
         if f is not IGNORE_FIELD and f is not None:
+            f = bytes(f, 'utf-8')
             ok(f in body, 'HTML should contain "%s"' % (f, ))

 def _website_expected_redirect_response(res, status, reason, new_url):
@@ -237,7 +238,7 @@ def _website_request(bucket_name, path, connect_hostname=None, method='GET', tim
     request_headers={}
     request_headers['Host'] = o.hostname
     request_headers['Accept'] = '*/*'
-    print('Request: {method} {path}\n{headers}'.format(method=method, path=path, headers=''.join(map(lambda t: t[0]+':'+t[1]+"\n", request_headers.items()))))
+    print('Request: {method} {path}\n{headers}'.format(method=method, path=path, headers=''.join([t[0]+':'+t[1]+"\n" for t in list(request_headers.items())])))
     res = _make_raw_request(connect_hostname, config.main.port, method, path, request_headers=request_headers, secure=False, timeout=timeout)
     for (k,v) in res.getheaders():
         print(k,v)
@@ -293,6 +294,7 @@ def test_website_public_bucket_list_public_index():
     res = _website_request(bucket.name, '')
     body = res.read()
     print(body)
+    indexstring = bytes(indexstring, 'utf-8')
     eq(body, indexstring) # default content should match index.html set content
     __website_expected_reponse_status(res, 200, 'OK')
     indexhtml.delete()
@@ -321,6 +323,7 @@ def test_website_private_bucket_list_public_index():
     __website_expected_reponse_status(res, 200, 'OK')
     body = res.read()
     print(body)
+    indexstring = bytes(indexstring, 'utf-8')
     eq(body, indexstring, 'default content should match index.html set content')
     indexhtml.delete()
     bucket.delete()
@@ -511,6 +514,7 @@ def test_website_private_bucket_list_empty_blockederrordoc():
     body = res.read()
     print(body)
     _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied', content=_website_expected_default_html(Code='AccessDenied'), body=body)
+    errorstring = bytes(errorstring, 'utf-8')
     ok(errorstring not in body, 'error content should NOT match error.html set content')

     errorhtml.delete()
@@ -537,6 +541,7 @@ def test_website_public_bucket_list_empty_blockederrordoc():
     body = res.read()
     print(body)
     _website_expected_error_response(res, bucket.name, 404, 'Not Found', 'NoSuchKey', content=_website_expected_default_html(Code='NoSuchKey'), body=body)
+    errorstring = bytes(errorstring, 'utf-8')
     ok(errorstring not in body, 'error content should match error.html set content')

     errorhtml.delete()
@@ -568,6 +573,7 @@ def test_website_public_bucket_list_private_index_blockederrordoc():
     body = res.read()
     print(body)
     _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied', content=_website_expected_default_html(Code='AccessDenied'), body=body)
+    errorstring = bytes(errorstring, 'utf-8')
     ok(errorstring not in body, 'error content should match error.html set content')

     indexhtml.delete()
@@ -600,6 +606,7 @@ def test_website_private_bucket_list_private_index_blockederrordoc():
     body = res.read()
     print(body)
     _website_expected_error_response(res, bucket.name, 403, 'Forbidden', 'AccessDenied', content=_website_expected_default_html(Code='AccessDenied'), body=body)
+    errorstring = bytes(errorstring, 'utf-8')
     ok(errorstring not in body, 'error content should match error.html set content')

     indexhtml.delete()
@@ -1013,7 +1020,7 @@ ROUTING_RULES = {
     """,
 }

-for k in ROUTING_RULES.keys():
+for k in list(ROUTING_RULES.keys()):
     if len(ROUTING_RULES[k]) > 0:
         ROUTING_RULES[k] = "<!-- %s -->\n%s" % (k, ROUTING_RULES[k])

@@ -1142,7 +1149,7 @@ def routing_check(*args, **kwargs):
         #body = res.read()
         #print(body)
         #eq(body, args['content'], 'default content should match index.html set content')
-        ok(res.getheader('Content-Length', -1) > 0)
+        ok(int(res.getheader('Content-Length', -1)) > 0)
     elif args['code'] >= 300 and args['code'] < 400:
         _website_expected_redirect_response(res, args['code'], IGNORE_FIELD, new_url)
     elif args['code'] >= 400:
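The inserted bytes(..., 'utf-8') lines exist because HTTP bodies read as bytes in python 3, and str never compares equal to bytes, so these assertions would otherwise always fail. A minimal illustration:

body = b'<html>index</html>'        # what res.read() returns in python 3
indexstring = '<html>index</html>'  # what the test assembled as str
print(indexstring == body)                  # False: str != bytes, always
print(bytes(indexstring, 'utf-8') == body)  # True once encoded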
@@ -1,6 +1,6 @@
 from nose.tools import eq_ as eq

-import utils
+from . import utils

 def test_generate():
     FIVE_MB = 5 * 1024 * 1024
@@ -28,11 +28,11 @@ def generate_random(size, part_size=5*1024*1024):
     chunk = 1024
     allowed = string.ascii_letters
     for x in range(0, size, part_size):
-        strpart = ''.join([allowed[random.randint(0, len(allowed) - 1)] for _ in xrange(chunk)])
+        strpart = ''.join([allowed[random.randint(0, len(allowed) - 1)] for _ in range(chunk)])
         s = ''
         left = size - x
         this_part_size = min(left, part_size)
-        for y in range(this_part_size / chunk):
+        for y in range(this_part_size // chunk):
             s = s + strpart
         s = s + strpart[:(this_part_size % chunk)]
         yield s
@@ -42,7 +42,7 @@ def generate_random(size, part_size=5*1024*1024):

 # syncs all the regions except for the one passed in
 def region_sync_meta(targets, region):

-    for (k, r) in targets.iteritems():
+    for (k, r) in targets.items():
         if r == region:
             continue
         conf = r.conf
@@ -1,7 +1,7 @@
 from boto.s3.connection import S3Connection
 from boto.exception import BotoServerError
 from boto.s3.key import Key
-from httplib import BadStatusLine
+from http.client import BadStatusLine
 from optparse import OptionParser
 from .. import common

@@ -59,7 +59,7 @@ def descend_graph(decision_graph, node_name, prng):
     except IndexError:
         decision = {}

-    for key, choices in node['set'].iteritems():
+    for key, choices in node['set'].items():
         if key in decision:
             raise DecisionGraphError("Node %s tried to set '%s', but that key was already set by a lower node!" %(node_name, key))
         decision[key] = make_choice(choices, prng)
@@ -85,7 +85,7 @@ def descend_graph(decision_graph, node_name, prng):
             num_reps = prng.randint(size_min, size_max)
             if header in [h for h, v in decision['headers']]:
                 raise DecisionGraphError("Node %s tried to add header '%s', but that header already exists!" %(node_name, header))
-            for _ in xrange(num_reps):
+            for _ in range(num_reps):
                 decision['headers'].append([header, value])

     return decision
@@ -113,7 +113,7 @@ def make_choice(choices, prng):
         if value == 'null' or value == 'None':
             value = ''

-        for _ in xrange(weight):
+        for _ in range(weight):
             weighted_choices.append(value)

     return prng.choice(weighted_choices)
@@ -137,7 +137,8 @@ def expand(decision, value, prng):

 class RepeatExpandingFormatter(string.Formatter):
     charsets = {
-        'printable_no_whitespace': string.printable.translate(None, string.whitespace),
+        'printable_no_whitespace': string.printable.translate(
+            "".maketrans('', '', string.whitespace)),
         'printable': string.printable,
         'punctuation': string.punctuation,
         'whitespace': string.whitespace,
@@ -188,14 +189,15 @@ class RepeatExpandingFormatter(string.Formatter):

         if charset_arg == 'binary' or charset_arg == 'binary_no_whitespace':
             num_bytes = length + 8
-            tmplist = [self.prng.getrandbits(64) for _ in xrange(num_bytes / 8)]
-            tmpstring = struct.pack((num_bytes / 8) * 'Q', *tmplist)
+            tmplist = [self.prng.getrandbits(64) for _ in range(num_bytes // 8)]
+            tmpstring = struct.pack((num_bytes // 8) * 'Q', *tmplist)
             if charset_arg == 'binary_no_whitespace':
-                tmpstring = ''.join(c for c in tmpstring if c not in string.whitespace)
+                tmpstring = b''.join([c] for c in tmpstring if c not in bytes(
+                    string.whitespace, 'utf-8'))
             return tmpstring[0:length]
         else:
             charset = self.charsets[charset_arg]
-            return ''.join([self.prng.choice(charset) for _ in xrange(length)]) # Won't scale nicely
+            return ''.join([self.prng.choice(charset) for _ in range(length)]) # Won't scale nicely


 def parse_options():
@@ -281,29 +283,29 @@ def _main():
     if options.seedfile:
         FH = open(options.seedfile, 'r')
         request_seeds = [int(line) for line in FH if line != '\n']
-        print>>OUT, 'Seedfile: %s' %options.seedfile
-        print>>OUT, 'Number of requests: %d' %len(request_seeds)
+        print('Seedfile: %s' %options.seedfile, file=OUT)
+        print('Number of requests: %d' %len(request_seeds), file=OUT)
     else:
         if options.seed:
-            print>>OUT, 'Initial Seed: %d' %options.seed
-        print>>OUT, 'Number of requests: %d' %options.num_requests
+            print('Initial Seed: %d' %options.seed, file=OUT)
+        print('Number of requests: %d' %options.num_requests, file=OUT)
         random_list = randomlist(options.seed)
        request_seeds = itertools.islice(random_list, options.num_requests)

-    print>>OUT, 'Decision Graph: %s' %options.graph_filename
+    print('Decision Graph: %s' %options.graph_filename, file=OUT)

     graph_file = open(options.graph_filename, 'r')
     decision_graph = yaml.safe_load(graph_file)

     constants = populate_buckets(s3_connection, alt_connection)
-    print>>VERBOSE, "Test Buckets/Objects:"
-    for key, value in constants.iteritems():
-        print>>VERBOSE, "\t%s: %s" %(key, value)
+    print("Test Buckets/Objects:", file=VERBOSE)
+    for key, value in constants.items():
+        print("\t%s: %s" %(key, value), file=VERBOSE)

-    print>>OUT, "Begin Fuzzing..."
-    print>>VERBOSE, '='*80
+    print("Begin Fuzzing...", file=OUT)
+    print('='*80, file=VERBOSE)
     for request_seed in request_seeds:
-        print>>VERBOSE, 'Seed is: %r' %request_seed
+        print('Seed is: %r' %request_seed, file=VERBOSE)
         prng = random.Random(request_seed)
         decision = assemble_decision(decision_graph, prng)
         decision.update(constants)
@@ -321,46 +323,46 @@ def _main():
         except KeyError:
             headers = {}

-        print>>VERBOSE, "%r %r" %(method[:100], path[:100])
-        for h, v in headers.iteritems():
-            print>>VERBOSE, "%r: %r" %(h[:50], v[:50])
-        print>>VERBOSE, "%r\n" % body[:100]
+        print("%r %r" %(method[:100], path[:100]), file=VERBOSE)
+        for h, v in headers.items():
+            print("%r: %r" %(h[:50], v[:50]), file=VERBOSE)
+        print("%r\n" % body[:100], file=VERBOSE)

-        print>>DEBUG, 'FULL REQUEST'
-        print>>DEBUG, 'Method: %r' %method
-        print>>DEBUG, 'Path: %r' %path
-        print>>DEBUG, 'Headers:'
-        for h, v in headers.iteritems():
-            print>>DEBUG, "\t%r: %r" %(h, v)
-        print>>DEBUG, 'Body: %r\n' %body
+        print('FULL REQUEST', file=DEBUG)
+        print('Method: %r' %method, file=DEBUG)
+        print('Path: %r' %path, file=DEBUG)
+        print('Headers:', file=DEBUG)
+        for h, v in headers.items():
+            print("\t%r: %r" %(h, v), file=DEBUG)
+        print('Body: %r\n' %body, file=DEBUG)

         failed = False # Let's be optimistic, shall we?
         try:
             response = s3_connection.make_request(method, path, data=body, headers=headers, override_num_retries=1)
             body = response.read()
-        except BotoServerError, e:
+        except BotoServerError as e:
             response = e
             body = e.body
             failed = True
-        except BadStatusLine, e:
-            print>>OUT, 'FAILED: failed to parse response (BadStatusLine); probably a NUL byte in your request?'
-            print>>VERBOSE, '='*80
+        except BadStatusLine as e:
+            print('FAILED: failed to parse response (BadStatusLine); probably a NUL byte in your request?', file=OUT)
+            print('='*80, file=VERBOSE)
             continue

         if failed:
-            print>>OUT, 'FAILED:'
+            print('FAILED:', file=OUT)
             OLD_VERBOSE = VERBOSE
             OLD_DEBUG = DEBUG
             VERBOSE = DEBUG = OUT
-        print>>VERBOSE, 'Seed was: %r' %request_seed
-        print>>VERBOSE, 'Response status code: %d %s' %(response.status, response.reason)
-        print>>DEBUG, 'Body:\n%s' %body
-        print>>VERBOSE, '='*80
+        print('Seed was: %r' %request_seed, file=VERBOSE)
+        print('Response status code: %d %s' %(response.status, response.reason), file=VERBOSE)
+        print('Body:\n%s' %body, file=DEBUG)
+        print('='*80, file=VERBOSE)
         if failed:
             VERBOSE = OLD_VERBOSE
             DEBUG = OLD_DEBUG

-    print>>OUT, '...done fuzzing'
+    print('...done fuzzing', file=OUT)

     if options.cleanup:
         common.teardown()
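The fuzzer's py2 `print>>stream` chevron syntax becomes the file keyword argument in python 3:

import sys

# py2: print>>sys.stderr, 'warning: %s' % 'disk full'
print('warning: %s' % 'disk full', file=sys.stderr)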
@@ -25,6 +25,7 @@ from nose.tools import assert_true
 from nose.plugins.attrib import attr

 from ...functional.utils import assert_raises
+from functools import reduce

 _decision_graph = {}

@@ -173,21 +174,21 @@ def test_expand_random_binary():

 def test_expand_random_printable_no_whitespace():
 prng = random.Random(1)
-for _ in xrange(1000):
+for _ in range(1000):
 got = expand({}, '{random 500 printable_no_whitespace}', prng)
 assert_true(reduce(lambda x, y: x and y, [x not in string.whitespace and x in string.printable for x in got]))


 def test_expand_random_binary_no_whitespace():
 prng = random.Random(1)
-for _ in xrange(1000):
+for _ in range(1000):
 got = expand({}, '{random 500 binary_no_whitespace}', prng)
 assert_true(reduce(lambda x, y: x and y, [x not in string.whitespace for x in got]))


 def test_expand_random_no_args():
 prng = random.Random(1)
-for _ in xrange(1000):
+for _ in range(1000):
 got = expand({}, '{random}', prng)
 assert_true(0 <= len(got) <= 1000)
 assert_true(reduce(lambda x, y: x and y, [x in string.printable for x in got]))
@@ -195,7 +196,7 @@ def test_expand_random_no_args():

 def test_expand_random_no_charset():
 prng = random.Random(1)
-for _ in xrange(1000):
+for _ in range(1000):
 got = expand({}, '{random 10-30}', prng)
 assert_true(10 <= len(got) <= 30)
 assert_true(reduce(lambda x, y: x and y, [x in string.printable for x in got]))
@@ -203,7 +204,7 @@ def test_expand_random_no_charset():

 def test_expand_random_exact_length():
 prng = random.Random(1)
-for _ in xrange(1000):
+for _ in range(1000):
 got = expand({}, '{random 10 digits}', prng)
 assert_true(len(got) == 10)
 assert_true(reduce(lambda x, y: x and y, [x in string.digits for x in got]))
@@ -300,9 +301,9 @@ def test_weighted_choices():
 prng = random.Random(1)

 choices_made = {}
-for _ in xrange(1000):
+for _ in range(1000):
 choice = make_choice(graph['weighted_node']['choices'], prng)
-if choices_made.has_key(choice):
+if choice in choices_made:
 choices_made[choice] += 1
 else:
 choices_made[choice] = 1
@@ -344,9 +345,9 @@ def test_weighted_set():
 prng = random.Random(1)

 choices_made = {}
-for _ in xrange(1000):
+for _ in range(1000):
 choice = make_choice(graph['weighted_node']['set']['k1'], prng)
-if choices_made.has_key(choice):
+if choice in choices_made:
 choices_made[choice] += 1
 else:
 choices_made[choice] = 1
@@ -392,7 +393,7 @@ def test_expand_headers():
 decision = descend_graph(graph, 'node1', prng)
 expanded_headers = expand_headers(decision, prng)

-for header, value in expanded_headers.iteritems():
+for header, value in expanded_headers.items():
 if header == 'my-header':
 assert_true(value in ['h1', 'h2', 'h3'])
 elif header.startswith('random-header-'):
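The test hunks above retire three Python-2-only idioms at once: `xrange()` (Python 3's `range()` is already lazy), `dict.has_key(k)` (removed; spelled `k in d`), and `dict.iteritems()` (`items()` now returns a lazy view). A small sketch of the dict side:

    choices_made = {}
    for choice in ['h1', 'h2', 'h1']:
        # 'in' replaces the removed has_key() method
        if choice in choices_made:
            choices_made[choice] += 1
        else:
            choices_made[choice] = 1

    # items() is a lazy view in Python 3, so a separate iteritems() is gone
    for header, count in choices_made.items():
        print(header, count)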
@@ -27,7 +27,7 @@ def get_random_files(quantity, mean, stddev, seed):
 list of file handles
 """
 file_generator = realistic.files(mean, stddev, seed)
-return [file_generator.next() for _ in xrange(quantity)]
+return [next(file_generator) for _ in range(quantity)]


 def upload_objects(bucket, files, seed):
@@ -43,9 +43,9 @@ def upload_objects(bucket, files, seed):
 name_generator = realistic.names(15, 4, seed=seed)

 for fp in files:
-print >> sys.stderr, 'sending file with size %dB' % fp.size
+print('sending file with size %dB' % fp.size, file=sys.stderr)
 key = Key(bucket)
-key.key = name_generator.next()
+key.key = next(name_generator)
 key.set_contents_from_file(fp, rewind=True)
 key.set_acl('public-read')
 keys.append(key)
@@ -94,18 +94,18 @@ def _main():

 bucket.set_acl('public-read')
 keys = []
-print >> OUTFILE, 'bucket: %s' % bucket.name
+print('bucket: %s' % bucket.name, file=OUTFILE)
-print >> sys.stderr, 'setup complete, generating files'
+print('setup complete, generating files', file=sys.stderr)
 for profile in common.config.file_generation.groups:
 seed = random.random()
 files = get_random_files(profile[0], profile[1], profile[2], seed)
 keys += upload_objects(bucket, files, seed)

-print >> sys.stderr, 'finished sending files. generating urls'
+print('finished sending files. generating urls', file=sys.stderr)
 for key in keys:
-print >> OUTFILE, key.generate_url(0, query_auth=False)
+print(key.generate_url(0, query_auth=False), file=OUTFILE)

-print >> sys.stderr, 'done'
+print('done', file=sys.stderr)


 def main():
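`generate_objects` also hits the generator rename: Python 3 drops the `.next()` method in favor of the `next()` builtin, which additionally accepts a default. A sketch with a toy generator standing in for `realistic.names()`:

    def toy_names():
        # stand-in for realistic.names(); yields names forever
        n = 0
        while True:
            yield 'name-{num}'.format(num=n)
            n += 1

    gen = toy_names()
    print(next(gen))                      # the builtin replaces gen.next()
    print([next(gen) for _ in range(3)])  # xrange() is gone as well
    print(next(iter([]), 'fallback'))     # next() can also take a default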
@@ -11,8 +11,8 @@ import traceback
 import random
 import yaml

-import realistic
+from . import realistic
-import common
+from . import common

 NANOSECOND = int(1e9)

@@ -57,7 +57,7 @@ def reader(bucket, worker_id, file_names, queue, rand):
 traceback=traceback.format_exc(),
 ),
 )
-print "ERROR:", m
+print("ERROR:", m)
 else:
 elapsed = end - start
 result.update(
@@ -158,16 +158,16 @@ def main():
 for name in ['names', 'contents', 'writer', 'reader']:
 seeds.setdefault(name, rand.randrange(2**32))

-print 'Using random seeds: {seeds}'.format(seeds=seeds)
+print('Using random seeds: {seeds}'.format(seeds=seeds))

 # setup bucket and other objects
 bucket_name = common.choose_bucket_prefix(config.readwrite.bucket, max_len=30)
 bucket = conn.create_bucket(bucket_name)
-print "Created bucket: {name}".format(name=bucket.name)
+print("Created bucket: {name}".format(name=bucket.name))

 # check flag for deterministic file name creation
 if not config.readwrite.get('deterministic_file_names'):
-print 'Creating random file names'
+print('Creating random file names')
 file_names = realistic.names(
 mean=15,
 stddev=4,
@@ -176,9 +176,9 @@ def main():
 file_names = itertools.islice(file_names, config.readwrite.files.num)
 file_names = list(file_names)
 else:
-print 'Creating file names that are deterministic'
+print('Creating file names that are deterministic')
 file_names = []
-for x in xrange(config.readwrite.files.num):
+for x in range(config.readwrite.files.num):
 file_names.append('test_file_{num}'.format(num=x))

 files = realistic.files2(
@@ -191,7 +191,7 @@ def main():

 # warmup - get initial set of files uploaded if there are any writers specified
 if config.readwrite.writers > 0:
-print "Uploading initial set of {num} files".format(num=config.readwrite.files.num)
+print("Uploading initial set of {num} files".format(num=config.readwrite.files.num))
 warmup_pool = gevent.pool.Pool(size=100)
 for file_name in file_names:
 fp = next(files)
@@ -204,15 +204,15 @@ def main():
 warmup_pool.join()

 # main work
-print "Starting main worker loop."
+print("Starting main worker loop.")
-print "Using file size: {size} +- {stddev}".format(size=config.readwrite.files.size, stddev=config.readwrite.files.stddev)
+print("Using file size: {size} +- {stddev}".format(size=config.readwrite.files.size, stddev=config.readwrite.files.stddev))
-print "Spawning {w} writers and {r} readers...".format(w=config.readwrite.writers, r=config.readwrite.readers)
+print("Spawning {w} writers and {r} readers...".format(w=config.readwrite.writers, r=config.readwrite.readers))
 group = gevent.pool.Group()
 rand_writer = random.Random(seeds['writer'])

 # Don't create random files if deterministic_files_names is set and true
 if not config.readwrite.get('deterministic_file_names'):
-for x in xrange(config.readwrite.writers):
+for x in range(config.readwrite.writers):
 this_rand = random.Random(rand_writer.randrange(2**32))
 group.spawn(
 writer,
@@ -229,7 +229,7 @@ def main():
 # this loop needs no additional qualifiers. If zero readers are specified,
 # it will behave as expected (no data is read)
 rand_reader = random.Random(seeds['reader'])
-for x in xrange(config.readwrite.readers):
+for x in range(config.readwrite.readers):
 this_rand = random.Random(rand_reader.randrange(2**32))
 group.spawn(
 reader,
@@ -246,7 +246,7 @@ def main():

 # wait for all the tests to finish
 group.join()
-print 'post-join, queue size {size}'.format(size=q.qsize())
+print('post-join, queue size {size}'.format(size=q.qsize()))

 if q.qsize() > 0:
 for temp_dict in q:
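The `import realistic` → `from . import realistic` hunks follow from PEP 328: Python 3 import statements are absolute by default, so a module inside the `s3tests` package can no longer pick up a sibling module with a bare import. A runnable sketch that builds a toy package (`demo` is a made-up name) and shows the explicit relative import resolving:

    import importlib
    import os
    import sys
    import tempfile

    # Toy layout mirroring s3tests/: a package with two sibling modules.
    root = tempfile.mkdtemp()
    pkg = os.path.join(root, 'demo')
    os.makedirs(pkg)
    modules = {
        '__init__.py': '',
        'realistic.py': 'VALUE = 1\n',
        # py2 could say "import realistic" here; py3 must be explicit:
        'readwrite.py': 'from . import realistic\nX = realistic.VALUE\n',
    }
    for name, body in modules.items():
        with open(os.path.join(pkg, name), 'w') as f:
            f.write(body)

    sys.path.insert(0, root)
    print(importlib.import_module('demo.readwrite').X)  # -> 1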
@@ -47,9 +47,9 @@ class FileValidator(object):
 self.original_hash, binary = contents[-40:], contents[:-40]
 self.new_hash = hashlib.sha1(binary).hexdigest()
 if not self.new_hash == self.original_hash:
-print 'original hash: ', self.original_hash
+print('original hash: ', self.original_hash)
-print 'new hash: ', self.new_hash
+print('new hash: ', self.new_hash)
-print 'size: ', self._file.tell()
+print('size: ', self._file.tell())
 return False
 return True

@@ -115,7 +115,7 @@ class RandomContentFile(object):
 size = min(self.size, 1*1024*1024) # generate at most 1 MB at a time
 chunks = int(math.ceil(size/8.0)) # number of 8-byte chunks to create

-l = [self.random.getrandbits(64) for _ in xrange(chunks)]
+l = [self.random.getrandbits(64) for _ in range(chunks)]
 s = struct.pack(chunks*'Q', *l)
 return s

@@ -252,7 +252,7 @@ def files2(mean, stddev, seed=None, numfiles=10):
 """
 # pre-compute all the files (and save with TemporaryFiles)
 fs = []
-for _ in xrange(numfiles):
+for _ in range(numfiles):
 t = tempfile.SpooledTemporaryFile()
 t.write(generate_file_contents(random.normalvariate(mean, stddev)))
 t.seek(0)
@@ -277,5 +277,5 @@ def names(mean, stddev, charset=None, seed=None):
 length = int(rand.normalvariate(mean, stddev))
 if length > 0:
 break
-name = ''.join(rand.choice(charset) for _ in xrange(length))
+name = ''.join(rand.choice(charset) for _ in range(length))
 yield name
@@ -11,8 +11,8 @@ import traceback
 import random
 import yaml

-import realistic
+from . import realistic
-import common
+from . import common

 NANOSECOND = int(1e9)

@@ -141,12 +141,12 @@ def main():
 for name in ['names', 'contents', 'writer', 'reader']:
 seeds.setdefault(name, rand.randrange(2**32))

-print 'Using random seeds: {seeds}'.format(seeds=seeds)
+print('Using random seeds: {seeds}'.format(seeds=seeds))

 # setup bucket and other objects
 bucket_name = common.choose_bucket_prefix(config.roundtrip.bucket, max_len=30)
 bucket = conn.create_bucket(bucket_name)
-print "Created bucket: {name}".format(name=bucket.name)
+print("Created bucket: {name}".format(name=bucket.name))
 objnames = realistic.names(
 mean=15,
 stddev=4,
@@ -163,10 +163,10 @@ def main():

 logger_g = gevent.spawn(yaml.safe_dump_all, q, stream=real_stdout)

-print "Writing {num} objects with {w} workers...".format(
+print("Writing {num} objects with {w} workers...".format(
 num=config.roundtrip.files.num,
 w=config.roundtrip.writers,
-)
+))
 pool = gevent.pool.Pool(size=config.roundtrip.writers)
 start = time.time()
 for objname in objnames:
@@ -186,10 +186,10 @@ def main():
 duration=int(round(elapsed * NANOSECOND)),
 ))

-print "Reading {num} objects with {w} workers...".format(
+print("Reading {num} objects with {w} workers...".format(
 num=config.roundtrip.files.num,
 w=config.roundtrip.readers,
-)
+))
 # avoid accessing them in the same order as the writing
 rand.shuffle(objnames)
 pool = gevent.pool.Pool(size=config.roundtrip.readers)
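Note the lone `)` → `))` lines: when a multi-line py2 print statement is wrapped in `print(...)`, the closing parenthesis of the `.format()` call gains a second one for the new `print()` call. A sketch with stand-in values for the `config.roundtrip` fields:

    num, writers = 10, 4  # stand-ins for config.roundtrip.files.num / .writers

    print("Writing {num} objects with {w} workers...".format(
        num=num,
        w=writers,
    ))  # one paren closes format(), the added one closes print()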
@@ -57,7 +57,7 @@ def main():

 def calculate_stats(options, total, durations, min_time, max_time, errors,
 success):
-print 'Calculating statistics...'
+print('Calculating statistics...')

 f = sys.stdin
 if options.input:
@@ -81,13 +81,13 @@ def calculate_stats(options, total, durations, min_time, max_time, errors,
 end = start + duration / float(NANOSECONDS)

 if options.verbose:
-print "[{type}] POSIX time: {start:>18.2f} - {end:<18.2f} " \
+print("[{type}] POSIX time: {start:>18.2f} - {end:<18.2f} " \
 "{data:>11.2f} KB".format(
 type=type_,
 start=start,
 end=end,
 data=data_size / 1024.0, # convert to KB
-)
+))

 # update time boundaries
 prev = min_time.setdefault(type_, start)
@@ -106,7 +106,7 @@ def calculate_stats(options, total, durations, min_time, max_time, errors,
 total[type_] = total.get(type_, 0) + data_size

 def print_results(total, durations, min_time, max_time, errors, success):
-for type_ in total.keys():
+for type_ in list(total.keys()):
 trans_success = success.get(type_, 0)
 trans_fail = errors.get(type_, 0)
 trans = trans_success + trans_fail
@@ -121,7 +121,7 @@ def print_results(total, durations, min_time, max_time, errors, success):
 trans_long = max(durations[type_]) / float(NANOSECONDS)
 trans_short = min(durations[type_]) / float(NANOSECONDS)

-print OUTPUT_FORMAT.format(
+print(OUTPUT_FORMAT.format(
 type=type_,
 trans_success=trans_success,
 trans_fail=trans_fail,
@@ -135,7 +135,7 @@ def print_results(total, durations, min_time, max_time, errors, success):
 conc=conc,
 trans_long=trans_long,
 trans_short=trans_short,
-)
+))

 if __name__ == '__main__':
 main()
@@ -1,5 +1,5 @@
 import boto.s3.connection
-import bunch
+import munch
 import itertools
 import os
 import random
@@ -11,8 +11,8 @@ from lxml import etree
 from doctest import Example
 from lxml.doctestcompare import LXMLOutputChecker

-s3 = bunch.Bunch()
+s3 = munch.Munch()
-config = bunch.Bunch()
+config = munch.Munch()
 prefix = ''

 bucket_counter = itertools.count(1)
@@ -51,10 +51,10 @@ def nuke_bucket(bucket):
 while deleted_cnt:
 deleted_cnt = 0
 for key in bucket.list():
-print 'Cleaning bucket {bucket} key {key}'.format(
+print('Cleaning bucket {bucket} key {key}'.format(
 bucket=bucket,
 key=key,
-)
+))
 key.set_canned_acl('private')
 key.delete()
 deleted_cnt += 1
@@ -67,26 +67,26 @@ def nuke_bucket(bucket):
 and e.body == ''):
 e.error_code = 'AccessDenied'
 if e.error_code != 'AccessDenied':
-print 'GOT UNWANTED ERROR', e.error_code
+print('GOT UNWANTED ERROR', e.error_code)
 raise
 # seems like we're not the owner of the bucket; ignore
 pass

 def nuke_prefixed_buckets():
-for name, conn in s3.items():
+for name, conn in list(s3.items()):
-print 'Cleaning buckets from connection {name}'.format(name=name)
+print('Cleaning buckets from connection {name}'.format(name=name))
 for bucket in conn.get_all_buckets():
 if bucket.name.startswith(prefix):
-print 'Cleaning bucket {bucket}'.format(bucket=bucket)
+print('Cleaning bucket {bucket}'.format(bucket=bucket))
 nuke_bucket(bucket)

-print 'Done with cleanup of test buckets.'
+print('Done with cleanup of test buckets.')

 def read_config(fp):
-config = bunch.Bunch()
+config = munch.Munch()
 g = yaml.safe_load_all(fp)
 for new in g:
-config.update(bunch.bunchify(new))
+config.update(munch.Munchify(new))
 return config

 def connect(conf):
@@ -97,7 +97,7 @@ def connect(conf):
 access_key='aws_access_key_id',
 secret_key='aws_secret_access_key',
 )
-kwargs = dict((mapping[k],v) for (k,v) in conf.iteritems() if k in mapping)
+kwargs = dict((mapping[k],v) for (k,v) in conf.items() if k in mapping)
 #process calling_format argument
 calling_formats = dict(
 ordinary=boto.s3.connection.OrdinaryCallingFormat(),
@@ -105,7 +105,7 @@ def connect(conf):
 vhost=boto.s3.connection.VHostCallingFormat(),
 )
 kwargs['calling_format'] = calling_formats['ordinary']
-if conf.has_key('calling_format'):
+if 'calling_format' in conf:
 raw_calling_format = conf['calling_format']
 try:
 kwargs['calling_format'] = calling_formats[raw_calling_format]
@@ -146,7 +146,7 @@ def setup():
 raise RuntimeError("Empty Prefix! Aborting!")

 defaults = config.s3.defaults
-for section in config.s3.keys():
+for section in list(config.s3.keys()):
 if section == 'defaults':
 continue

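Two things happen in this file: the unmaintained `bunch` library is swapped for its py3-compatible fork `munch` (the same attribute-style dict under a new name), and dict iterations that mutate the dict are wrapped in `list(...)`, because Python 3's `keys()`/`items()` views are invalidated by mutation. A sketch, assuming `munch` is installed (`pip install munch`); note the lowercase `munchify` helper:

    import munch

    config = munch.Munch()
    config.update(munch.munchify({'s3': {'defaults': {'host': 'localhost'}}}))
    print(config.s3.defaults.host)  # attribute access, as with bunch.Bunch

    # list() snapshots the view so the dict can be mutated inside the loop;
    # iterating a live view while deleting raises RuntimeError on Python 3.
    for section in list(config.keys()):
        del config[section]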
@@ -3,14 +3,14 @@ from botocore import UNSIGNED
 from botocore.client import Config
 from botocore.exceptions import ClientError
 from botocore.handlers import disable_signing
-import ConfigParser
+import configparser
 import os
-import bunch
+import munch
 import random
 import string
 import itertools

-config = bunch.Bunch
+config = munch.Munch

 # this will be assigned by setup()
 prefix = None
@@ -125,17 +125,17 @@ def nuke_prefixed_buckets(prefix, client=None):
 for obj in delete_markers:
 response = client.delete_object(Bucket=bucket_name,Key=obj[0],VersionId=obj[1])
 try:
-client.delete_bucket(Bucket=bucket_name)
+response = client.delete_bucket(Bucket=bucket_name)
-except ClientError, e:
+except ClientError:
 # if DELETE times out, the retry may see NoSuchBucket
-if e.response['Error']['Code'] != 'NoSuchBucket':
+if response['Error']['Code'] != 'NoSuchBucket':
-raise e
+raise ClientError
 pass

 print('Done with cleanup of buckets in tests.')

 def setup():
-cfg = ConfigParser.RawConfigParser()
+cfg = configparser.RawConfigParser()
 try:
 path = os.environ['S3TEST_CONF']
 except KeyError:
@@ -143,8 +143,7 @@ def setup():
 'To run tests, point environment '
 + 'variable S3TEST_CONF to a config file.',
 )
-with file(path) as f:
+cfg.read(path)
-cfg.readfp(f)

 if not cfg.defaults():
 raise RuntimeError('Your config file is missing the DEFAULT section!')
@@ -175,16 +174,17 @@ def setup():
 config.main_email = cfg.get('s3 main',"email")
 try:
 config.main_kms_keyid = cfg.get('s3 main',"kms_keyid")
-except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
+except (configparser.NoSectionError, configparser.NoOptionError):
 config.main_kms_keyid = 'testkey-1'

 try:
 config.main_kms_keyid2 = cfg.get('s3 main',"kms_keyid2")
-except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
+except (configparser.NoSectionError, configparser.NoOptionError):
 config.main_kms_keyid2 = 'testkey-2'

 try:
 config.main_api_name = cfg.get('s3 main',"api_name")
-except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
+except (configparser.NoSectionError, configparser.NoOptionError):
 config.main_api_name = ""
 pass

@@ -203,7 +203,7 @@ def setup():
 # vars from the fixtures section
 try:
 template = cfg.get('fixtures', "bucket prefix")
-except (ConfigParser.NoOptionError):
+except (configparser.NoOptionError):
 template = 'test-{random}-'
 prefix = choose_bucket_prefix(template=template)

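The boto3 fixture module picks up the renamed stdlib module (`ConfigParser` → `configparser`) and drops the `file(path)` call, which no longer exists in py3; `read()` accepts a path directly. A sketch of the fallback pattern the setup code uses (the section and option names mirror this repo's config format):

    import configparser

    cfg = configparser.RawConfigParser()
    cfg.read('s3tests.conf')  # takes a filename; no file()/readfp() needed

    try:
        keyid = cfg.get('s3 main', 'kms_keyid')
    except (configparser.NoSectionError, configparser.NoOptionError):
        keyid = 'testkey-1'  # same default the fixture falls back to
    print(keyid)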
@@ -289,7 +289,7 @@ def test_object_create_bad_contentlength_mismatch_above():
 key_name = 'foo'
 headers = {'Content-Length': str(length)}
 add_headers = (lambda **kwargs: kwargs['params']['headers'].update(headers))
-client.meta.events.register('before-sign.s3.PutObject', add_headers_before_sign)
+client.meta.events.register('before-sign.s3.PutObject', add_headers)

 e = assert_raises(ClientError, client.put_object, Bucket=bucket_name, Key=key_name, Body=content)
 status, error_code = _get_status_and_error_code(e.response)
File diff suppressed because it is too large
@@ -1,6 +1,6 @@
 from nose.tools import eq_ as eq

-import utils
+from . import utils

 def test_generate():
 FIVE_MB = 5 * 1024 * 1024
@@ -28,11 +28,11 @@ def generate_random(size, part_size=5*1024*1024):
 chunk = 1024
 allowed = string.ascii_letters
 for x in range(0, size, part_size):
-strpart = ''.join([allowed[random.randint(0, len(allowed) - 1)] for _ in xrange(chunk)])
+strpart = ''.join([allowed[random.randint(0, len(allowed) - 1)] for _ in range(chunk)])
 s = ''
 left = size - x
 this_part_size = min(left, part_size)
-for y in range(this_part_size / chunk):
+for y in range(this_part_size // chunk):
 s = s + strpart
 s = s + strpart[:(this_part_size % chunk)]
 yield s
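The `/` → `//` switch in `generate_random` is the true-division change: in Python 3, `int / int` yields a float, which `range()` refuses. Floor division keeps the old py2 semantics for the chunk count:

    this_part_size, chunk = 5000, 1024

    print(this_part_size / chunk)    # 4.8828125 -- a float in Python 3
    print(this_part_size // chunk)   # 4 -- what range() needs
    print(list(range(this_part_size // chunk)))  # [0, 1, 2, 3]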
print>>VERBOSE, "%r %r" %(method[:100], path[:100])
|
print("%r %r" %(method[:100], path[:100]), file=VERBOSE)
|
||||||
for h, v in headers.iteritems():
|
for h, v in headers.items():
|
||||||
print>>VERBOSE, "%r: %r" %(h[:50], v[:50])
|
print("%r: %r" %(h[:50], v[:50]), file=VERBOSE)
|
||||||
print>>VERBOSE, "%r\n" % body[:100]
|
print("%r\n" % body[:100], file=VERBOSE)
|
||||||
|
|
||||||
print>>DEBUG, 'FULL REQUEST'
|
print('FULL REQUEST', file=DEBUG)
|
||||||
print>>DEBUG, 'Method: %r' %method
|
print('Method: %r' %method, file=DEBUG)
|
||||||
print>>DEBUG, 'Path: %r' %path
|
print('Path: %r' %path, file=DEBUG)
|
||||||
print>>DEBUG, 'Headers:'
|
print('Headers:', file=DEBUG)
|
||||||
for h, v in headers.iteritems():
|
for h, v in headers.items():
|
||||||
print>>DEBUG, "\t%r: %r" %(h, v)
|
print("\t%r: %r" %(h, v), file=DEBUG)
|
||||||
print>>DEBUG, 'Body: %r\n' %body
|
print('Body: %r\n' %body, file=DEBUG)
|
||||||
|
|
||||||
failed = False # Let's be optimistic, shall we?
|
failed = False # Let's be optimistic, shall we?
|
||||||
try:
|
try:
|
||||||
response = s3_connection.make_request(method, path, data=body, headers=headers, override_num_retries=1)
|
response = s3_connection.make_request(method, path, data=body, headers=headers, override_num_retries=1)
|
||||||
body = response.read()
|
body = response.read()
|
||||||
except BotoServerError, e:
|
except BotoServerError as e:
|
||||||
response = e
|
response = e
|
||||||
body = e.body
|
body = e.body
|
||||||
failed = True
|
failed = True
|
||||||
except BadStatusLine, e:
|
except BadStatusLine as e:
|
||||||
print>>OUT, 'FAILED: failed to parse response (BadStatusLine); probably a NUL byte in your request?'
|
print('FAILED: failed to parse response (BadStatusLine); probably a NUL byte in your request?', file=OUT)
|
||||||
print>>VERBOSE, '='*80
|
print('='*80, file=VERBOSE)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if failed:
|
if failed:
|
||||||
print>>OUT, 'FAILED:'
|
print('FAILED:', file=OUT)
|
||||||
OLD_VERBOSE = VERBOSE
|
OLD_VERBOSE = VERBOSE
|
||||||
OLD_DEBUG = DEBUG
|
OLD_DEBUG = DEBUG
|
||||||
VERBOSE = DEBUG = OUT
|
VERBOSE = DEBUG = OUT
|
||||||
print>>VERBOSE, 'Seed was: %r' %request_seed
|
print('Seed was: %r' %request_seed, file=VERBOSE)
|
||||||
print>>VERBOSE, 'Response status code: %d %s' %(response.status, response.reason)
|
print('Response status code: %d %s' %(response.status, response.reason), file=VERBOSE)
|
||||||
print>>DEBUG, 'Body:\n%s' %body
|
print('Body:\n%s' %body, file=DEBUG)
|
||||||
print>>VERBOSE, '='*80
|
print('='*80, file=VERBOSE)
|
||||||
if failed:
|
if failed:
|
||||||
VERBOSE = OLD_VERBOSE
|
VERBOSE = OLD_VERBOSE
|
||||||
DEBUG = OLD_DEBUG
|
DEBUG = OLD_DEBUG
|
||||||
|
|
||||||
print>>OUT, '...done fuzzing'
|
print('...done fuzzing', file=OUT)
|
||||||
|
|
||||||
if options.cleanup:
|
if options.cleanup:
|
||||||
common.teardown()
|
common.teardown()
|
||||||
|
|
|
@ -25,6 +25,7 @@ from nose.tools import assert_true
|
||||||
from nose.plugins.attrib import attr
|
from nose.plugins.attrib import attr
|
||||||
|
|
||||||
from ...functional.utils import assert_raises
|
from ...functional.utils import assert_raises
|
||||||
|
from functools import reduce
|
||||||
|
|
||||||
_decision_graph = {}
|
_decision_graph = {}
|
||||||
|
|
||||||
|
@ -173,21 +174,21 @@ def test_expand_random_binary():
|
||||||
|
|
||||||
def test_expand_random_printable_no_whitespace():
|
def test_expand_random_printable_no_whitespace():
|
||||||
prng = random.Random(1)
|
prng = random.Random(1)
|
||||||
for _ in xrange(1000):
|
for _ in range(1000):
|
||||||
got = expand({}, '{random 500 printable_no_whitespace}', prng)
|
got = expand({}, '{random 500 printable_no_whitespace}', prng)
|
||||||
assert_true(reduce(lambda x, y: x and y, [x not in string.whitespace and x in string.printable for x in got]))
|
assert_true(reduce(lambda x, y: x and y, [x not in string.whitespace and x in string.printable for x in got]))
|
||||||
|
|
||||||
|
|
||||||
def test_expand_random_binary_no_whitespace():
|
def test_expand_random_binary_no_whitespace():
|
||||||
prng = random.Random(1)
|
prng = random.Random(1)
|
||||||
for _ in xrange(1000):
|
for _ in range(1000):
|
||||||
got = expand({}, '{random 500 binary_no_whitespace}', prng)
|
got = expand({}, '{random 500 binary_no_whitespace}', prng)
|
||||||
assert_true(reduce(lambda x, y: x and y, [x not in string.whitespace for x in got]))
|
assert_true(reduce(lambda x, y: x and y, [x not in string.whitespace for x in got]))
|
||||||
|
|
||||||
|
|
||||||
def test_expand_random_no_args():
|
def test_expand_random_no_args():
|
||||||
prng = random.Random(1)
|
prng = random.Random(1)
|
||||||
for _ in xrange(1000):
|
for _ in range(1000):
|
||||||
got = expand({}, '{random}', prng)
|
got = expand({}, '{random}', prng)
|
||||||
assert_true(0 <= len(got) <= 1000)
|
assert_true(0 <= len(got) <= 1000)
|
||||||
assert_true(reduce(lambda x, y: x and y, [x in string.printable for x in got]))
|
assert_true(reduce(lambda x, y: x and y, [x in string.printable for x in got]))
|
||||||
|
@ -195,7 +196,7 @@ def test_expand_random_no_args():
|
||||||
|
|
||||||
def test_expand_random_no_charset():
|
def test_expand_random_no_charset():
|
||||||
prng = random.Random(1)
|
prng = random.Random(1)
|
||||||
for _ in xrange(1000):
|
for _ in range(1000):
|
||||||
got = expand({}, '{random 10-30}', prng)
|
got = expand({}, '{random 10-30}', prng)
|
||||||
assert_true(10 <= len(got) <= 30)
|
assert_true(10 <= len(got) <= 30)
|
||||||
assert_true(reduce(lambda x, y: x and y, [x in string.printable for x in got]))
|
assert_true(reduce(lambda x, y: x and y, [x in string.printable for x in got]))
|
||||||
|
@ -203,7 +204,7 @@ def test_expand_random_no_charset():
|
||||||
|
|
||||||
def test_expand_random_exact_length():
|
def test_expand_random_exact_length():
|
||||||
prng = random.Random(1)
|
prng = random.Random(1)
|
||||||
for _ in xrange(1000):
|
for _ in range(1000):
|
||||||
got = expand({}, '{random 10 digits}', prng)
|
got = expand({}, '{random 10 digits}', prng)
|
||||||
assert_true(len(got) == 10)
|
assert_true(len(got) == 10)
|
||||||
assert_true(reduce(lambda x, y: x and y, [x in string.digits for x in got]))
|
assert_true(reduce(lambda x, y: x and y, [x in string.digits for x in got]))
|
||||||
|
@ -300,9 +301,9 @@ def test_weighted_choices():
|
||||||
prng = random.Random(1)
|
prng = random.Random(1)
|
||||||
|
|
||||||
choices_made = {}
|
choices_made = {}
|
||||||
for _ in xrange(1000):
|
for _ in range(1000):
|
||||||
choice = make_choice(graph['weighted_node']['choices'], prng)
|
choice = make_choice(graph['weighted_node']['choices'], prng)
|
||||||
if choices_made.has_key(choice):
|
if choice in choices_made:
|
||||||
choices_made[choice] += 1
|
choices_made[choice] += 1
|
||||||
else:
|
else:
|
||||||
choices_made[choice] = 1
|
choices_made[choice] = 1
|
||||||
|
@ -344,9 +345,9 @@ def test_weighted_set():
|
||||||
prng = random.Random(1)
|
prng = random.Random(1)
|
||||||
|
|
||||||
choices_made = {}
|
choices_made = {}
|
||||||
for _ in xrange(1000):
|
for _ in range(1000):
|
||||||
choice = make_choice(graph['weighted_node']['set']['k1'], prng)
|
choice = make_choice(graph['weighted_node']['set']['k1'], prng)
|
||||||
if choices_made.has_key(choice):
|
if choice in choices_made:
|
||||||
choices_made[choice] += 1
|
choices_made[choice] += 1
|
||||||
else:
|
else:
|
||||||
choices_made[choice] = 1
|
choices_made[choice] = 1
|
||||||
|
@ -392,7 +393,7 @@ def test_expand_headers():
|
||||||
decision = descend_graph(graph, 'node1', prng)
|
decision = descend_graph(graph, 'node1', prng)
|
||||||
expanded_headers = expand_headers(decision, prng)
|
expanded_headers = expand_headers(decision, prng)
|
||||||
|
|
||||||
for header, value in expanded_headers.iteritems():
|
for header, value in expanded_headers.items():
|
||||||
if header == 'my-header':
|
if header == 'my-header':
|
||||||
assert_true(value in ['h1', 'h2', 'h3'])
|
assert_true(value in ['h1', 'h2', 'h3'])
|
||||||
elif header.startswith('random-header-'):
|
elif header.startswith('random-header-'):
|
||||||
|
|
|
@ -27,7 +27,7 @@ def get_random_files(quantity, mean, stddev, seed):
|
||||||
list of file handles
|
list of file handles
|
||||||
"""
|
"""
|
||||||
file_generator = realistic.files(mean, stddev, seed)
|
file_generator = realistic.files(mean, stddev, seed)
|
||||||
return [file_generator.next() for _ in xrange(quantity)]
|
return [next(file_generator) for _ in range(quantity)]
|
||||||
|
|
||||||
|
|
||||||
def upload_objects(bucket, files, seed):
|
def upload_objects(bucket, files, seed):
|
||||||
|
@ -43,9 +43,9 @@ def upload_objects(bucket, files, seed):
|
||||||
name_generator = realistic.names(15, 4, seed=seed)
|
name_generator = realistic.names(15, 4, seed=seed)
|
||||||
|
|
||||||
for fp in files:
|
for fp in files:
|
||||||
print >> sys.stderr, 'sending file with size %dB' % fp.size
|
print('sending file with size %dB' % fp.size, file=sys.stderr)
|
||||||
key = Key(bucket)
|
key = Key(bucket)
|
||||||
key.key = name_generator.next()
|
key.key = next(name_generator)
|
||||||
key.set_contents_from_file(fp, rewind=True)
|
key.set_contents_from_file(fp, rewind=True)
|
||||||
key.set_acl('public-read')
|
key.set_acl('public-read')
|
||||||
keys.append(key)
|
keys.append(key)
|
||||||
|
@ -94,18 +94,18 @@ def _main():
|
||||||
|
|
||||||
bucket.set_acl('public-read')
|
bucket.set_acl('public-read')
|
||||||
keys = []
|
keys = []
|
||||||
print >> OUTFILE, 'bucket: %s' % bucket.name
|
print('bucket: %s' % bucket.name, file=OUTFILE)
|
||||||
print >> sys.stderr, 'setup complete, generating files'
|
print('setup complete, generating files', file=sys.stderr)
|
||||||
for profile in common.config.file_generation.groups:
|
for profile in common.config.file_generation.groups:
|
||||||
seed = random.random()
|
seed = random.random()
|
||||||
files = get_random_files(profile[0], profile[1], profile[2], seed)
|
files = get_random_files(profile[0], profile[1], profile[2], seed)
|
||||||
keys += upload_objects(bucket, files, seed)
|
keys += upload_objects(bucket, files, seed)
|
||||||
|
|
||||||
print >> sys.stderr, 'finished sending files. generating urls'
|
print('finished sending files. generating urls', file=sys.stderr)
|
||||||
for key in keys:
|
for key in keys:
|
||||||
print >> OUTFILE, key.generate_url(0, query_auth=False)
|
print(key.generate_url(0, query_auth=False), file=OUTFILE)
|
||||||
|
|
||||||
print >> sys.stderr, 'done'
|
print('done', file=sys.stderr)
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
|
|
|
@ -11,8 +11,8 @@ import traceback
|
||||||
import random
|
import random
|
||||||
import yaml
|
import yaml
|
||||||
|
|
||||||
import realistic
|
from . import realistic
|
||||||
import common
|
from . import common
|
||||||
|
|
||||||
NANOSECOND = int(1e9)
|
NANOSECOND = int(1e9)
|
||||||
|
|
||||||
|
@ -57,7 +57,7 @@ def reader(bucket, worker_id, file_names, queue, rand):
|
||||||
traceback=traceback.format_exc(),
|
traceback=traceback.format_exc(),
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
print "ERROR:", m
|
print("ERROR:", m)
|
||||||
else:
|
else:
|
||||||
elapsed = end - start
|
elapsed = end - start
|
||||||
result.update(
|
result.update(
|
||||||
|
@ -158,16 +158,16 @@ def main():
|
||||||
for name in ['names', 'contents', 'writer', 'reader']:
|
for name in ['names', 'contents', 'writer', 'reader']:
|
||||||
seeds.setdefault(name, rand.randrange(2**32))
|
seeds.setdefault(name, rand.randrange(2**32))
|
||||||
|
|
||||||
print 'Using random seeds: {seeds}'.format(seeds=seeds)
|
print('Using random seeds: {seeds}'.format(seeds=seeds))
|
||||||
|
|
||||||
# setup bucket and other objects
|
# setup bucket and other objects
|
||||||
bucket_name = common.choose_bucket_prefix(config.readwrite.bucket, max_len=30)
|
bucket_name = common.choose_bucket_prefix(config.readwrite.bucket, max_len=30)
|
||||||
bucket = conn.create_bucket(bucket_name)
|
bucket = conn.create_bucket(bucket_name)
|
||||||
print "Created bucket: {name}".format(name=bucket.name)
|
print("Created bucket: {name}".format(name=bucket.name))
|
||||||
|
|
||||||
# check flag for deterministic file name creation
|
# check flag for deterministic file name creation
|
||||||
if not config.readwrite.get('deterministic_file_names'):
|
if not config.readwrite.get('deterministic_file_names'):
|
||||||
print 'Creating random file names'
|
print('Creating random file names')
|
||||||
file_names = realistic.names(
|
file_names = realistic.names(
|
||||||
mean=15,
|
mean=15,
|
||||||
stddev=4,
|
stddev=4,
|
||||||
|
@ -176,9 +176,9 @@ def main():
|
||||||
file_names = itertools.islice(file_names, config.readwrite.files.num)
|
file_names = itertools.islice(file_names, config.readwrite.files.num)
|
||||||
file_names = list(file_names)
|
file_names = list(file_names)
|
||||||
else:
|
else:
|
||||||
print 'Creating file names that are deterministic'
|
print('Creating file names that are deterministic')
|
||||||
file_names = []
|
file_names = []
|
||||||
for x in xrange(config.readwrite.files.num):
|
for x in range(config.readwrite.files.num):
|
||||||
file_names.append('test_file_{num}'.format(num=x))
|
file_names.append('test_file_{num}'.format(num=x))
|
||||||
|
|
||||||
files = realistic.files2(
|
files = realistic.files2(
|
||||||
|
@ -191,7 +191,7 @@ def main():
|
||||||
|
|
||||||
# warmup - get initial set of files uploaded if there are any writers specified
|
# warmup - get initial set of files uploaded if there are any writers specified
|
||||||
if config.readwrite.writers > 0:
|
if config.readwrite.writers > 0:
|
||||||
print "Uploading initial set of {num} files".format(num=config.readwrite.files.num)
|
print("Uploading initial set of {num} files".format(num=config.readwrite.files.num))
|
||||||
warmup_pool = gevent.pool.Pool(size=100)
|
warmup_pool = gevent.pool.Pool(size=100)
|
||||||
for file_name in file_names:
|
for file_name in file_names:
|
||||||
fp = next(files)
|
fp = next(files)
|
||||||
|
@ -204,15 +204,15 @@ def main():
|
||||||
warmup_pool.join()
|
warmup_pool.join()
|
||||||
|
|
||||||
# main work
|
# main work
|
||||||
print "Starting main worker loop."
|
print("Starting main worker loop.")
|
||||||
print "Using file size: {size} +- {stddev}".format(size=config.readwrite.files.size, stddev=config.readwrite.files.stddev)
|
print("Using file size: {size} +- {stddev}".format(size=config.readwrite.files.size, stddev=config.readwrite.files.stddev))
|
||||||
print "Spawning {w} writers and {r} readers...".format(w=config.readwrite.writers, r=config.readwrite.readers)
|
print("Spawning {w} writers and {r} readers...".format(w=config.readwrite.writers, r=config.readwrite.readers))
|
||||||
group = gevent.pool.Group()
|
group = gevent.pool.Group()
|
||||||
rand_writer = random.Random(seeds['writer'])
|
rand_writer = random.Random(seeds['writer'])
|
||||||
|
|
||||||
# Don't create random files if deterministic_files_names is set and true
|
# Don't create random files if deterministic_files_names is set and true
|
||||||
if not config.readwrite.get('deterministic_file_names'):
|
if not config.readwrite.get('deterministic_file_names'):
|
||||||
for x in xrange(config.readwrite.writers):
|
for x in range(config.readwrite.writers):
|
||||||
this_rand = random.Random(rand_writer.randrange(2**32))
|
this_rand = random.Random(rand_writer.randrange(2**32))
|
||||||
group.spawn(
|
group.spawn(
|
||||||
writer,
|
writer,
|
||||||
|
@ -229,7 +229,7 @@ def main():
|
||||||
# this loop needs no additional qualifiers. If zero readers are specified,
|
# this loop needs no additional qualifiers. If zero readers are specified,
|
||||||
# it will behave as expected (no data is read)
|
# it will behave as expected (no data is read)
|
||||||
rand_reader = random.Random(seeds['reader'])
|
rand_reader = random.Random(seeds['reader'])
|
||||||
for x in xrange(config.readwrite.readers):
|
for x in range(config.readwrite.readers):
|
||||||
this_rand = random.Random(rand_reader.randrange(2**32))
|
this_rand = random.Random(rand_reader.randrange(2**32))
|
||||||
group.spawn(
|
group.spawn(
|
||||||
reader,
|
reader,
|
||||||
|
@ -246,7 +246,7 @@ def main():
|
||||||
|
|
||||||
# wait for all the tests to finish
|
# wait for all the tests to finish
|
||||||
group.join()
|
group.join()
|
||||||
print 'post-join, queue size {size}'.format(size=q.qsize())
|
print('post-join, queue size {size}'.format(size=q.qsize()))
|
||||||
|
|
||||||
if q.qsize() > 0:
|
if q.qsize() > 0:
|
||||||
for temp_dict in q:
|
for temp_dict in q:
|
||||||
|
|
|
@ -47,9 +47,9 @@ class FileValidator(object):
|
||||||
self.original_hash, binary = contents[-40:], contents[:-40]
|
self.original_hash, binary = contents[-40:], contents[:-40]
|
||||||
self.new_hash = hashlib.sha1(binary).hexdigest()
|
self.new_hash = hashlib.sha1(binary).hexdigest()
|
||||||
if not self.new_hash == self.original_hash:
|
if not self.new_hash == self.original_hash:
|
||||||
print 'original hash: ', self.original_hash
|
print('original hash: ', self.original_hash)
|
||||||
print 'new hash: ', self.new_hash
|
print('new hash: ', self.new_hash)
|
||||||
print 'size: ', self._file.tell()
|
print('size: ', self._file.tell())
|
||||||
return False
|
return False
|
||||||
return True
|
return True
|
||||||
|
|
||||||
@@ -115,7 +115,7 @@ class RandomContentFile(object):
         size = min(self.size, 1*1024*1024) # generate at most 1 MB at a time
         chunks = int(math.ceil(size/8.0)) # number of 8-byte chunks to create

-        l = [self.random.getrandbits(64) for _ in xrange(chunks)]
+        l = [self.random.getrandbits(64) for _ in range(chunks)]
         s = struct.pack(chunks*'Q', *l)
         return s

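The generator above builds file content eight bytes at a time by packing 64-bit random integers with struct. A standalone sketch of the same packing (the seed and size are illustrative values):

import math
import random
import struct

rng = random.Random(42)            # illustrative seed
size = 20
chunks = int(math.ceil(size/8.0))  # number of 8-byte chunks to create
l = [rng.getrandbits(64) for _ in range(chunks)]
s = struct.pack(chunks*'Q', *l)    # 'Q' packs one unsigned 64-bit int
print(len(s))                      # 24: whole chunks, rounded up from 20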
@@ -252,7 +252,7 @@ def files2(mean, stddev, seed=None, numfiles=10):
     """
     # pre-compute all the files (and save with TemporaryFiles)
     fs = []
-    for _ in xrange(numfiles):
+    for _ in range(numfiles):
         t = tempfile.SpooledTemporaryFile()
         t.write(generate_file_contents(random.normalvariate(mean, stddev)))
         t.seek(0)
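files2 pre-computes payloads into SpooledTemporaryFile objects, which buffer in memory and only spill to disk once they outgrow a threshold. A quick standalone sketch (the max_size value is illustrative; the diff's code uses the default):

import tempfile

# data stays in memory until it exceeds max_size, then rolls over to disk
t = tempfile.SpooledTemporaryFile(max_size=1024*1024)
t.write(b'hello world')
t.seek(0)
print(t.read())  # b'hello world'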
@@ -277,5 +277,5 @@ def names(mean, stddev, charset=None, seed=None):
             length = int(rand.normalvariate(mean, stddev))
             if length > 0:
                 break
-        name = ''.join(rand.choice(charset) for _ in xrange(length))
+        name = ''.join(rand.choice(charset) for _ in range(length))
         yield name

@@ -11,8 +11,8 @@ import traceback
 import random
 import yaml

-import realistic
-import common
+from . import realistic
+from . import common

 NANOSECOND = int(1e9)

@@ -141,12 +141,12 @@ def main():
     for name in ['names', 'contents', 'writer', 'reader']:
         seeds.setdefault(name, rand.randrange(2**32))

-    print 'Using random seeds: {seeds}'.format(seeds=seeds)
+    print('Using random seeds: {seeds}'.format(seeds=seeds))

     # setup bucket and other objects
     bucket_name = common.choose_bucket_prefix(config.roundtrip.bucket, max_len=30)
     bucket = conn.create_bucket(bucket_name)
-    print "Created bucket: {name}".format(name=bucket.name)
+    print("Created bucket: {name}".format(name=bucket.name))
     objnames = realistic.names(
         mean=15,
         stddev=4,
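The seeding block above fans a single master seed out into one sub-seed per role, so an entire run is reproducible from one number. A standalone sketch of the same scheme (the master seed value is illustrative):

import random

rand = random.Random(1234)  # master seed, illustrative value
seeds = {}
for name in ['names', 'contents', 'writer', 'reader']:
    seeds.setdefault(name, rand.randrange(2**32))
print('Using random seeds: {seeds}'.format(seeds=seeds))

# each role then gets its own independent generator
rand_reader = random.Random(seeds['reader'])
print(rand_reader.randrange(2**32))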
@@ -163,10 +163,10 @@ def main():

     logger_g = gevent.spawn(yaml.safe_dump_all, q, stream=real_stdout)

-    print "Writing {num} objects with {w} workers...".format(
+    print("Writing {num} objects with {w} workers...".format(
         num=config.roundtrip.files.num,
         w=config.roundtrip.writers,
-    )
+    ))
     pool = gevent.pool.Pool(size=config.roundtrip.writers)
     start = time.time()
     for objname in objnames:
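The writer phase throttles concurrency with a bounded gevent pool. A minimal standalone sketch of that pattern (the pool size and the work itself are illustrative):

import gevent.pool

pool = gevent.pool.Pool(size=4)   # at most 4 greenlets in flight at once
results = []
for n in range(10):
    pool.spawn(results.append, n * n)
pool.join()                       # wait for all spawned work, as main() does
print(sorted(results))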
@@ -186,10 +186,10 @@ def main():
         duration=int(round(elapsed * NANOSECOND)),
     ))

-    print "Reading {num} objects with {w} workers...".format(
+    print("Reading {num} objects with {w} workers...".format(
         num=config.roundtrip.files.num,
         w=config.roundtrip.readers,
-    )
+    ))
     # avoid accessing them in the same order as the writing
     rand.shuffle(objnames)
     pool = gevent.pool.Pool(size=config.roundtrip.readers)
2	setup.py
@@ -16,7 +16,7 @@ setup(
         'boto >=2.0b4',
         'boto3 >=1.0.0',
         'PyYAML',
-        'bunch >=1.0.0',
+        'munch >=2.0.0',
         'gevent >=1.0',
         'isodate >=0.4.4',
     ],
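The bunch-to-munch swap replaces an unmaintained dependency with its Python 3-compatible fork; both provide attribute-style access over dicts, which the test configs rely on. A quick sketch of the munch API (the config keys are illustrative):

from munch import Munch

config = Munch(readwrite=Munch(writers=2, readers=4))
print(config.readwrite.writers)          # attribute access: 2
print(config.readwrite.get('duration'))  # dict methods still work: None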