from boto.s3.key import Key
from optparse import OptionParser
from . import realistic
import traceback
import random
from . import common
import sys


def parse_opts():
    parser = OptionParser()
    parser.add_option('-O', '--outfile', help='write output to FILE. Defaults to STDOUT', metavar='FILE')
    parser.add_option('-b', '--bucket', dest='bucket', help='push objects to BUCKET', metavar='BUCKET')
    parser.add_option('--seed', dest='seed', help='optional seed for the random number generator')

    return parser.parse_args()


def get_random_files(quantity, mean, stddev, seed):
    """Create file-like objects with pseudorandom contents.

    IN:
        quantity: number of files to create
        mean: mean file size in bytes
        stddev: standard deviation from the mean file size
        seed: seed for the PRNG
    OUT:
        list of file handles
    """
    file_generator = realistic.files(mean, stddev, seed)
    return [next(file_generator) for _ in range(quantity)]
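# Note: upload_objects() below reports fp.size for each generated file, so the
# file-like objects yielded by realistic.files() are expected to expose a
# `size` attribute on top of the usual readable-file interface.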


def upload_objects(bucket, files, seed):
    """Upload a bunch of files to an S3 bucket.

    IN:
        bucket: boto S3 bucket object
        files: list of file handles to upload
        seed: seed for the PRNG
    OUT:
        list of boto S3 key objects
    """
    keys = []
    name_generator = realistic.names(15, 4, seed=seed)

    for fp in files:
        print('sending file with size %dB' % fp.size, file=sys.stderr)
        key = Key(bucket)
        key.key = next(name_generator)
        key.set_contents_from_file(fp, rewind=True)
        # Make each object public so the unsigned URLs emitted by _main()
        # (generate_url(..., query_auth=False)) can be fetched without credentials.
        key.set_acl('public-read')
        keys.append(key)

    return keys


def _main():
    '''To run the static content load test, make sure you've bootstrapped your
    test environment and set up your config.yaml file, then run the following:

        S3TEST_CONF=config.yaml virtualenv/bin/s3tests-generate-objects.py --seed 1234

    This creates a bucket with your S3 credentials (from config.yaml) and
    fills it with garbage objects as described in the
    file_generation.groups section of config.yaml. It writes a list of
    URLs to those objects to the file listed in file_generation.url_file
    in config.yaml.

    Once you have objects in your bucket, run the siege benchmarking program:

        siege --rc ./siege.conf -r 5

    This tells siege to read the ./siege.conf config file, which tells it to
    use the URLs in ./urls.txt and log to ./siege.log. It hits each URL in
    urls.txt 5 times (the -r flag).

    Results are printed to the terminal and written in CSV format to
    ./siege.log.
    '''
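    # For orientation, a file_generation section in config.yaml could look
    # roughly like this. This is an illustrative sketch, inferred from how the
    # values are consumed below (each group is unpacked as
    # [quantity, mean size in bytes, standard deviation of the size]):
    #
    #   file_generation:
    #     url_file: ./urls.txt
    #     groups:
    #       - [100, 4096, 1024]
    #       - [10, 1048576, 262144]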
    (options, args) = parse_opts()

    # SETUP
    random.seed(options.seed if options.seed else None)
    conn = common.s3.main

    if options.outfile:
        OUTFILE = open(options.outfile, 'w')
    elif common.config.file_generation.url_file:
        OUTFILE = open(common.config.file_generation.url_file, 'w')
    else:
        OUTFILE = sys.stdout

    if options.bucket:
        bucket = conn.create_bucket(options.bucket)
    else:
        bucket = common.get_new_bucket()

    bucket.set_acl('public-read')
    keys = []
    print('bucket: %s' % bucket.name, file=OUTFILE)
    print('setup complete, generating files', file=sys.stderr)
    for profile in common.config.file_generation.groups:
        seed = random.random()
        files = get_random_files(profile[0], profile[1], profile[2], seed)
        keys += upload_objects(bucket, files, seed)

    print('finished sending files. generating urls', file=sys.stderr)
    for key in keys:
        print(key.generate_url(0, query_auth=False), file=OUTFILE)

    print('done', file=sys.stderr)


def main():
    common.setup()
    try:
        _main()
    except Exception:
        # Report the failure but fall through so teardown still runs.
        traceback.print_exc()
    common.teardown()
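

# The docstring in _main() invokes this module through the
# virtualenv/bin/s3tests-generate-objects.py wrapper. The guard below is only
# an optional convenience for running the module directly; because of the
# relative imports above, it must be executed as part of its package
# (e.g. with python -m), not as a standalone file.
if __name__ == '__main__':
    main()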