Remove dead code.

This commit is contained in:
Tommi Virtanen 2011-07-26 16:26:09 -07:00
parent 700dd3f880
commit 3f49775ce3
3 changed files with 0 additions and 250 deletions

@@ -1,92 +0,0 @@
import bunch
import collections
import gevent
import random
import time

from ..common import context, get_next_key
from ..common.results import TransferGreenletResult
from ..realistic import FileVerifier


# Make sure context has somewhere to store what we need
context.update(bunch.Bunch(
    needs_first_read=collections.deque(),
    all_keys=[],
    files_iter=None,
))


class SafeTransferGreenlet(gevent.Greenlet):
    def __init__(self, timeout=120):
        gevent.Greenlet.__init__(self)
        self.timeout = timeout
        self.result = None

    def _run(self):
        result = self.result = TransferGreenletResult(self.type)
        result.markStarted()

        try:
            with gevent.Timeout(self.timeout, False):
                result.success = self._doit()
        except gevent.GreenletExit:
            return
        except:
            result.setError(show_traceback=True)

        result.markFinished()


class ReaderGreenlet(SafeTransferGreenlet):
    type = 'reader'

    def _doit(self):
        if context.needs_first_read:
            key = context.needs_first_read.popleft()
        elif context.all_keys:
            key = random.choice(context.all_keys)
        else:
            time.sleep(1)
            return self.result.setError('No available keys to test with reader. Try again later.')

        # Copy the key object (make a fresh key with the same bucket and name)
        key = key.bucket.new_key(key.name)
        self.result.setKey(key)

        fp = FileVerifier()
        key.get_contents_to_file(fp)

        self.result.request_finish = time.time()
        self.result.request_start = fp.created_at
        self.result.chunks = fp.chunks
        self.result.size = fp.size

        if not fp.valid():
            return self.result.setError('Failed to validate key {name!s}'.format(name=key.name))

        return True


class WriterGreenlet(SafeTransferGreenlet):
    type = 'writer'

    def _doit(self):
        key = get_next_key(context.bucket)
        self.result.setKey(key)
        fp = next(context.files_iter)
        self.result.size = fp.size

        key.set_contents_from_file(fp)

        self.result.request_finish = time.time()
        self.result.request_start = fp.start_time
        self.result.chunks = fp.last_chunks

        # And at the end, add to needs_first_read and shuffle
        context.needs_first_read.append(key)
        context.all_keys.append(key)

        return True
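
For reference, the timeout guard in SafeTransferGreenlet._run() is plain gevent usage: wrap the work in gevent.Timeout(seconds, False) so an overlong transfer is silently abandoned and simply counts as a failure. Below is a minimal, self-contained sketch of that pattern; the BoundedWorker class and sleep durations are invented for illustration and are not part of the removed code.

import gevent


class BoundedWorker(gevent.Greenlet):
    def __init__(self, work_seconds, timeout=2):
        gevent.Greenlet.__init__(self)
        self.work_seconds = work_seconds
        self.timeout = timeout
        self.success = None

    def _run(self):
        self.success = False
        # Timeout(..., False) is suppressed by the with-block: if time runs
        # out, the block is abandoned and success stays False.
        with gevent.Timeout(self.timeout, False):
            gevent.sleep(self.work_seconds)  # stand-in for the S3 transfer
            self.success = True


workers = [BoundedWorker(0.1), BoundedWorker(5)]
for w in workers:
    w.start()
gevent.joinall(workers)
print([w.success for w in workers])  # expected: [True, False]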

@@ -1,110 +0,0 @@
import bunch
import collections
import gevent
import time
import traceback
import yaml

from ..common import context


context.update(bunch.Bunch(
    result_queue=collections.deque(),
))


class TransferGreenletResult(object):
    """ Generic container object. Weeeeeeeeeeeeeee *short* """
    def __init__(self, type):
        # About the Greenlet
        self.type = type

        # About the key
        self.bucket = None
        self.key = None
        self.size = None

        # About the job
        self.success = False
        self.error = None

        self.start_time = None
        self.finish_time = None
        self.duration = None
        self.latency = None

        self.request_start = None
        self.request_finish = None

        self.chunks = None

    def markStarted(self):
        self.start_time = time.time()

    def markFinished(self):
        self.finish_time = time.time()
        self.duration = self.finish_time - self.start_time
        context.result_queue.append(self)

    def setKey(self, key):
        self.key = key.name
        self.bucket = key.bucket.name

    def setError(self, message='Unhandled Exception', show_traceback=False):
        """ Sets an error state in the result, and returns False... example usage:

            return self.result.setError('Something happened', show_traceback=True)
        """
        self.error = dict()
        self.error['msg'] = message
        if show_traceback:
            self.error['traceback'] = traceback.format_exc()
        return False

    @classmethod
    def repr_yaml(c, dumper, self):
        data = dict()
        for x in ('type', 'bucket', 'key', 'chunks'):
            data[x] = self.__dict__[x]

        # reader => r, writer => w
        data['type'] = data['type'][0]

        # the error key must be present ONLY on failure.
        assert not (self.success and self.error)
        if self.success:
            assert self.error == None
        else:
            assert self.error != None
            data['error'] = self.error

        data['start'] = self.request_start
        if self.request_finish:
            data['duration'] = 1000000000 * (self.request_finish - self.request_start)

        return dumper.represent_dict(data)


# And a representer for dumping a TransferGreenletResult as a YAML dict()
yaml.add_representer(TransferGreenletResult, TransferGreenletResult.repr_yaml)


class ResultsLogger(gevent.Greenlet):
    """ A quick little greenlet to always run and dump results. """
    def __init__(self):
        gevent.Greenlet.__init__(self)
        self.outfile = context.real_stdout

    def _run(self):
        while True:
            try:
                self._doit()
            except:
                print "An exception was encountered while dumping the results... this shouldn't happen!"
                traceback.print_exc()
            time.sleep(0.1)

    def _doit(self):
        while context.result_queue:
            result = context.result_queue.popleft()
            yrep = yaml.dump(result)
            self.outfile.write(yrep + "---\n")
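
The representer registration above is ordinary PyYAML machinery: yaml.add_representer() maps a Python class to a function that renders instances as plain YAML nodes. A self-contained sketch of the same trick follows, with an invented Result class standing in for TransferGreenletResult.

import yaml


class Result(object):
    def __init__(self, type, key, duration):
        self.type = type
        self.key = key
        self.duration = duration

    @classmethod
    def repr_yaml(cls, dumper, self):
        # Same signature trick as above: registered as a classmethod, so
        # PyYAML's (dumper, data) call arrives here as (cls, dumper, self).
        return dumper.represent_dict({
            'type': self.type[0],  # reader => r, writer => w
            'key': self.key,
            'duration': self.duration,
        })


yaml.add_representer(Result, Result.repr_yaml)

print(yaml.dump(Result('writer', 'obj-17', 42)) + "---")
# Depending on PyYAML defaults this prints either the flow-style mapping
# {duration: 42, key: obj-17, type: w} or the equivalent block mapping,
# followed by the '---' separator, much as ResultsLogger writes each result.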

@@ -1,4 +1,3 @@
import bunch
import hashlib
import random
import string
@@ -157,50 +156,3 @@ def names(mean, stddev, charset=None, seed=None):
                break
        name = ''.join(rand.choice(charset) for _ in xrange(length))
        yield name


def files_varied(groups, unlimited=False):
    """ Yields a weighted-random selection of file-like objects. """
    # Quick data type sanity.
    assert groups and isinstance(groups, (list, tuple))

    total_num = 0
    file_sets = []
    rand = random.Random(time.time())

    # Build the sets for our yield
    for num, size, stddev in groups:
        assert num and size #TODO
        file_sets.append(bunch.Bunch(
            num = num,
            size = size,
            stddev = stddev,
            files = files(size, stddev, time.time()),
        ))
        total_num += num

    while True:
        if not total_num:
            raise StopIteration

        num = rand.randrange(total_num)
        ok = 0
        for file_set in file_sets:
            if num > file_set.num:
                num -= file_set.num
                continue

            if not unlimited:
                total_num -= 1
                file_set.num -= 1

                # None left in this set!
                if file_set.num == 0:
                    file_sets.remove(file_set)

            ok = 1
            yield next(file_set.files)

        if not ok:
            raise RuntimeError("Couldn't find a match.")
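
A hypothetical call sketch for the removed generator: each (num, size, stddev) tuple describes one group of files to draw from in weighted-random order. The import path and the .read() call on the yielded objects are assumptions for illustration, since only the function body appears above.

# Hypothetical usage; module path and file-like interface are assumed.
from s3tests.realistic import files_varied

# Two groups: 5 files of roughly 1 KiB and 2 files of roughly 10 MiB,
# yielded in weighted-random order until both groups are exhausted.
for fp in files_varied([(5, 1024, 128), (2, 10 * 1024 * 1024, 4096)]):
    data = fp.read()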