s3-tests/s3tests/common/greenlets.py

import bunch
import collections
import gevent
import random
import time
from ..common import context, get_next_key
from ..common.results import TransferGreenletResult
from ..realistic import FileVerifier

# Make sure context has somewhere to store what we need
context.update(bunch.Bunch(
    needs_first_read=collections.deque(),
    all_keys=[],
    files_iter=None,
))


class SafeTransferGreenlet(gevent.Greenlet):
    def __init__(self, timeout=120):
        gevent.Greenlet.__init__(self)
        self.timeout = timeout
        self.result = None

    def _run(self):
        result = self.result = TransferGreenletResult(self.type)
        result.markStarted()

        try:
            # Abort quietly (no exception raised) if the transfer runs past the timeout.
            with gevent.Timeout(self.timeout, False):
                result.success = self._doit()
        except gevent.GreenletExit:
            return
        except:
            # Any other failure is recorded on the result rather than killing the greenlet.
            result.setError(show_traceback=True)

        result.markFinished()


class ReaderGreenlet(SafeTransferGreenlet):
    type = 'reader'

    def _doit(self):
        if context.needs_first_read:
            # Keys that have never been read back get priority.
            key = context.needs_first_read.popleft()
        elif context.all_keys:
            key = random.choice(context.all_keys)
        else:
            time.sleep(1)
            return self.result.setError('No available keys to test with reader. Try again later.')

        # Copy into a fresh Key object so this greenlet does not share state
        # with the writer that created the key.
        key = key.bucket.new_key(key.name)
        self.result.setKey(key)

        fp = FileVerifier()
        key.get_contents_to_file(fp)

        self.result.request_finish = time.time()
        self.result.request_start = fp.created_at
        self.result.chunks = fp.chunks
        self.result.size = fp.size

        if not fp.valid():
            return self.result.setError('Failed to validate key {name!s}'.format(name=key.name))

        return True


class WriterGreenlet(SafeTransferGreenlet):
    type = 'writer'

    def _doit(self):
        key = get_next_key(context.bucket)
        self.result.setKey(key)

        fp = next(context.files_iter)
        self.result.size = fp.size

        key.set_contents_from_file(fp)

        self.result.request_finish = time.time()
        self.result.request_start = fp.start_time
        self.result.chunks = fp.last_chunks

        # Make the new key available to readers: it gets one guaranteed first
        # read, then joins the pool of keys picked at random.
        context.needs_first_read.append(key)
        context.all_keys.append(key)

        return True
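

# --- Usage sketch (illustrative, not part of the original module) ---
# A minimal sketch of how these greenlets might be driven, assuming the test
# harness has already populated context.bucket and context.files_iter
# elsewhere. Only ReaderGreenlet/WriterGreenlet and the gevent calls are real;
# run_transfer_sketch and its parameters are hypothetical.
def run_transfer_sketch(num_writers=2, num_readers=4, duration=30):
    greenlets = []
    for _ in range(num_writers):
        g = WriterGreenlet(timeout=120)
        g.start()
        greenlets.append(g)
    for _ in range(num_readers):
        g = ReaderGreenlet(timeout=120)
        g.start()
        greenlets.append(g)

    # Each greenlet performs a single transfer; wait up to `duration` seconds,
    # then stop any stragglers and collect the per-greenlet results.
    gevent.joinall(greenlets, timeout=duration)
    gevent.killall(greenlets)
    return [g.result for g in greenlets if g.result is not None]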