commit fe749adebf
parent e0575e4efe
Author: Tommi Virtanen
Date:   2011-07-19 09:09:51 -07:00

    pep8 cleanup.

 6 changed files, 21 insertions(+), 23 deletions(-)
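
The bulk of the diff applies one PEP 8 rule, reported by the pep8 checker as E251: no spaces around `=` when it marks a keyword argument or a default parameter value. The rest adds trailing commas to multi-line calls, drops trailing blank lines (W391), and modernizes one raise statement. A minimal before/after sketch of the E251 rule, with hypothetical names:

    # Flagged by the checker as E251 (spaces around keyword '='):
    #     context = bunch.Bunch(
    #         bucket = None,
    #     )

    # PEP 8 style: no spaces around '=' for keyword arguments or
    # default parameter values. Names here are illustrative only.
    def connect(host, port=80, timeout=None):
        return (host, port, timeout)

    conn = connect('example.com', port=8080, timeout=30)

The checker can be run directly on a file, e.g. `pep8 path/to/module.py` (the tool was later renamed pycodestyle).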

@@ -13,11 +13,11 @@ prefix = ''
 # For those scripts that use a context, these are pretty univerally needed.
 context = bunch.Bunch(
-    bucket = None,
+    bucket=None,

     # Save stdout/stderr in case they get fudged with.
-    real_stdout = sys.stdout,
-    real_stderr = sys.stderr,
+    real_stdout=sys.stdout,
+    real_stderr=sys.stderr,
     )

 bucket_counter = itertools.count(1)
@@ -181,4 +181,3 @@ def fill_pools(*args):

 def get_next_key(bucket=None):
     return bucket.new_key("seqkey-{num}".format(num=next(key_counter)))
-

@@ -11,9 +11,9 @@ from ..realistic import FileVerifier

 # Make sure context has somewhere to store what we need
 context.update(bunch.Bunch(
-    needs_first_read = collections.deque(),
-    all_keys = [],
-    files_iter = None,
+    needs_first_read=collections.deque(),
+    all_keys=[],
+    files_iter=None,
     ))
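
These `context` objects are `bunch.Bunch` instances; assuming the third-party `bunch` package (a dict subclass that exposes keys as attributes), the keyword-argument constructor style being cleaned up here is used like this, with made-up values:

    import bunch  # third-party package: dict subclass with attribute access

    ctx = bunch.Bunch(
        needs_first_read=[],   # illustrative stand-ins for the real values
        all_keys=[],
        files_iter=None,
    )
    ctx.all_keys.append('seqkey-1')          # attribute access...
    assert ctx['all_keys'] == ['seqkey-1']   # ...and dict access hit the same data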

@@ -8,7 +8,7 @@ import yaml
 from ..common import context

 context.update(bunch.Bunch(
-    result_queue = collections.deque(),
+    result_queue=collections.deque(),
     ))
@@ -108,4 +108,3 @@ class ResultsLogger(gevent.Greenlet):
             result = context.result_queue.popleft()
             yrep = yaml.dump(result)
             self.outfile.write(yrep + "---\n")
-
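
The second hunk drains `context.result_queue` with `popleft()`; `collections.deque` gives O(1) appends and pops at either end, which is what makes it work as the FIFO between result producers and this logger. A self-contained sketch with made-up result dicts:

    import collections

    result_queue = collections.deque()
    result_queue.append({'type': 'r', 'size': 1024})   # producers push on the right
    result_queue.append({'type': 'w', 'size': 2048})

    while result_queue:
        result = result_queue.popleft()   # consumer pops FIFO from the left, O(1)
        print(result)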

@@ -44,7 +44,7 @@ class Result:
             size=self.size,
             time=self.time,
             mbps=mbps,
-            details=self.details
+            details=self.details,
             )

 def reader(seconds, bucket, name=None, queue=None):
@@ -144,11 +144,11 @@ def main():
             file_size=options.file_size,
             file_stddev=options.stddev,
             quantity=options.quantity,
-            file_name_seed=r
+            file_name_seed=r,
             ) for x in xrange(options.num_writers)]
     greenlets += [gevent.spawn(reader, options.duration, bucket,
             name=x,
-            queue=q
+            queue=q,
             ) for x in xrange(options.num_readers)]

     gevent.spawn_later(options.duration, lambda: q.put(StopIteration))
@@ -177,19 +177,19 @@ def main():
     print "--- Stats ---"
     print "Total Read: {read} MB ({mbps} MB/s)".format(
         read=(total_read/1024.0),
-        mbps=(total_read/1024.0/options.duration)
+        mbps=(total_read/1024.0/options.duration),
         )
     print "Total Write: {write} MB ({mbps} MB/s)".format(
         write=(total_write/1024.0),
-        mbps=(total_write/1024.0/options.duration)
+        mbps=(total_write/1024.0/options.duration),
         )
     print "Read filures: {num} ({percent}%)".format(
         num=read_failure,
-        percent=(100.0*read_failure/max(read_failure+read_success, 1))
+        percent=(100.0*read_failure/max(read_failure+read_success, 1)),
         )
     print "Write failures: {num} ({percent}%)".format(
         num=write_failure,
-        percent=(100.0*write_failure/max(write_failure+write_success, 1))
+        percent=(100.0*write_failure/max(write_failure+write_success, 1)),
         )

     gevent.joinall(greenlets, timeout=1)
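
The only change in the hunks above is a trailing comma after the last keyword argument of each multi-line call. PEP 8 permits this style, and it keeps future diffs small: appending another argument then touches one line instead of two. A sketch with hypothetical numbers:

    total_read = 2048.0   # KB, made-up value for illustration
    duration = 10.0       # seconds

    stats = dict(
        read=(total_read / 1024.0),             # MB
        mbps=(total_read / 1024.0 / duration),  # trailing comma: a later argument
    )                                           # only adds its own line to a diff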

@@ -165,13 +165,13 @@ def files_varied(groups, unlimited=False):
     # Build the sets for our yield
     for num, size, stddev in groups:
-        assert num and size
+        assert num and size #TODO
         file_sets.append(bunch.Bunch(
             num = num,
             size = size,
             stddev = stddev,
-            files = files(size, stddev, time.time())
+            files = files(size, stddev, time.time()),
             ))
         total_num += num
@@ -199,4 +199,4 @@ def files_varied(groups, unlimited=False):
             yield next(file_set.files)
         if not ok:
-            raise RuntimeError, "Couldn't find a match."
+            raise RuntimeError("Couldn't find a match.")
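
The last hunk is more than formatting: `raise RuntimeError, "msg"` is the Python 2-only statement form, which Python 3 rejects outright; the call form raises exactly the same exception and is valid in both. A sketch around a hypothetical helper:

    def pick_match(candidates, wanted):
        for candidate in candidates:
            if candidate == wanted:
                return candidate
        # Old spelling (SyntaxError under Python 3):
        #     raise RuntimeError, "Couldn't find a match."
        raise RuntimeError("Couldn't find a match.")

    pick_match(['a', 'b'], 'b')   # returns 'b'; 'z' would raise RuntimeError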

@@ -17,16 +17,16 @@ from common.results import ResultsLogger
 # Set up the common context to use our information. Wee.
 context.update(bunch.Bunch(
     # Set to False when it's time to exit main loop.
-    running = True,
+    running=True,

     # The pools our tasks run in.
-    greenlet_pools = bunch.Bunch(
+    greenlet_pools=bunch.Bunch(
         writer=None,
         reader=None,
         ),

     # The greenlet that keeps logs going.
-    results_logger = None,
+    results_logger=None,
     ))