Compare commits

...

5 commits

Author SHA1 Message Date
Kyle Marsh
c234981a7c moving code around after Tv reorganized master
verify_client.py and its sample blueprint file moved into s3tests
2011-07-13 16:40:39 -07:00
Kyle Marsh
1865ec50d1 dho-qa: add blueprint to client verification
Client verification script should be able to populate buckets automatically
to get a good baseline for comparing similar client-created buckets against.

A yaml blueprint provides a reasonable structure to use to populate objects
and should also be easy enough for a human to follow to create test buckets
from a client.
2011-07-13 15:59:28 -07:00
Kyle Marsh
83dc04c0f2 dho-qa: remove unused libraries from verify_client
Removes several superfluous imports from verify_client.py
2011-07-13 15:59:28 -07:00
Kyle Marsh
d9f3324b5e dho-qa: Verify multiple buckets
Updates client verification tool to accept multiple bucket names on the
command line and dump information for each in case we need to test different
buckets.
2011-07-13 15:59:28 -07:00
Kyle Marsh
ebef1e5a0c dho-qa: Begin writing Client Verification tool
After testing that a client works with an S3-like object store, use this
tool to print out information about the bucket and objects uploaded to
verify that the client worked correctly.
2011-07-13 15:59:28 -07:00
2 changed files with 159 additions and 0 deletions


@@ -0,0 +1,26 @@
- name: foo
  perms:
    null: READ
    kylmar4: FULL_CONTROL
  objects:
  - name: bar
    content: asdf
    metadata:
      a: b
      c: d
    perms:
      kylmar4: FULL_CONTROL
- name: baz
  perms:
    null: WRITE
    kylmar4: FULL_CONTROL
  objects:
  - content: aoeu
    metadata:
      x: y
      z: w
    name: oof
    perms:
      null: FULL_CONTROL
      kylmar4: FULL_CONTROL
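
A minimal sketch (not part of the diff) of how the blueprint above is consumed: read_blueprint() in verify_client.py below runs yaml.safe_load() followed by bunch.bunchify(), after which each bucket and object is attribute-accessible. The filename blueprint.yml is only an assumption here.

# Sketch only: load the blueprint the way read_blueprint() below does.
# 'blueprint.yml' is an assumed filename, not part of this change.
import bunch
import yaml

blueprint = bunch.bunchify(yaml.safe_load(open('blueprint.yml', 'r')))

print blueprint[0].name                   # first bucket name: 'foo'
print blueprint[0].objects[0].content     # its first object's content: 'asdf'
print blueprint[0].objects[0].metadata.a  # that object's metadata value: 'b'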

133 s3tests/verify_client.py Executable file

@@ -0,0 +1,133 @@
#!/usr/bin/python

from boto.exception import S3ResponseError
from boto.s3.key import Key
from optparse import OptionParser

import traceback
import common
import bunch
import yaml
import sys


def parse_opts():
    parser = OptionParser()
    parser.add_option('-O', '--outfile', help='write output to FILE. Defaults to STDOUT', metavar='FILE')
    parser.add_option('-b', '--blueprint', help='populate buckets according to blueprint file BLUEPRINT. Used to get baseline results to compare client results against.', metavar='BLUEPRINT')

    return parser.parse_args()


def get_bucket_properties(bucket):
    """Get and return the following properties from bucket:
        Name
        All Grants
    """
    grants = [(grant.display_name, grant.permission) for grant in bucket.list_grants()]
    return (bucket.name, grants)


def get_key_properties(key):
    """Get and return the following properties from key:
        Name
        Size
        All Grants
        All Metadata
    """
    grants = [(grant.display_name, grant.permission) for grant in key.get_acl().acl.grants]
    return (key.name, key.size, grants, key.metadata)


def read_blueprint(infile):
    """Take a filename as input and return a "bunch" describing buckets
    and objects to upload to an S3-like object store. This can be used
    to confirm that buckets created by another client match those created
    by boto.
    """
    blueprint = None
    try:
        INFILE = open(infile, 'r')
        blueprint = bunch.bunchify(yaml.safe_load(INFILE))
    except Exception:
        print >> sys.stderr, "There was an error reading the blueprint file, %s:" % infile
        traceback.print_exc()
    return blueprint


def populate_from_blueprint(conn, blueprint, prefix=''):
    """Take a connection and a blueprint. Create buckets and upload objects
    according to the blueprint. Prefix will be added to each bucket name.
    """
    buckets = []
    for bucket in blueprint:
        b = conn.create_bucket(prefix + bucket.name)
        for user in bucket.perms:
            b.add_user_grant(bucket.perms[user], user)
        for key in bucket.objects:
            k = Key(b)
            k.key = key.name
            k.metadata = bunch.unbunchify(key.metadata)
            k.set_contents_from_string(key.content)
            for user in key.perms:
                k.add_user_grant(key.perms[user], user)
        buckets.append(b)
    return buckets


def main():
    """Client results validation tool. Make sure you've bootstrapped your
    test environment and set up your config.yml file, then run the
    following:

        S3TEST_CONF=config.yml virtualenv/bin/python verify_client.py -O output.txt test-bucket-name

    S3 authentication information for the bucket's owner must be in
    config.yml to create the connection.
    """
    (options, args) = parse_opts()

    # Set up the connection and the output destination.
    conn = common.s3.main
    if options.outfile:
        OUTFILE = open(options.outfile, 'w')
    else:
        OUTFILE = sys.stdout

    # Optionally populate baseline buckets from a blueprint file.
    blueprint = None
    if options.blueprint:
        blueprint = read_blueprint(options.blueprint)
    if blueprint:
        populate_from_blueprint(conn, blueprint, common.prefix)

    # Dump the properties of each bucket named on the command line.
    for bucket_name in args:
        try:
            bucket = conn.get_bucket(bucket_name)
        except S3ResponseError:
            print >> sys.stderr, "S3 claims %s isn't a valid bucket...maybe the user you specified in config.yml doesn't have access to it?" % bucket_name
            common.teardown()
            return

        (name, grants) = get_bucket_properties(bucket)
        print >> OUTFILE, "Bucket Name: %s" % name
        for grant in grants:
            print >> OUTFILE, "\tgrant %s %s" % grant

        for key in bucket.list():
            full_key = bucket.get_key(key.name)
            (name, size, grants, metadata) = get_key_properties(full_key)
            print >> OUTFILE, name
            print >> OUTFILE, "\tsize: %s" % size
            for grant in grants:
                print >> OUTFILE, "\tgrant %s %s" % grant
            for metadata_key in metadata:
                print >> OUTFILE, "\tmetadata %s: %s" % (metadata_key, metadata[metadata_key])


if __name__ == '__main__':
    common.setup()
    try:
        main()
    except Exception:
        traceback.print_exc()
    common.teardown()
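
For reference, a hedged sketch (not from the diff) of driving these helpers from another Python script instead of the command line. It assumes verify_client.py is importable alongside the s3tests common module and that a blueprint file named blueprint.yml exists; both are assumptions, not part of this change.

# Sketch only: programmatic use of the helpers in verify_client.py above.
# The import path and 'blueprint.yml' filename are assumptions.
import common
import verify_client

common.setup()
try:
    blueprint = verify_client.read_blueprint('blueprint.yml')
    if blueprint:
        buckets = verify_client.populate_from_blueprint(common.s3.main, blueprint, common.prefix)
        for b in buckets:
            name, grants = verify_client.get_bucket_properties(b)
            print name, grants
finally:
    common.teardown()

On the command line, the equivalent is the usage shown in main()'s docstring: pass -b to point at the blueprint and list one or more bucket names (including the common.prefix that populate_from_blueprint prepends) as positional arguments.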