[#38] Move common functions to helpers for preset

Signed-off-by: a.chetaev <alex.chetaev@gmail.com>
pull/1/head
a.chetaev 2022-11-17 20:08:20 +03:00 committed by Aleksey Chetaev
parent 97fbe9af82
commit ed25e553b2
6 changed files with 163 additions and 168 deletions

View File

@ -2,13 +2,12 @@
import argparse
import json
import shlex
from subprocess import check_output, CalledProcessError, STDOUT
from helpers.neofs_cli import get_object
parser = argparse.ArgumentParser()
parser.add_argument('--endpoint', help='Node address')
parser.add_argument('--preset_json', help='JSON file path with preset')
parser.add_argument('--print_success', help='Print objects that was successfully read', default=False)
args = parser.parse_args()
@ -26,36 +25,16 @@ def main():
oid = obj.get('object')
cid = obj.get('container')
cmd_line = f"neofs-cli object get -r {args.endpoint} -g" \
f" --cid {cid} --oid {oid} --file /dev/null"
rst = get_object(cid, oid, args.endpoint, "/dev/null")
output, success = execute_cmd(cmd_line)
if success:
if rst:
success_objs += 1
if args.print_success:
print(f'Object: {oid} from {cid}: {"Ok" if success else "False"}')
else:
failed_objs += 1
print(f'Object: {oid} from {cid}: {"Ok" if success else "False"}')
print(f'Success objects: {success_objs}')
print(f'Failed objects: {failed_objs}')
def execute_cmd(cmd_line):
    # Pre-refactor helper (this diff removes it in favour of helpers.cmd.execute_cmd).
    # Runs a command line and returns (decoded output, success flag).
    cmd_args = shlex.split(cmd_line)
    try:
        # Merge stderr into stdout so failures carry their diagnostics.
        output = check_output(cmd_args, stderr=STDOUT).decode()
        success = True
    except CalledProcessError as e:
        # Non-zero exit: keep the captured output, flag the failure.
        output = e.output.decode()
        success = False
    return output, success
if __name__ == "__main__":
main()
main()

View File

@ -0,0 +1,50 @@
import uuid
from helpers.cmd import execute_cmd
def create_bucket(endpoint, versioning, location):
    """Create an S3 bucket with a random UUID name via the aws CLI.

    :param endpoint: S3 gateway host[:port]; prefixed with http:// in the command.
    :param versioning: string flag; versioning is enabled only when exactly "True".
    :param location: AWS location constraint; empty string means endpoint default.
    :return: bucket name (str) on success or when the bucket already exists and is
             owned by us; False when creation failed.
    """
    if location:
        location = f"--create-bucket-configuration 'LocationConstraint={location}'"
    bucket_name = str(uuid.uuid4())

    cmd_line = f"aws --no-verify-ssl s3api create-bucket --bucket {bucket_name} " \
               f"--endpoint http://{endpoint} {location}"
    cmd_line_ver = f"aws --no-verify-ssl s3api put-bucket-versioning --bucket {bucket_name} " \
                   f"--versioning-configuration Status=Enabled --endpoint http://{endpoint} "

    out, success = execute_cmd(cmd_line)
    if success:
        print(f"cmd: {cmd_line}")
    elif "succeeded and you already own it" not in out:
        # Genuine failure: report it and return False so callers do not
        # collect a bucket name that was never created (sibling helpers
        # create_container/upload_object follow the same convention).
        print(f" > Bucket {bucket_name} has not been created.")
        return False

    # Versioning is only attempted for a bucket we actually own.
    if versioning == "True":
        out, success = execute_cmd(cmd_line_ver)
        if not success:
            print(f" > Bucket versioning has not been applied for bucket {bucket_name}.")
        else:
            print(f" > Bucket versioning has been applied.")
    return bucket_name
def upload_object(bucket, payload_filepath, endpoint):
    """Put payload_filepath into `bucket` under a fresh UUID key.

    Returns the generated object key on success, False otherwise.
    """
    object_name = str(uuid.uuid4())
    put_cmd = f"aws s3api put-object --bucket {bucket} --key {object_name} " \
              f"--body {payload_filepath} --endpoint http://{endpoint}"
    out, ok = execute_cmd(put_cmd)
    if ok:
        return object_name
    print(f" > Object {object_name} has not been uploaded.")
    return False

View File

@ -0,0 +1,23 @@
import os
import shlex
from subprocess import check_output, CalledProcessError, STDOUT
def execute_cmd(cmd_line):
    """Run a command line and report its outcome.

    :param cmd_line: full command as a single string; split with shell rules.
    :return: tuple (output, success) — combined stdout+stderr text, and a bool
             that is False when the command exited non-zero.
    """
    argv = shlex.split(cmd_line)
    success = True
    try:
        raw = check_output(argv, stderr=STDOUT)
    except CalledProcessError as err:
        # Failed run: keep whatever the command printed for the caller.
        raw = err.output
        success = False
    return raw.decode(), success
def random_payload(payload_filepath, size):
    """Write `size` KiB of random bytes to payload_filepath, truncating any
    previous content ('w+b' mode)."""
    payload = os.urandom(1024 * int(size))
    with open('%s' % payload_filepath, 'w+b') as out_file:
        out_file.write(payload)

View File

@ -0,0 +1,61 @@
from helpers.cmd import execute_cmd
def create_container(endpoint, policy):
    """Create a NeoFS container with the given placement policy.

    :param endpoint: neofs-cli RPC endpoint.
    :param policy: placement policy string, quoted into the command line.
    :return: container ID (str) parsed from the first output line, or False
             when the command failed or produced no output.
    :raises ValueError: when the first output line is not a "key: value" pair.
    """
    cmd_line = f"neofs-cli --rpc-endpoint {endpoint} container create -g" \
               f" --policy '{policy}' --basic-acl public-read-write --await"
    output, success = execute_cmd(cmd_line)
    if not success:
        print(f" > Container has not been created:\n{output}")
        return False
    # str.split never raises, so the original try/except around this line was
    # dead code and empty output fell through to the ValueError below; guard
    # explicitly instead so empty output takes the intended fail-soft path.
    fst_str = output.split('\n')[0]
    if not fst_str:
        print(f"Got empty output: {output}")
        return False
    # Expected first line shape is "<label>: <CID>" — TODO confirm against
    # the neofs-cli output format.
    splitted = fst_str.split(": ")
    if len(splitted) != 2:
        raise ValueError(f"no CID was parsed from command output: \t{fst_str}")
    print(f"Created container: {splitted[1]}")
    return splitted[1]
def upload_object(container, payload_filepath, endpoint):
    """Upload a file into a NeoFS container.

    :param container: target container ID (CID).
    :param payload_filepath: path of the file to upload.
    :param endpoint: neofs-cli RPC endpoint.
    :return: object ID (str) parsed from the second output line, or False on
             failure / unparseable output.
    :raises Exception: when the second output line is not a "key: value" pair.
    """
    cmd_line = f"neofs-cli --rpc-endpoint {endpoint} object put -g --file {payload_filepath} " \
               f"--cid {container} --no-progress"
    out, success = execute_cmd(cmd_line)
    if not success:
        # The original message interpolated a local that was always "",
        # printing "Object  has not been uploaded"; the placeholder is dropped.
        print(f" > Object has not been uploaded:\n{out}")
        return False
    try:
        # OID is expected on the second output line — TODO confirm against
        # the neofs-cli output format.
        snd_str = out.split('\n')[1]
    except IndexError:
        # Single-line (or empty) output: nothing to parse.
        print(f"Got empty input: {out}")
        return False
    splitted = snd_str.split(": ")
    if len(splitted) != 2:
        raise Exception(f"no OID was parsed from command output: \t{snd_str}")
    return splitted[1]
def get_object(cid, oid, endpoint, out_filepath):
    """Download object `oid` from container `cid` into out_filepath.

    Returns True on success, False otherwise (failure details are printed).
    """
    cmd_line = f"neofs-cli object get -r {endpoint} -g --cid {cid} --oid {oid} " \
               f"--file {out_filepath}"
    out, ok = execute_cmd(cmd_line)
    if ok:
        return True
    print(f" > Failed to get object {oid} from container {cid} \r\n"
          f" > Error: {out}")
    return False

View File

@ -2,10 +2,12 @@
import argparse
import json
import os
import shlex
from argparse import Namespace
from concurrent.futures import ProcessPoolExecutor
from subprocess import check_output, CalledProcessError, STDOUT
from helpers.cmd import random_payload
from helpers.neofs_cli import create_container, upload_object
parser = argparse.ArgumentParser()
parser.add_argument('--size', help='Upload objects size in kb')
@ -15,12 +17,12 @@ parser.add_argument('--preload_obj', help='Number of pre-loaded objects')
parser.add_argument(
"--policy",
help="Container placement policy",
default="REP 1 IN X CBF 1 SELECT 1 FROM * AS X"
default="REP 2 IN X CBF 2 SELECT 2 FROM * AS X"
)
parser.add_argument('--endpoint', help='Node address')
parser.add_argument('--update', help='Save existed containers')
args = parser.parse_args()
args: Namespace = parser.parse_args()
print(args)
@ -37,10 +39,11 @@ def main():
else:
print(f"Create containers: {args.containers}")
with ProcessPoolExecutor(max_workers=10) as executor:
containers_runs = {executor.submit(create_container): _ for _ in range(int(args.containers))}
containers_runs = {executor.submit(create_container, args.endpoint, args.policy): _ for _ in
range(int(args.containers))}
for run in containers_runs:
if run.result() is not None:
if run.result():
container_list.append(run.result())
print("Create containers: Completed")
@ -50,17 +53,17 @@ def main():
return
print(f"Upload objects to each container: {args.preload_obj} ")
random_payload(payload_filepath)
random_payload(payload_filepath, args.size)
print(" > Create random payload: Completed")
for container in container_list:
print(f" > Upload objects for container {container}")
with ProcessPoolExecutor(max_workers=50) as executor:
objects_runs = {executor.submit(upload_object, container, payload_filepath): _ for _ in
objects_runs = {executor.submit(upload_object, container, payload_filepath, args.endpoint): _ for _ in
range(int(args.preload_obj))}
for run in objects_runs:
if run.result() is not None:
if run.result():
objects_struct.append({'container': container, 'object': run.result()})
print(f" > Upload objects for container {container}: Completed")
@ -76,65 +79,5 @@ def main():
print(f" > Total Objects has been created: {len(objects_struct)}.")
def random_payload(payload_filepath):
    # Pre-refactor helper (replaced in this diff by helpers.cmd.random_payload,
    # which takes the size as a parameter): writes args.size KiB of random
    # bytes, reading the module-level argparse `args`.
    with open('%s' % payload_filepath, 'w+b') as fout:
        fout.write(os.urandom(1024 * int(args.size)))
def execute_cmd(cmd_line):
    # Pre-refactor helper (replaced by helpers.cmd.execute_cmd).
    # NOTE: the local `args` shadows the module-level argparse `args`.
    args = shlex.split(cmd_line)
    output = ""
    try:
        output = check_output(args, stderr=STDOUT).decode()
        success = True
    except CalledProcessError as e:
        # Non-zero exit: keep the captured output, flag the failure.
        output = e.output.decode()
        success = False
    return output, success
def create_container():
    # Pre-refactor helper (replaced by helpers.neofs_cli.create_container,
    # which takes endpoint/policy as parameters instead of reading the
    # module-level argparse `args`). Returns the parsed CID, or None on failure.
    cmd_line = f"neofs-cli --rpc-endpoint {args.endpoint} container create -g --policy '{args.policy}' --basic-acl public-read-write --await"
    output, success = execute_cmd(cmd_line)
    if not success:
        print(f" > Container has not been created:\n{output}")
    else:
        try:
            # NOTE(review): str.split never raises, so this except branch
            # looks unreachable — the new helper addresses this.
            fst_str = output.split('\n')[0]
        except Exception:
            print(f"Got empty output: {output}")
            return
        splitted = fst_str.split(": ")
        if len(splitted) != 2:
            raise ValueError(f"no CID was parsed from command output: \t{fst_str}")
        print(f"Created container: {splitted[1]}")
        return splitted[1]
def upload_object(container, payload_filepath):
    # Pre-refactor helper (replaced by helpers.neofs_cli.upload_object, which
    # takes the endpoint as a parameter). Returns the parsed OID, or None on
    # failure. NOTE: object_name stays "" so the error message below prints
    # an empty name.
    object_name = ""
    cmd_line = f"neofs-cli --rpc-endpoint {args.endpoint} object put -g --file {payload_filepath} --cid {container} --no-progress"
    out, success = execute_cmd(cmd_line)
    if not success:
        print(f" > Object {object_name} has not been uploaded:\n{out}")
    else:
        try:
            # taking second string from command output
            snd_str = out.split('\n')[1]
        except:
            print(f"Got empty input: {out}")
            return
        splitted = snd_str.split(": ")
        if len(splitted) != 2:
            raise Exception(f"no OID was parsed from command output: \t{snd_str}")
        return splitted[1]
if __name__ == "__main__":
main()

View File

@ -2,11 +2,10 @@
import argparse
import json
import os
import shlex
import uuid
from concurrent.futures import ProcessPoolExecutor
from subprocess import check_output, CalledProcessError, STDOUT
from helpers.cmd import random_payload
from helpers.aws_cli import create_bucket, upload_object
parser = argparse.ArgumentParser()
@ -15,8 +14,9 @@ parser.add_argument('--buckets', help='Number of buckets to create.')
parser.add_argument('--out', help='JSON file with output.')
parser.add_argument('--preload_obj', help='Number of pre-loaded objects.')
parser.add_argument('--endpoint', help='S3 Gateway address.')
parser.add_argument('--update', help='True/False, False by default. Save existed buckets from target file (--out). New buckets will not be created.')
parser.add_argument('--location', help='AWS location. Will be empty, if has not be declared.')
parser.add_argument('--update', help='True/False, False by default. Save existed buckets from target file (--out). '
'New buckets will not be created.')
parser.add_argument('--location', help='AWS location. Will be empty, if has not be declared.', default="")
parser.add_argument('--versioning', help='True/False, False by default.')
args = parser.parse_args()
@ -38,7 +38,8 @@ def main():
print(f"Create buckets: {args.buckets}")
with ProcessPoolExecutor(max_workers=10) as executor:
buckets_runs = {executor.submit(create_bucket): _ for _ in range(int(args.buckets))}
buckets_runs = {executor.submit(create_bucket, args.endpoint, args.versioning,
args.location): _ for _ in range(int(args.buckets))}
for run in buckets_runs:
if run.result() is not None:
@ -49,13 +50,14 @@ def main():
print(f" > Buckets: {bucket_list}")
print(f"Upload objects to each bucket: {args.preload_obj} ")
random_payload(payload_filepath)
random_payload(payload_filepath, args.size)
print(" > Create random payload: Completed")
for bucket in bucket_list:
print(f" > Upload objects for bucket {bucket}")
with ProcessPoolExecutor(max_workers=50) as executor:
objects_runs = {executor.submit(upload_object, bucket, payload_filepath): _ for _ in range(int(args.preload_obj))}
objects_runs = {executor.submit(upload_object, bucket, payload_filepath,
args.endpoint): _ for _ in range(int(args.preload_obj))}
for run in objects_runs:
if run.result() is not None:
@ -74,68 +76,5 @@ def main():
print(f" > Total Objects has been created: {len(objects_struct)}.")
def random_payload(payload_filepath):
    # Pre-refactor helper (replaced in this diff by helpers.cmd.random_payload):
    # writes args.size KiB of random bytes, reading the module-level `args`.
    with open('%s' % payload_filepath, 'w+b') as fout:
        fout.write(os.urandom(1024 * int(args.size)))
def execute_cmd(cmd_line):
    # Pre-refactor helper (replaced by helpers.cmd.execute_cmd).
    # NOTE: the local `args` shadows the module-level argparse `args`.
    args = shlex.split(cmd_line)
    output = ""
    try:
        output = check_output(args, stderr=STDOUT).decode()
        success = True
    except CalledProcessError as e:
        # Non-zero exit: keep the captured output, flag the failure.
        output = e.output.decode()
        success = False
    return output, success
def create_bucket():
    # Pre-refactor helper (replaced by helpers.aws_cli.create_bucket, which
    # takes endpoint/versioning/location as parameters instead of reading the
    # module-level argparse `args`). Always returns the generated bucket name,
    # even when creation failed.
    bucket_create_marker = False
    location = ""
    if args.location:
        location = f"--create-bucket-configuration 'LocationConstraint={args.location}'"
    bucket_name = str(uuid.uuid4())
    cmd_line = f"aws --no-verify-ssl s3api create-bucket --bucket {bucket_name} --endpoint http://{args.endpoint} {location}"
    cmd_line_ver = f"aws --no-verify-ssl s3api put-bucket-versioning --bucket {bucket_name} --versioning-configuration Status=Enabled --endpoint http://{args.endpoint} "
    out, success = execute_cmd(cmd_line)
    if not success:
        # "already own it" failures still count as usable buckets.
        if "succeeded and you already own it" in out:
            bucket_create_marker = True
        else:
            print(f" > Bucket {bucket_name} has not been created.")
    else:
        bucket_create_marker = True
        print(f"cmd: {cmd_line}")
    # Versioning applies only to owned buckets and only when the flag is the
    # literal string "True".
    if bucket_create_marker and args.versioning == "True":
        out, success = execute_cmd(cmd_line_ver)
        if not success:
            print(f" > Bucket versioning has not been applied for bucket {bucket_name}.")
        else:
            print(f" > Bucket versioning has been applied.")
    return bucket_name
def upload_object(bucket, payload_filepath):
    # Pre-refactor helper (replaced by helpers.aws_cli.upload_object, which
    # takes the endpoint as a parameter). Returns the generated object key, or
    # None (implicitly) on failure.
    object_name = str(uuid.uuid4())
    cmd_line = f"aws s3api put-object --bucket {bucket} --key {object_name} --body {payload_filepath} --endpoint http://{args.endpoint}"
    out, success = execute_cmd(cmd_line)
    if not success:
        print(f" > Object {object_name} has not been uploaded.")
    else:
        return object_name
if __name__ == "__main__":
main()