// k6 scenario: read/write load against local S3 (s3local) using pregenerated buckets and objects.
import logging from 'k6/x/frostfs/logging';
import registry from 'k6/x/frostfs/registry';
import s3local from 'k6/x/frostfs/s3local';
import { SharedArray } from 'k6/data';
import exec from 'k6/execution';
import { textSummary } from './libs/k6-summary-0.0.2.js';
import { parseEnv } from './libs/env-parser.js';
import { uuidv4 } from './libs/k6-utils-1.4.0.js';
import { newGenerator } from './libs/datagen.js';

parseEnv();

// Pregenerated load data (objects, containers, buckets) from the preset file.
const obj_list = new SharedArray('obj_list', function () {
  return JSON.parse(open(__ENV.PREGEN_JSON)).objects;
});

const container_list = new SharedArray('container_list', function () {
  return JSON.parse(open(__ENV.PREGEN_JSON)).containers;
});

const bucket_list = new SharedArray('bucket_list', function () {
  return JSON.parse(open(__ENV.PREGEN_JSON)).buckets;
});

// Builds the bucket-to-container mapping expected by the local S3 client.
function bucket_mapping() {
  if (container_list.length != bucket_list.length) {
    throw 'The number of containers and buckets in the preset file must be the same.';
  }
  let mapping = {};
  for (let i = 0; i < container_list.length; ++i) {
    mapping[bucket_list[i]] = container_list[i];
  }
  return mapping;
}

const read_size = JSON.parse(open(__ENV.PREGEN_JSON)).obj_size;
const summary_json = __ENV.SUMMARY_JSON || '/tmp/summary.json';

const config_file = __ENV.CONFIG_FILE;
const config_dir = __ENV.CONFIG_DIR;
const max_total_size_gb = __ENV.MAX_TOTAL_SIZE_GB ? parseInt(__ENV.MAX_TOTAL_SIZE_GB) : 0;

const s3_client = s3local.connect(config_file, config_dir, {
  'debug_logger': __ENV.DEBUG_LOGGER || 'false',
}, bucket_mapping(), max_total_size_gb);

const log = logging.new().withFields({ "config_file": config_file, "config_dir": config_dir });

// The registry records written objects so that readers can select them later.
const registry_enabled = !!__ENV.REGISTRY_FILE;
const obj_registry = registry_enabled ? registry.open(__ENV.REGISTRY_FILE) : undefined;

let obj_to_read_selector = undefined;
if (registry_enabled) {
  obj_to_read_selector = registry.getSelector(
    __ENV.REGISTRY_FILE,
    "obj_to_read",
    __ENV.SELECTION_SIZE ? parseInt(__ENV.SELECTION_SIZE) : 0,
    {
      status: "created",
    }
  );
}

const duration = __ENV.DURATION;

const scenarios = {};

const write_vu_count = parseInt(__ENV.WRITERS || '0');
const generator = newGenerator(write_vu_count > 0);
if (write_vu_count > 0) {
  scenarios.write = {
    executor: 'constant-vus',
    vus: write_vu_count,
    duration: `${duration}s`,
    exec: 'obj_write',
    gracefulStop: '5s',
  };
}

const read_vu_count = parseInt(__ENV.READERS || '0');
if (read_vu_count > 0) {
  scenarios.read = {
    executor: 'constant-vus',
    vus: read_vu_count,
    duration: `${duration}s`,
    exec: 'obj_read',
    gracefulStop: '5s',
  };
}

export const options = {
  scenarios,
  setupTimeout: '5s',
};

export function setup() {
  const total_vu_count = write_vu_count + read_vu_count;

  console.log(`Pregenerated buckets: ${bucket_list.length}`);
  console.log(`Pregenerated read object size: ${read_size}`);
  console.log(`Pregenerated total objects: ${obj_list.length}`);
  console.log(`Reading VUs: ${read_vu_count}`);
  console.log(`Writing VUs: ${write_vu_count}`);
  console.log(`Total VUs: ${total_vu_count}`);

  const start_timestamp = Date.now();
  console.log(`Load started at: ${new Date(start_timestamp).toString()}`);
}

export function teardown(data) {
  if (obj_registry) {
    obj_registry.close();
  }
  const end_timestamp = Date.now();
  console.log(`Load finished at: ${new Date(end_timestamp).toString()}`);
}

export function handleSummary(data) {
  return {
    'stdout': textSummary(data, { indent: ' ', enableColors: false }),
    [summary_json]: JSON.stringify(data),
  };
}

// Writes a generated payload under a random pregenerated bucket.
export function obj_write() {
  const key = __ENV.OBJ_NAME || uuidv4();
  const bucket = bucket_list[Math.floor(Math.random() * bucket_list.length)];

  const { payload, hash } = generator.genPayload(registry_enabled);
  const resp = s3_client.put(bucket, key, payload);
  if (!resp.success) {
    if (resp.abort) {
      exec.test.abort(resp.error);
    }
    log.withFields({ bucket: bucket, key: key }).error(resp.error);
    return;
  }

  if (obj_registry) {
    obj_registry.addObject("", "", bucket, key, hash);
  }
}

// Reads from the registry selector when enabled, otherwise from the pregenerated object list.
export function obj_read() {
  if (obj_to_read_selector) {
    const obj = obj_to_read_selector.nextObject();
    if (!obj) {
      return;
    }
    const resp = s3_client.get(obj.s3_bucket, obj.s3_key);
    if (!resp.success) {
      log.withFields({ bucket: obj.s3_bucket, key: obj.s3_key }).error(resp.error);
    }
    return;
  }

  const obj = obj_list[Math.floor(Math.random() * obj_list.length)];
  const resp = s3_client.get(obj.bucket, obj.object);
  if (!resp.success) {
    log.withFields({ bucket: obj.bucket, key: obj.object }).error(resp.error);
  }
}
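
// Example invocation (a sketch, not part of the scenario): the file names below
// (pregen.json, node-config.yml, registry.bolt) and the script path are
// placeholders; only the environment variable names are the ones this script
// actually reads. Assumes a k6 binary built with the frostfs extensions.
//
//   ./k6 run -e PREGEN_JSON=pregen.json \
//       -e CONFIG_FILE=node-config.yml \
//       -e DURATION=60 -e WRITERS=8 -e READERS=8 \
//       -e REGISTRY_FILE=registry.bolt \
//       s3local.js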