Compare commits: master...lorem-ipsu (2 commits)

Commits: 025facee96, c56bbf04d5

9 changed files with 58 additions and 1570 deletions
go.mod (+1)

@@ -11,6 +11,7 @@ require (
 	github.com/aws/aws-sdk-go-v2/config v1.15.5
 	github.com/aws/aws-sdk-go-v2/service/s3 v1.26.9
 	github.com/dop251/goja v0.0.0-20230427124612-428fc442ff5f
+	github.com/go-loremipsum/loremipsum v1.1.3
 	github.com/google/uuid v1.3.0
 	github.com/joho/godotenv v1.5.1
 	github.com/nspcc-dev/neo-go v0.101.1
@@ -1,6 +1,8 @@
 package datagen

 import (
+	"strings"
+
 	"go.k6.io/k6/js/modules"
 )

@@ -36,7 +38,7 @@ func (d *Datagen) Exports() modules.Exports {
 	return modules.Exports{Default: d}
 }

-func (d *Datagen) Generator(size int) *Generator {
-	g := NewGenerator(d.vu, size)
+func (d *Datagen) Generator(size int, typ string) *Generator {
+	g := NewGenerator(d.vu, size, strings.ToLower(typ))
 	return &g
 }
@@ -1,12 +1,13 @@
 package datagen

 import (
+	"bytes"
 	"crypto/sha256"
 	"encoding/hex"
 	"math/rand"
-	"time"

 	"github.com/dop251/goja"
+	"github.com/go-loremipsum/loremipsum"
 	"go.k6.io/k6/js/modules"
 )

@@ -24,6 +25,7 @@ type (
 		size   int
 		rand   *rand.Rand
 		buf    []byte
+		typ    string
 		offset int
 	}

@@ -36,20 +38,27 @@ type (
 // TailSize specifies number of extra random bytes in the buffer tail.
 const TailSize = 1024

-func NewGenerator(vu modules.VU, size int) Generator {
+var payloadTypes = []string{
+	"text",
+	"random",
+	"",
+}
+
+func NewGenerator(vu modules.VU, size int, typ string) Generator {
 	if size <= 0 {
 		panic("size should be positive")
 	}

-	r := rand.New(rand.NewSource(time.Now().UnixNano()))
-	buf := make([]byte, size+TailSize)
-	r.Read(buf)
-	return Generator{
-		vu:   vu,
-		size: size,
-		rand: r,
-		buf:  buf,
-	}
+	var found bool
+	for i := range payloadTypes {
+		if payloadTypes[i] == typ {
+			found = true
+		}
+	}
+	if !found {
+		vu.InitEnv().Logger.Info("Unknown payload type '%s', random will be used.", typ)
+	}
+	return Generator{vu: vu, size: size, buf: nil, typ: typ, offset: 0}
 }

 func (g *Generator) GenPayload(calcHash bool) GenPayloadResponse {
@@ -66,9 +75,24 @@ func (g *Generator) GenPayload(calcHash bool) GenPayloadResponse {
 }

 func (g *Generator) nextSlice() []byte {
-	if g.offset >= TailSize {
-		g.offset = 0
-		g.rand.Read(g.buf) // Per docs, err is always nil here
+	if g.buf == nil {
+		// Allocate buffer with extra tail for sliding and populate it with random bytes
+		g.buf = make([]byte, g.size+TailSize)
+		rand.Read(g.buf) // Per docs, err is always nil here
+		switch g.typ {
+		case "text":
+			li := loremipsum.New()
+			b := bytes.NewBuffer(nil)
+			for b.Len() < g.size+TailSize {
+				b.WriteString(li.Paragraph())
+				b.WriteRune('\n')
+			}
+			g.buf = b.Bytes()
+		default:
+			// Allocate buffer with extra tail for sliding and populate it with random bytes
+			g.buf = make([]byte, g.size+TailSize)
+			rand.Read(g.buf) // Per docs, err is always nil here
+		}
 	}

 	result := g.buf[g.offset : g.offset+g.size]
@@ -76,5 +100,10 @@ func (g *Generator) nextSlice() []byte {
 	// Shift the offset for the next call. If we've used our entire tail, then erase
 	// the buffer so that on the next call it is regenerated anew
 	g.offset += 1
+	if g.offset+g.size >= len(g.buf) {
+		g.buf = nil
+		g.offset = 0
+	}
+
 	return result
 }
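The buffer handling above follows a sliding-window pattern: one buffer of `size + TailSize` bytes is built, each call returns a window shifted by one byte, and once the tail is used up the buffer is dropped and rebuilt. Below is a minimal standalone sketch of that pattern; the `slidingPayload` type and `tailSize` constant are invented for illustration and omit the payload-type switch and hashing of the real `Generator`.

```go
package main

import (
	"fmt"
	"math/rand"
)

// tailSize plays the same role as TailSize above: extra bytes at the end of
// the buffer so successive calls can return shifted windows without
// regenerating data every time.
const tailSize = 1024

// slidingPayload is an illustrative stand-in for the Generator type.
type slidingPayload struct {
	size   int
	buf    []byte
	offset int
}

func (p *slidingPayload) next() []byte {
	if p.buf == nil {
		// Regenerate the buffer lazily once the previous one is used up.
		p.buf = make([]byte, p.size+tailSize)
		rand.Read(p.buf)
	}
	out := p.buf[p.offset : p.offset+p.size]
	// Shift the window; once it would run past the buffer, drop the buffer so
	// the next call rebuilds it from scratch.
	p.offset++
	if p.offset+p.size >= len(p.buf) {
		p.buf = nil
		p.offset = 0
	}
	return out
}

func main() {
	p := slidingPayload{size: 8}
	fmt.Printf("%x\n", p.next())
	fmt.Printf("%x\n", p.next()) // same length, shifted by one byte
}
```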
@@ -16,25 +16,25 @@ func TestGenerator(t *testing.T) {

 	t.Run("fails on negative size", func(t *testing.T) {
 		require.Panics(t, func() {
-			_ = NewGenerator(vu, -1)
+			_ = NewGenerator(vu, -1, "")
 		})
 	})

 	t.Run("fails on zero size", func(t *testing.T) {
 		require.Panics(t, func() {
-			_ = NewGenerator(vu, 0)
+			_ = NewGenerator(vu, 0, "")
 		})
 	})

 	t.Run("creates slice of specified size", func(t *testing.T) {
 		size := 10
-		g := NewGenerator(vu, size)
+		g := NewGenerator(vu, size, "")
 		slice := g.nextSlice()
 		require.Len(t, slice, size)
 	})

 	t.Run("creates a different slice on each call", func(t *testing.T) {
-		g := NewGenerator(vu, 1000)
+		g := NewGenerator(vu, 1000, "")
 		slice1 := g.nextSlice()
 		slice2 := g.nextSlice()
 		// Each slice should be unique (assuming that 1000 random bytes will never coincide
@@ -43,7 +43,7 @@ func TestGenerator(t *testing.T) {
 	})

 	t.Run("keeps generating slices after consuming entire tail", func(t *testing.T) {
-		g := NewGenerator(vu, 1000)
+		g := NewGenerator(vu, 1000, "")
 		initialSlice := g.nextSlice()
 		for i := 0; i < TailSize; i++ {
 			g.nextSlice()
@@ -47,7 +47,7 @@ if (registry_enabled && delete_age) {
 }


-const generator = datagen.generator(1024 * parseInt(__ENV.WRITE_OBJ_SIZE));
+const generator = datagen.generator(1024 * parseInt(__ENV.WRITE_OBJ_SIZE), __ENV.PAYLOAD_TYPE || "");

 const scenarios = {};

@@ -31,7 +31,7 @@ const obj_registry = registry_enabled ? registry.open(__ENV.REGISTRY_FILE) : und

 const duration = __ENV.DURATION;

-const generator = datagen.generator(1024 * parseInt(__ENV.WRITE_OBJ_SIZE));
+const generator = datagen.generator(1024 * parseInt(__ENV.WRITE_OBJ_SIZE), __ENV.PAYLOAD_TYPE || "");

 const scenarios = {};

@@ -18,6 +18,7 @@ Scenarios `grpc.js`, `local.js`, `http.js` and `s3.js` support the following opt
 * `SLEEP_WRITE` - time interval (in seconds) between writing VU iterations.
 * `SLEEP_READ` - time interval (in seconds) between reading VU iterations.
 * `SELECTION_SIZE` - size of batch to select for deletion (default: 1000).
+* `PAYLOAD_TYPE` - type of an object payload ("random" or "text", default: "random").

 Additionally, the profiling extension can be enabled to generate CPU and memory profiles which can be inspected with `go tool pprof file.prof`:
 ```shell
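To make the fallback behavior of the new option concrete: an empty or unrecognized `PAYLOAD_TYPE` ends up producing random payloads (the generator logs "Unknown payload type '%s', random will be used." and takes the default branch). The sketch below illustrates that resolution in Go; the `resolvePayloadType` helper is illustrative only and not part of the repository, where the scenarios read the value in JavaScript as `__ENV.PAYLOAD_TYPE || ""`.

```go
package main

import (
	"fmt"
	"os"
	"strings"
)

// resolvePayloadType mirrors the intended semantics of the PAYLOAD_TYPE option:
// the value is matched case-insensitively, and anything other than "text"
// (including an unset variable) results in random payload data.
func resolvePayloadType() string {
	switch strings.ToLower(os.Getenv("PAYLOAD_TYPE")) {
	case "text":
		return "text"
	default:
		return "random"
	}
}

func main() {
	// e.g. PAYLOAD_TYPE=text -> "text"; unset or unknown -> "random"
	fmt.Println("effective payload type:", resolvePayloadType())
}
```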
@@ -46,7 +46,7 @@ if (registry_enabled && delete_age) {
    );
 }

-const generator = datagen.generator(1024 * parseInt(__ENV.WRITE_OBJ_SIZE));
+const generator = datagen.generator(1024 * parseInt(__ENV.WRITE_OBJ_SIZE), __ENV.PAYLOAD_TYPE || "");

 const scenarios = {};
