[#9] logging: Make logger more functional

The previous iteration was flawed because the `Logger` instance was shared,
so endpoints in different VUs were overriding each other's fields.

The new interface is much better: it supports extending the logger in any
context with arbitrary fields.
```
const l = logging.new().withFields({endpoint: "my endpoint"});
...
l.withField("cid", container).info("not found");
```

Signed-off-by: Evgenii Stratonikov <e.stratonikov@yadro.com>
This commit is contained in:
Evgenii Stratonikov 2023-02-16 18:42:45 +03:00 committed by fyrchik
parent 4a2b01f5a4
commit 4ea872d6c3
8 changed files with 82 additions and 65 deletions

View file

@ -2,6 +2,7 @@ package xk6_frostfs
import ( import (
_ "github.com/TrueCloudLab/xk6-frostfs/internal/datagen" _ "github.com/TrueCloudLab/xk6-frostfs/internal/datagen"
_ "github.com/TrueCloudLab/xk6-frostfs/internal/logging"
_ "github.com/TrueCloudLab/xk6-frostfs/internal/native" _ "github.com/TrueCloudLab/xk6-frostfs/internal/native"
_ "github.com/TrueCloudLab/xk6-frostfs/internal/registry" _ "github.com/TrueCloudLab/xk6-frostfs/internal/registry"
_ "github.com/TrueCloudLab/xk6-frostfs/internal/s3" _ "github.com/TrueCloudLab/xk6-frostfs/internal/s3"

View file

@ -1,47 +0,0 @@
package logging
import (
"github.com/sirupsen/logrus"
"go.k6.io/k6/js/modules"
)
// InitTimestamp makes `console.log` print full timestamp instead of raw seconds.
func InitTimestamp(vu modules.VU) {
	// Only a concrete *logrus.Logger exposes its Formatter; any other
	// FieldLogger implementation is silently left untouched.
	lg, ok := vu.InitEnv().Logger.(*logrus.Logger)
	if !ok {
		return
	}
	format := lg.Formatter
	switch f := format.(type) {
	case *logrus.TextFormatter:
		f.ForceColors = true
		f.FullTimestamp = true
		f.TimestampFormat = "15:04:05"
	case *logrus.JSONFormatter:
		f.TimestampFormat = "15:04:05"
	}
}
// LogWithField adds default field to a modules.VU logger.
//
// NOTE(review): the hook is installed on the shared *logrus.Logger, so fields
// registered by different VUs override each other — this is the defect the
// commit message describes and the new logging module replaces.
func LogWithField(vu modules.VU, name string, value interface{}) {
	lg, ok := vu.InitEnv().Logger.(*logrus.Logger)
	if !ok {
		return
	}
	lg.AddHook(defaultFieldHook{name: name, value: value})
}
// defaultFieldHook is a logrus hook that injects one fixed field into every
// entry it fires on.
type defaultFieldHook struct {
	name  string
	value interface{}
}

// Levels restricts the hook to info-level entries only.
func (defaultFieldHook) Levels() []logrus.Level {
	return []logrus.Level{logrus.InfoLevel}
}

// Fire stamps the configured field onto the entry.
func (h defaultFieldHook) Fire(e *logrus.Entry) error {
	e.Data[h.name] = h.value
	return nil
}

View file

@ -0,0 +1,69 @@
package logging
import (
"github.com/dop251/goja"
"github.com/sirupsen/logrus"
"go.k6.io/k6/js/modules"
)
// Logging represents an instance of the module for every VU.
type Logging struct {
	// vu is the VU this instance is bound to; New reads the init-environment
	// logger through it.
	vu modules.VU
}
// RootModule is the global module object type. It is instantiated once per test
// run and will be used to create k6/x/frostfs/logging module instances for each VU.
type RootModule struct{}

// Ensure the interfaces are implemented correctly.
var (
	_ modules.Instance = &Logging{}
	_ modules.Module   = &RootModule{}
)

func init() {
	// Register under the k6 extension namespace so scripts can
	// `import logging from 'k6/x/frostfs/logging'`.
	modules.Register("k6/x/frostfs/logging", new(RootModule))
}
// Exports implements the modules.Instance interface and returns the exports
// of the JS module. The instance itself is the default export, so its exported
// methods (e.g. New) become callable from scripts.
func (n *Logging) Exports() modules.Exports {
	return modules.Exports{Default: n}
}
// logger wraps a logrus.FieldLogger for use from JS. Every WithField/WithFields
// call returns a new value, so per-VU/per-endpoint extensions never clobber
// each other (the defect of the previous shared-hook approach).
type logger struct {
	logrus.FieldLogger
}

// New returns a fresh logger backed by the k6 init-environment logger.
// Exposed to JS as `logging.new()`.
func (n *Logging) New() logger {
	return logger{FieldLogger: n.vu.InitEnv().Logger}
}

// WithField returns a copy of the logger with one additional field.
// Exposed to JS as `log.withField(name, value)`; the scripts in this commit
// (and the commit-message example) call it, so it must exist alongside
// WithFields.
func (l logger) WithField(name string, value goja.Value) logger {
	return logger{FieldLogger: l.FieldLogger.WithField(name, value.Export())}
}

// WithFields returns a copy of the logger extended with every own key of the
// given JS object as a log field. Exposed to JS as `log.withFields({...})`.
func (l logger) WithFields(fields *goja.Object) logger {
	lg := l.FieldLogger
	for _, k := range fields.Keys() {
		lg = lg.WithField(k, fields.Get(k))
	}
	return logger{FieldLogger: lg}
}
// NewModuleInstance implements the modules.Module interface and returns
// a new instance for each VU.
//
// As a side effect it tweaks the shared console logger once per VU so that
// `console.log` prints a full timestamp instead of raw seconds, and forces
// colors for text output.
func (r *RootModule) NewModuleInstance(vu modules.VU) modules.Instance {
	// Only a concrete *logrus.Logger exposes its Formatter; other
	// FieldLogger implementations are left as-is.
	if lg, ok := vu.InitEnv().Logger.(*logrus.Logger); ok {
		switch f := lg.Formatter.(type) {
		case *logrus.TextFormatter:
			f.ForceColors = true
			f.FullTimestamp = true
			f.TimestampFormat = "15:04:05"
		case *logrus.JSONFormatter:
			f.TimestampFormat = "15:04:05"
		}
	}
	// Single exit point: the instance is returned regardless of whether the
	// formatter could be adjusted (the original duplicated this return).
	return &Logging{vu: vu}
}

View file

@ -8,7 +8,6 @@ import (
"github.com/TrueCloudLab/frostfs-sdk-go/client" "github.com/TrueCloudLab/frostfs-sdk-go/client"
frostfsecdsa "github.com/TrueCloudLab/frostfs-sdk-go/crypto/ecdsa" frostfsecdsa "github.com/TrueCloudLab/frostfs-sdk-go/crypto/ecdsa"
"github.com/TrueCloudLab/frostfs-sdk-go/session" "github.com/TrueCloudLab/frostfs-sdk-go/session"
"github.com/TrueCloudLab/xk6-frostfs/internal/logging"
"github.com/google/uuid" "github.com/google/uuid"
"github.com/nspcc-dev/neo-go/pkg/crypto/keys" "github.com/nspcc-dev/neo-go/pkg/crypto/keys"
"go.k6.io/k6/js/modules" "go.k6.io/k6/js/modules"
@ -42,8 +41,6 @@ func init() {
// NewModuleInstance implements the modules.Module interface and returns // NewModuleInstance implements the modules.Module interface and returns
// a new instance for each VU. // a new instance for each VU.
func (r *RootModule) NewModuleInstance(vu modules.VU) modules.Instance { func (r *RootModule) NewModuleInstance(vu modules.VU) modules.Instance {
logging.InitTimestamp(vu)
mi := &Native{vu: vu} mi := &Native{vu: vu}
return mi return mi
} }
@ -55,8 +52,6 @@ func (n *Native) Exports() modules.Exports {
} }
func (n *Native) Connect(endpoint, hexPrivateKey string, dialTimeout, streamTimeout int) (*Client, error) { func (n *Native) Connect(endpoint, hexPrivateKey string, dialTimeout, streamTimeout int) (*Client, error) {
logging.LogWithField(n.vu, "endpoint", endpoint)
var ( var (
cli client.Client cli client.Client
pk *keys.PrivateKey pk *keys.PrivateKey

View file

@ -7,7 +7,6 @@ import (
"strconv" "strconv"
"time" "time"
"github.com/TrueCloudLab/xk6-frostfs/internal/logging"
"github.com/aws/aws-sdk-go-v2/aws" "github.com/aws/aws-sdk-go-v2/aws"
"github.com/aws/aws-sdk-go-v2/config" "github.com/aws/aws-sdk-go-v2/config"
"github.com/aws/aws-sdk-go-v2/service/s3" "github.com/aws/aws-sdk-go-v2/service/s3"
@ -42,8 +41,6 @@ func init() {
// NewModuleInstance implements the modules.Module interface and returns // NewModuleInstance implements the modules.Module interface and returns
// a new instance for each VU. // a new instance for each VU.
func (r *RootModule) NewModuleInstance(vu modules.VU) modules.Instance { func (r *RootModule) NewModuleInstance(vu modules.VU) modules.Instance {
logging.InitTimestamp(vu)
mi := &S3{vu: vu} mi := &S3{vu: vu}
return mi return mi
} }
@ -55,8 +52,6 @@ func (s *S3) Exports() modules.Exports {
} }
func (s *S3) Connect(endpoint string, params map[string]string) (*Client, error) { func (s *S3) Connect(endpoint string, params map[string]string) (*Client, error) {
logging.LogWithField(s.vu, "endpoint", endpoint)
resolver := aws.EndpointResolverWithOptionsFunc(func(_, _ string, _ ...interface{}) (aws.Endpoint, error) { resolver := aws.EndpointResolverWithOptionsFunc(func(_, _ string, _ ...interface{}) (aws.Endpoint, error) {
return aws.Endpoint{ return aws.Endpoint{
URL: endpoint, URL: endpoint,

View file

@ -1,5 +1,6 @@
import datagen from 'k6/x/frostfs/datagen'; import datagen from 'k6/x/frostfs/datagen';
import native from 'k6/x/frostfs/native'; import native from 'k6/x/frostfs/native';
import logging from 'k6/x/frostfs/logging';
import registry from 'k6/x/frostfs/registry'; import registry from 'k6/x/frostfs/registry';
import { SharedArray } from 'k6/data'; import { SharedArray } from 'k6/data';
import { sleep } from 'k6'; import { sleep } from 'k6';
@ -18,6 +19,7 @@ const read_size = JSON.parse(open(__ENV.PREGEN_JSON)).obj_size;
const grpc_endpoints = __ENV.GRPC_ENDPOINTS.split(','); const grpc_endpoints = __ENV.GRPC_ENDPOINTS.split(',');
const grpc_endpoint = grpc_endpoints[Math.floor(Math.random() * grpc_endpoints.length)]; const grpc_endpoint = grpc_endpoints[Math.floor(Math.random() * grpc_endpoints.length)];
const grpc_client = native.connect(grpc_endpoint, '', __ENV.DIAL_TIMEOUT ? parseInt(__ENV.DIAL_TIMEOUT) : 5, __ENV.STREAM_TIMEOUT ? parseInt(__ENV.STREAM_TIMEOUT) : 15); const grpc_client = native.connect(grpc_endpoint, '', __ENV.DIAL_TIMEOUT ? parseInt(__ENV.DIAL_TIMEOUT) : 5, __ENV.STREAM_TIMEOUT ? parseInt(__ENV.STREAM_TIMEOUT) : 15);
const log = logging.new().withField("endpoint", grpc_endpoint);
const registry_enabled = !!__ENV.REGISTRY_FILE; const registry_enabled = !!__ENV.REGISTRY_FILE;
const obj_registry = registry_enabled ? registry.open(__ENV.REGISTRY_FILE) : undefined; const obj_registry = registry_enabled ? registry.open(__ENV.REGISTRY_FILE) : undefined;
@ -116,7 +118,7 @@ export function obj_write() {
const { payload, hash } = generator.genPayload(registry_enabled); const { payload, hash } = generator.genPayload(registry_enabled);
const resp = grpc_client.put(container, headers, payload); const resp = grpc_client.put(container, headers, payload);
if (!resp.success) { if (!resp.success) {
console.log({cid: container, error: resp.error}); log.withField("cid", container).info(resp.error);
return; return;
} }
@ -133,7 +135,7 @@ export function obj_read() {
const obj = obj_list[Math.floor(Math.random() * obj_list.length)]; const obj = obj_list[Math.floor(Math.random() * obj_list.length)];
const resp = grpc_client.get(obj.container, obj.object) const resp = grpc_client.get(obj.container, obj.object)
if (!resp.success) { if (!resp.success) {
console.log({cid: obj.container, oid: obj.object, error: resp.error}); log.withFields({cid: obj.container, oid: obj.object}).info(resp.error);
} }
} }
@ -150,7 +152,7 @@ export function obj_delete() {
const resp = grpc_client.delete(obj.c_id, obj.o_id); const resp = grpc_client.delete(obj.c_id, obj.o_id);
if (!resp.success) { if (!resp.success) {
// Log errors except (2052 - object already deleted) // Log errors except (2052 - object already deleted)
console.log({cid: obj.c_id, oid: obj.o_id, error: resp.error}); log.withFields({cid: obj.c_id, oid: obj.o_id}).info(resp.error);
return; return;
} }

View file

@ -17,6 +17,7 @@ const read_size = JSON.parse(open(__ENV.PREGEN_JSON)).obj_size;
// Select random HTTP endpoint for current VU // Select random HTTP endpoint for current VU
const http_endpoints = __ENV.HTTP_ENDPOINTS.split(','); const http_endpoints = __ENV.HTTP_ENDPOINTS.split(',');
const http_endpoint = http_endpoints[Math.floor(Math.random() * http_endpoints.length)]; const http_endpoint = http_endpoints[Math.floor(Math.random() * http_endpoints.length)];
const log = logging.new().withField("endpoint", http_endpoint);
const registry_enabled = !!__ENV.REGISTRY_FILE; const registry_enabled = !!__ENV.REGISTRY_FILE;
const obj_registry = registry_enabled ? registry.open(__ENV.REGISTRY_FILE) : undefined; const obj_registry = registry_enabled ? registry.open(__ENV.REGISTRY_FILE) : undefined;
@ -86,7 +87,7 @@ export function obj_write() {
const resp = http.post(`http://${http_endpoint}/upload/${container}`, data); const resp = http.post(`http://${http_endpoint}/upload/${container}`, data);
if (resp.status != 200) { if (resp.status != 200) {
console.log(`ERROR: ${resp.status} ${resp.error}`); log.info(`ERROR: ${resp.status} ${resp.error}`);
return; return;
} }
const object_id = JSON.parse(resp.body).object_id; const object_id = JSON.parse(resp.body).object_id;
@ -103,7 +104,7 @@ export function obj_read() {
const obj = obj_list[Math.floor(Math.random() * obj_list.length)]; const obj = obj_list[Math.floor(Math.random() * obj_list.length)];
const resp = http.get(`http://${http_endpoint}/get/${obj.container}/${obj.object}`); const resp = http.get(`http://${http_endpoint}/get/${obj.container}/${obj.object}`);
if (resp.status != 200) { if (resp.status != 200) {
console.log(`ERROR reading ${obj.object}: ${resp.status}`); log.info(`ERROR reading ${obj.object}: ${resp.status}`);
} }
} }

View file

@ -18,6 +18,7 @@ const read_size = JSON.parse(open(__ENV.PREGEN_JSON)).obj_size;
const s3_endpoints = __ENV.S3_ENDPOINTS.split(','); const s3_endpoints = __ENV.S3_ENDPOINTS.split(',');
const s3_endpoint = s3_endpoints[Math.floor(Math.random() * s3_endpoints.length)]; const s3_endpoint = s3_endpoints[Math.floor(Math.random() * s3_endpoints.length)];
const s3_client = s3.connect(`http://${s3_endpoint}`); const s3_client = s3.connect(`http://${s3_endpoint}`);
const log = logging.new().withField("endpoint", s3_endpoint);
const registry_enabled = !!__ENV.REGISTRY_FILE; const registry_enabled = !!__ENV.REGISTRY_FILE;
const obj_registry = registry_enabled ? registry.open(__ENV.REGISTRY_FILE) : undefined; const obj_registry = registry_enabled ? registry.open(__ENV.REGISTRY_FILE) : undefined;
@ -113,7 +114,7 @@ export function obj_write() {
const { payload, hash } = generator.genPayload(registry_enabled); const { payload, hash } = generator.genPayload(registry_enabled);
const resp = s3_client.put(bucket, key, payload); const resp = s3_client.put(bucket, key, payload);
if (!resp.success) { if (!resp.success) {
console.log(resp.error); log.info(resp.error);
return; return;
} }
@ -131,7 +132,7 @@ export function obj_read() {
const resp = s3_client.get(obj.bucket, obj.object); const resp = s3_client.get(obj.bucket, obj.object);
if (!resp.success) { if (!resp.success) {
console.log(resp.error); log.info(resp.error);
} }
} }
@ -147,7 +148,7 @@ export function obj_delete() {
const resp = s3_client.delete(obj.s3_bucket, obj.s3_key); const resp = s3_client.delete(obj.s3_bucket, obj.s3_key);
if (!resp.success) { if (!resp.success) {
console.log(`Error deleting object ${obj.id}: ${resp.error}`); log.info(`Error deleting object ${obj.id}: ${resp.error}`);
return; return;
} }