forked from TrueCloudLab/frostfs-node

Commit dadfd90dcd: Initial commit
Initial public review release v0.10.0
276 changed files with 46331 additions and 0 deletions
lib/localstore/alias.go (new file, 35 lines)
@@ -0,0 +1,35 @@
package localstore

import (
    "github.com/nspcc-dev/neofs-api-go/hash"
    "github.com/nspcc-dev/neofs-api-go/object"
    "github.com/nspcc-dev/neofs-api-go/refs"
)

// CID is a type alias of
// CID from refs package of neofs-api-go.
type CID = refs.CID

// SGID is a type alias of
// SGID from refs package of neofs-api-go.
type SGID = refs.ObjectID

// Header is a type alias of
// Header from object package of neofs-api-go.
type Header = object.Header

// Object is a type alias of
// Object from object package of neofs-api-go.
type Object = object.Object

// ObjectID is a type alias of
// ObjectID from refs package of neofs-api-go.
type ObjectID = refs.ObjectID

// Address is a type alias of
// Address from refs package of neofs-api-go.
type Address = refs.Address

// Hash is a type alias of
// Hash from hash package of neofs-api-go.
type Hash = hash.Hash

lib/localstore/del.go (new file, 38 lines)
@@ -0,0 +1,38 @@
package localstore

import (
    "github.com/nspcc-dev/neofs-api-go/refs"
    "github.com/nspcc-dev/neofs-node/lib/metrics"
    "github.com/pkg/errors"
    "go.uber.org/zap"
)

func (l *localstore) Del(key refs.Address) error {
    k, err := key.Hash()
    if err != nil {
        return errors.Wrap(err, "Localstore Del failed on key.Marshal")
    }

    // try to fetch object for metrics
    obj, err := l.Get(key)
    if err != nil {
        l.log.Warn("localstore Del failed on localstore.Get", zap.Error(err))
    }

    if err := l.blobBucket.Del(k); err != nil {
        l.log.Warn("Localstore Del failed on BlobBucket.Del", zap.Error(err))
    }

    if err := l.metaBucket.Del(k); err != nil {
        return errors.Wrap(err, "Localstore Del failed on MetaBucket.Del")
    }

    if obj != nil {
        l.col.UpdateContainer(
            key.CID,
            obj.SystemHeader.PayloadLength,
            metrics.RemSpace)
    }

    return nil
}

lib/localstore/filter.go (new file, 306 lines)
@@ -0,0 +1,306 @@
package localstore

import (
    "context"
    "math"
    "sort"
    "sync"

    "github.com/nspcc-dev/neofs-node/internal"
    "github.com/pkg/errors"
)

type (
    // FilterCode is an enumeration of filter return codes.
    FilterCode int

    // PriorityFlag is an enumeration of priority flags.
    PriorityFlag int

    filterPipelineSet []FilterPipeline

    // FilterFunc is a function that checks whether an ObjectMeta matches a specific criterion.
    FilterFunc func(ctx context.Context, meta *ObjectMeta) *FilterResult

    // FilterResult groups the ObjectMeta filter result values.
    FilterResult struct {
        c FilterCode

        e error
    }

    // FilterPipeline is an interface of ObjectMeta filtering tool with sub-filters and priorities.
    FilterPipeline interface {
        Pass(ctx context.Context, meta *ObjectMeta) *FilterResult
        PutSubFilter(params SubFilterParams) error
        GetPriority() uint64
        SetPriority(uint64)
        GetName() string
    }

    // FilterParams groups the parameters of FilterPipeline constructor.
    FilterParams struct {
        Name       string
        Priority   uint64
        FilterFunc FilterFunc
    }

    // SubFilterParams groups the parameters of sub-filter registration.
    SubFilterParams struct {
        PriorityFlag
        FilterPipeline
        OnIgnore FilterCode
        OnPass   FilterCode
        OnFail   FilterCode
    }

    filterPipeline struct {
        *sync.RWMutex

        name     string
        pri      uint64
        filterFn FilterFunc

        maxSubPri  uint64
        mSubResult map[string]map[FilterCode]FilterCode
        subFilters []FilterPipeline
    }
)

const (
    // PriorityValue is a PriorityFlag of the sub-filter registration with GetPriority() value.
    PriorityValue PriorityFlag = iota

    // PriorityMax is a PriorityFlag of the sub-filter registration with maximum priority.
    PriorityMax

    // PriorityMin is a PriorityFlag of the sub-filter registration with minimum priority.
    PriorityMin
)

const (
    // CodeUndefined is an undefined FilterCode.
    CodeUndefined FilterCode = iota

    // CodePass is a FilterCode of filter passage.
    CodePass

    // CodeFail is a FilterCode of filter failure.
    CodeFail

    // CodeIgnore is a FilterCode of filter ignoring.
    CodeIgnore
)

var (
    rPass = &FilterResult{
        c: CodePass,
    }

    rFail = &FilterResult{
        c: CodeFail,
    }

    rIgnore = &FilterResult{
        c: CodeIgnore,
    }

    rUndefined = &FilterResult{
        c: CodeUndefined,
    }
)

// ResultPass returns the FilterResult with CodePass code and nil error.
func ResultPass() *FilterResult {
    return rPass
}

// ResultFail returns the FilterResult with CodeFail code and nil error.
func ResultFail() *FilterResult {
    return rFail
}

// ResultIgnore returns the FilterResult with CodeIgnore code and nil error.
func ResultIgnore() *FilterResult {
    return rIgnore
}

// ResultUndefined returns the FilterResult with CodeUndefined code and nil error.
func ResultUndefined() *FilterResult {
    return rUndefined
}

// ResultWithError returns the FilterResult with passed code and error.
func ResultWithError(c FilterCode, e error) *FilterResult {
    return &FilterResult{
        e: e,
        c: c,
    }
}

// Code returns the filter result code.
func (s *FilterResult) Code() FilterCode {
    return s.c
}

// Err returns the filter result error.
func (s *FilterResult) Err() error {
    return s.e
}

func (f filterPipelineSet) Len() int           { return len(f) }
func (f filterPipelineSet) Less(i, j int) bool { return f[i].GetPriority() > f[j].GetPriority() }
func (f filterPipelineSet) Swap(i, j int)      { f[i], f[j] = f[j], f[i] }

func (r FilterCode) String() string {
    switch r {
    case CodePass:
        return "PASSED"
    case CodeFail:
        return "FAILED"
    case CodeIgnore:
        return "IGNORED"
    default:
        return "UNDEFINED"
    }
}

// NewFilter is a FilterPipeline constructor.
func NewFilter(p *FilterParams) FilterPipeline {
    return &filterPipeline{
        RWMutex:    new(sync.RWMutex),
        name:       p.Name,
        pri:        p.Priority,
        filterFn:   p.FilterFunc,
        mSubResult: make(map[string]map[FilterCode]FilterCode),
    }
}

// AllPassIncludingFilter returns FilterPipeline with sub-filters composed from parameters.
// The resulting filter fails with CodeFail code if any of the sub-filters returns a code other than CodePass.
func AllPassIncludingFilter(name string, params ...*FilterParams) (FilterPipeline, error) {
    res := NewFilter(&FilterParams{
        Name:       name,
        FilterFunc: SkippingFilterFunc,
    })

    for i := range params {
        if err := res.PutSubFilter(SubFilterParams{
            FilterPipeline: NewFilter(params[i]),
            OnIgnore:       CodeFail,
            OnFail:         CodeFail,
        }); err != nil {
            return nil, errors.Wrap(err, "could not create all pass including filter")
        }
    }

    return res, nil
}

func (p *filterPipeline) Pass(ctx context.Context, meta *ObjectMeta) *FilterResult {
    p.RLock()
    defer p.RUnlock()

    for i := range p.subFilters {
        subResult := p.subFilters[i].Pass(ctx, meta)
        subName := p.subFilters[i].GetName()

        cSub := subResult.Code()

        if cSub <= CodeUndefined {
            return ResultUndefined()
        }

        if cFin := p.mSubResult[subName][cSub]; cFin != CodeIgnore {
            return ResultWithError(cFin, subResult.Err())
        }
    }

    if p.filterFn == nil {
        return ResultUndefined()
    }

    return p.filterFn(ctx, meta)
}

func (p *filterPipeline) PutSubFilter(params SubFilterParams) error {
    p.Lock()
    defer p.Unlock()

    if params.FilterPipeline == nil {
        return internal.Error("could not put sub filter: empty filter pipeline")
    }

    name := params.FilterPipeline.GetName()
    if _, ok := p.mSubResult[name]; ok {
        return errors.Errorf("filter %s is already in pipeline %s", name, p.GetName())
    }

    if params.PriorityFlag != PriorityMin {
        if pri := params.FilterPipeline.GetPriority(); pri < math.MaxUint64 {
            params.FilterPipeline.SetPriority(pri + 1)
        }
    } else {
        params.FilterPipeline.SetPriority(0)
    }

    switch pri := params.FilterPipeline.GetPriority(); params.PriorityFlag {
    case PriorityMax:
        if p.maxSubPri < math.MaxUint64 {
            p.maxSubPri++
        }

        params.FilterPipeline.SetPriority(p.maxSubPri)
    case PriorityValue:
        if pri > p.maxSubPri {
            p.maxSubPri = pri
        }
    }

    if params.OnFail <= 0 {
        params.OnFail = CodeIgnore
    }

    if params.OnIgnore <= 0 {
        params.OnIgnore = CodeIgnore
    }

    if params.OnPass <= 0 {
        params.OnPass = CodeIgnore
    }

    p.mSubResult[name] = map[FilterCode]FilterCode{
        CodePass:   params.OnPass,
        CodeIgnore: params.OnIgnore,
        CodeFail:   params.OnFail,
    }

    p.subFilters = append(p.subFilters, params.FilterPipeline)

    sort.Sort(filterPipelineSet(p.subFilters))

    return nil
}

func (p *filterPipeline) GetPriority() uint64 {
    p.RLock()
    defer p.RUnlock()

    return p.pri
}
func (p *filterPipeline) SetPriority(pri uint64) {
    p.Lock()
    p.pri = pri
    p.Unlock()
}

func (p *filterPipeline) GetName() string {
    p.RLock()
    defer p.RUnlock()

    if p.name == "" {
        return "FILTER_UNNAMED"
    }

    return p.name
}

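For orientation, a minimal composition sketch (not part of the diff): it wires a parent pipeline with a container sub-filter using the constructors above. The import alias, filter names, and the surrounding helper function are illustrative assumptions.

package example

import "github.com/nspcc-dev/neofs-node/lib/localstore"

// newContainerPipeline builds a pipeline that passes only objects from the
// listed containers; any other sub-filter outcome is mapped to CodeFail.
// (Illustrative helper; names are made up.)
func newContainerPipeline(cidList []localstore.CID) (localstore.FilterPipeline, error) {
    pipeline := localstore.NewFilter(&localstore.FilterParams{
        Name:       "MAIN_FILTER",
        FilterFunc: localstore.SkippingFilterFunc,
    })

    err := pipeline.PutSubFilter(localstore.SubFilterParams{
        FilterPipeline: localstore.NewFilter(&localstore.FilterParams{
            Name:       "CID_LIST",
            FilterFunc: localstore.ContainerFilterFunc(cidList),
        }),
        OnIgnore: localstore.CodeFail,
        OnFail:   localstore.CodeFail,
    })

    return pipeline, err
}

With a single sub-filter this behaves like AllPassIncludingFilter with one parameter set.
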
lib/localstore/filter_funcs.go (new file, 39 lines)
@@ -0,0 +1,39 @@
package localstore

import (
    "context"
)

// SkippingFilterFunc is a FilterFunc that always returns result with
// CodePass code and nil error.
func SkippingFilterFunc(_ context.Context, _ *ObjectMeta) *FilterResult {
    return ResultPass()
}

// ContainerFilterFunc returns a FilterFunc that returns:
// - result with CodePass code and nil error if CID of ObjectMeta is in the CID list;
// - result with CodeFail code and nil error otherwise.
func ContainerFilterFunc(cidList []CID) FilterFunc {
    return func(_ context.Context, meta *ObjectMeta) *FilterResult {
        for i := range cidList {
            if meta.Object.SystemHeader.CID.Equal(cidList[i]) {
                return ResultPass()
            }
        }

        return ResultFail()
    }
}

// StoredEarlierThanFilterFunc returns a FilterFunc that returns:
// - result with CodePass code and nil error if StoreEpoch is less than the argument;
// - result with CodeFail code and nil error otherwise.
func StoredEarlierThanFilterFunc(epoch uint64) FilterFunc {
    return func(_ context.Context, meta *ObjectMeta) *FilterResult {
        if meta.StoreEpoch < epoch {
            return ResultPass()
        }

        return ResultFail()
    }
}

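Custom predicates follow the same shape. A sketch of a hypothetical FilterFunc (not part of the commit) that passes objects whose stored payload size stays under a limit, assuming the package import path above:

package example

import (
    "context"

    "github.com/nspcc-dev/neofs-node/lib/localstore"
)

// maxPayloadFilterFunc passes objects whose recorded payload size does not
// exceed limit. (Hypothetical helper for illustration only.)
func maxPayloadFilterFunc(limit uint64) localstore.FilterFunc {
    return func(_ context.Context, meta *localstore.ObjectMeta) *localstore.FilterResult {
        if meta.PayloadSize <= limit {
            return localstore.ResultPass()
        }

        return localstore.ResultFail()
    }
}
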
lib/localstore/filter_test.go (new file, 38 lines)
@@ -0,0 +1,38 @@
package localstore

import (
    "context"
    "testing"

    "github.com/nspcc-dev/neofs-node/internal"
    "github.com/stretchr/testify/require"
)

func TestSkippingFilterFunc(t *testing.T) {
    res := SkippingFilterFunc(context.TODO(), &ObjectMeta{})
    require.Equal(t, CodePass, res.Code())
}

func TestFilterResult(t *testing.T) {
    var (
        r *FilterResult
        c = CodePass
        e = internal.Error("test error")
    )

    r = ResultPass()
    require.Equal(t, CodePass, r.Code())
    require.NoError(t, r.Err())

    r = ResultFail()
    require.Equal(t, CodeFail, r.Code())
    require.NoError(t, r.Err())

    r = ResultIgnore()
    require.Equal(t, CodeIgnore, r.Code())
    require.NoError(t, r.Err())

    r = ResultWithError(c, e)
    require.Equal(t, c, r.Code())
    require.EqualError(t, r.Err(), e.Error())
}

lib/localstore/get.go (new file, 30 lines)
@@ -0,0 +1,30 @@
package localstore

import (
    "github.com/nspcc-dev/neofs-api-go/refs"
    "github.com/pkg/errors"
)

func (l *localstore) Get(key refs.Address) (*Object, error) {
    var (
        err  error
        k, v []byte
        o    = new(Object)
    )

    k, err = key.Hash()
    if err != nil {
        return nil, errors.Wrap(err, "Localstore Get failed on key.Marshal")
    }

    v, err = l.blobBucket.Get(k)
    if err != nil {
        return nil, errors.Wrap(err, "Localstore Get failed on blobBucket.Get")
    }

    if err = o.Unmarshal(v); err != nil {
        return nil, errors.Wrap(err, "Localstore Get failed on Object.Unmarshal")
    }

    return o, nil
}

lib/localstore/has.go (new file, 20 lines)
@@ -0,0 +1,20 @@
package localstore

import (
    "github.com/nspcc-dev/neofs-api-go/refs"
    "github.com/pkg/errors"
)

func (l *localstore) Has(key refs.Address) (bool, error) {
    var (
        err error
        k   []byte
    )

    k, err = key.Hash()
    if err != nil {
        return false, errors.Wrap(err, "localstore.Has failed on key.Marshal")
    }

    return l.metaBucket.Has(k) && l.blobBucket.Has(k), nil
}

lib/localstore/interface.go (new file, 102 lines)
@@ -0,0 +1,102 @@
package localstore

import (
    "context"

    "github.com/nspcc-dev/neofs-api-go/object"
    "github.com/nspcc-dev/neofs-node/lib/core"
    "github.com/nspcc-dev/neofs-node/lib/metrics"
    "github.com/pkg/errors"
    "go.uber.org/zap"
)

type (
    // Localstore is an interface of local object storage.
    Localstore interface {
        Put(context.Context, *Object) error
        Get(Address) (*Object, error)
        Del(Address) error
        Meta(Address) (*ObjectMeta, error)
        Iterator
        Has(Address) (bool, error)
        ObjectsCount() (uint64, error)

        object.PositionReader
        Size() int64
    }

    // MetaHandler is a function that handles ObjectMeta.
    MetaHandler func(*ObjectMeta) bool

    // Iterator is an interface of the iterator over local object storage.
    Iterator interface {
        Iterate(FilterPipeline, MetaHandler) error
    }

    // ListItem is an ObjectMeta wrapper.
    ListItem struct {
        ObjectMeta
    }

    // Params groups the parameters of
    // local object storage constructor.
    Params struct {
        BlobBucket core.Bucket
        MetaBucket core.Bucket
        Logger     *zap.Logger
        Collector  metrics.Collector
    }

    localstore struct {
        metaBucket core.Bucket
        blobBucket core.Bucket

        log *zap.Logger
        col metrics.Collector
    }
)

// ErrOutOfRange is returned when requested object payload range is
// out of object payload bounds.
var ErrOutOfRange = errors.New("range is out of payload bounds")

// ErrEmptyMetaHandler is returned by functions that expect
// a non-nil MetaHandler, but received nil.
var ErrEmptyMetaHandler = errors.New("meta handler is nil")

var errNilLogger = errors.New("logger is nil")

var errNilCollector = errors.New("metrics collector is nil")

// New is a local object storage constructor.
func New(p Params) (Localstore, error) {
    switch {
    case p.MetaBucket == nil:
        return nil, errors.Errorf("%s bucket is nil", core.MetaStore)
    case p.BlobBucket == nil:
        return nil, errors.Errorf("%s bucket is nil", core.BlobStore)
    case p.Logger == nil:
        return nil, errNilLogger
    case p.Collector == nil:
        return nil, errNilCollector
    }

    return &localstore{
        metaBucket: p.MetaBucket,
        blobBucket: p.BlobBucket,
        log:        p.Logger,
        col:        p.Collector,
    }, nil
}

func (l localstore) Size() int64 { return l.blobBucket.Size() }

// TODO: implement less costly method of counting.
func (l localstore) ObjectsCount() (uint64, error) {
    items, err := l.metaBucket.List()
    if err != nil {
        return 0, err
    }

    return uint64(len(items)), nil
}

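A minimal construction sketch (not part of the diff), wired the same way as the unit tests later in this commit. It assumes test.Bucket() from lib/test provides an in-memory bucket, as used there, and leaves the metrics collector to the caller because its constructor is not shown in this section.

package example

import (
    "github.com/nspcc-dev/neofs-node/lib/localstore"
    "github.com/nspcc-dev/neofs-node/lib/metrics"
    "github.com/nspcc-dev/neofs-node/lib/test"
    "go.uber.org/zap"
)

// newStore builds a Localstore backed by in-memory buckets (illustrative helper).
func newStore(collector metrics.Collector) (localstore.Localstore, error) {
    return localstore.New(localstore.Params{
        BlobBucket: test.Bucket(), // in-memory bucket from lib/test, as in the tests
        MetaBucket: test.Bucket(),
        Logger:     zap.L(),
        Collector:  collector,
    })
}

// Storing and reading an object back then looks like:
//     err := ls.Put(ctx, obj)
//     obj2, err := ls.Get(*obj.Address())
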
lib/localstore/list.go (new file, 41 lines)
@@ -0,0 +1,41 @@
package localstore

import (
    "context"

    "go.uber.org/zap"
)

func (l *localstore) Iterate(filter FilterPipeline, handler MetaHandler) error {
    if handler == nil {
        return ErrEmptyMetaHandler
    } else if filter == nil {
        filter = NewFilter(&FilterParams{
            Name:       "SKIPPING_FILTER",
            FilterFunc: SkippingFilterFunc,
        })
    }

    return l.metaBucket.Iterate(func(_, v []byte) bool {
        meta := new(ObjectMeta)
        if err := meta.Unmarshal(v); err != nil {
            l.log.Error("unmarshal meta bucket item failure", zap.Error(err))
        } else if filter.Pass(context.TODO(), meta).Code() == CodePass {
            return !handler(meta)
        }
        return true
    })
}

// ListItems iterates over Iterator with FilterPipeline and returns all passed items.
func ListItems(it Iterator, f FilterPipeline) ([]ListItem, error) {
    res := make([]ListItem, 0)
    err := it.Iterate(f, func(meta *ObjectMeta) (stop bool) {
        res = append(res, ListItem{
            ObjectMeta: *meta,
        })
        return
    })

    return res, err
}

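The MetaHandler returns true to stop iteration; ListItems above never stops and collects everything that passes the filter. A short sketch of walking the store directly (the helper function is illustrative, not part of the diff); passing a nil filter makes Iterate substitute the skipping filter shown above:

package example

import "github.com/nspcc-dev/neofs-node/lib/localstore"

// countObjects counts every object recorded in the meta bucket (illustrative helper).
func countObjects(ls localstore.Localstore) (uint64, error) {
    var n uint64

    err := ls.Iterate(nil, func(_ *localstore.ObjectMeta) (stop bool) {
        n++
        return false // keep iterating
    })

    return n, err
}
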
lib/localstore/localstore.pb.go (new file, 462 lines)
@@ -0,0 +1,462 @@
// Code generated by protoc-gen-gogo. DO NOT EDIT.
|
||||
// source: lib/localstore/localstore.proto
|
||||
|
||||
package localstore
|
||||
|
||||
import (
|
||||
fmt "fmt"
|
||||
_ "github.com/gogo/protobuf/gogoproto"
|
||||
proto "github.com/golang/protobuf/proto"
|
||||
object "github.com/nspcc-dev/neofs-api-go/object"
|
||||
io "io"
|
||||
math "math"
|
||||
math_bits "math/bits"
|
||||
)
|
||||
|
||||
// Reference imports to suppress errors if they are not otherwise used.
|
||||
var _ = proto.Marshal
|
||||
var _ = fmt.Errorf
|
||||
var _ = math.Inf
|
||||
|
||||
// This is a compile-time assertion to ensure that this generated file
|
||||
// is compatible with the proto package it is being compiled against.
|
||||
// A compilation error at this line likely means your copy of the
|
||||
// proto package needs to be updated.
|
||||
const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
|
||||
|
||||
type ObjectMeta struct {
|
||||
Object *object.Object `protobuf:"bytes,1,opt,name=Object,proto3" json:"Object,omitempty"`
|
||||
PayloadHash Hash `protobuf:"bytes,2,opt,name=PayloadHash,proto3,customtype=Hash" json:"PayloadHash"`
|
||||
PayloadSize uint64 `protobuf:"varint,3,opt,name=PayloadSize,proto3" json:"PayloadSize,omitempty"`
|
||||
StoreEpoch uint64 `protobuf:"varint,4,opt,name=StoreEpoch,proto3" json:"StoreEpoch,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *ObjectMeta) Reset() { *m = ObjectMeta{} }
|
||||
func (m *ObjectMeta) String() string { return proto.CompactTextString(m) }
|
||||
func (*ObjectMeta) ProtoMessage() {}
|
||||
func (*ObjectMeta) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_3236d71280f5b180, []int{0}
|
||||
}
|
||||
func (m *ObjectMeta) XXX_Unmarshal(b []byte) error {
|
||||
return m.Unmarshal(b)
|
||||
}
|
||||
func (m *ObjectMeta) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
if deterministic {
|
||||
return xxx_messageInfo_ObjectMeta.Marshal(b, m, deterministic)
|
||||
} else {
|
||||
b = b[:cap(b)]
|
||||
n, err := m.MarshalToSizedBuffer(b)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return b[:n], nil
|
||||
}
|
||||
}
|
||||
func (m *ObjectMeta) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_ObjectMeta.Merge(m, src)
|
||||
}
|
||||
func (m *ObjectMeta) XXX_Size() int {
|
||||
return m.Size()
|
||||
}
|
||||
func (m *ObjectMeta) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_ObjectMeta.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_ObjectMeta proto.InternalMessageInfo
|
||||
|
||||
func (m *ObjectMeta) GetObject() *object.Object {
|
||||
if m != nil {
|
||||
return m.Object
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *ObjectMeta) GetPayloadSize() uint64 {
|
||||
if m != nil {
|
||||
return m.PayloadSize
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (m *ObjectMeta) GetStoreEpoch() uint64 {
|
||||
if m != nil {
|
||||
return m.StoreEpoch
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func init() {
|
||||
proto.RegisterType((*ObjectMeta)(nil), "localstore.ObjectMeta")
|
||||
}
|
||||
|
||||
func init() { proto.RegisterFile("lib/localstore/localstore.proto", fileDescriptor_3236d71280f5b180) }
|
||||
|
||||
var fileDescriptor_3236d71280f5b180 = []byte{
|
||||
// 257 bytes of a gzipped FileDescriptorProto
|
||||
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0xcf, 0xc9, 0x4c, 0xd2,
|
||||
0xcf, 0xc9, 0x4f, 0x4e, 0xcc, 0x29, 0x2e, 0xc9, 0x2f, 0x4a, 0x45, 0x62, 0xea, 0x15, 0x14, 0xe5,
|
||||
0x97, 0xe4, 0x0b, 0x71, 0x21, 0x44, 0xa4, 0x84, 0xf2, 0x93, 0xb2, 0x52, 0x93, 0x4b, 0xf4, 0x4b,
|
||||
0x2a, 0x0b, 0x52, 0x8b, 0x21, 0xf2, 0x52, 0xba, 0xe9, 0x99, 0x25, 0x19, 0xa5, 0x49, 0x7a, 0xc9,
|
||||
0xf9, 0xb9, 0xfa, 0xe9, 0xf9, 0xe9, 0xf9, 0xfa, 0x60, 0xe1, 0xa4, 0xd2, 0x34, 0x30, 0x0f, 0xcc,
|
||||
0x01, 0xb3, 0x20, 0xca, 0x95, 0x96, 0x31, 0x72, 0x71, 0xf9, 0x83, 0x4d, 0xf1, 0x4d, 0x2d, 0x49,
|
||||
0x14, 0x52, 0xe3, 0x62, 0x83, 0xf0, 0x24, 0x18, 0x15, 0x18, 0x35, 0xb8, 0x8d, 0xf8, 0xf4, 0x20,
|
||||
0x56, 0xe8, 0x41, 0x44, 0x83, 0xa0, 0xb2, 0x42, 0x7a, 0x5c, 0xdc, 0x01, 0x89, 0x95, 0x39, 0xf9,
|
||||
0x89, 0x29, 0x1e, 0x89, 0xc5, 0x19, 0x12, 0x4c, 0x0a, 0x8c, 0x1a, 0x3c, 0x4e, 0x3c, 0x27, 0xee,
|
||||
0xc9, 0x33, 0xdc, 0xba, 0x27, 0xcf, 0x02, 0x12, 0x0b, 0x42, 0x56, 0x20, 0xa4, 0x00, 0x57, 0x1f,
|
||||
0x9c, 0x59, 0x95, 0x2a, 0xc1, 0xac, 0xc0, 0xa8, 0xc1, 0x12, 0x84, 0x2c, 0x24, 0x24, 0xc7, 0xc5,
|
||||
0x15, 0x0c, 0xf2, 0x94, 0x6b, 0x41, 0x7e, 0x72, 0x86, 0x04, 0x0b, 0x58, 0x01, 0x92, 0x88, 0x93,
|
||||
0xc3, 0x89, 0x47, 0x72, 0x8c, 0x17, 0x1e, 0xc9, 0x31, 0x3e, 0x78, 0x24, 0xc7, 0x38, 0xe3, 0xb1,
|
||||
0x1c, 0x43, 0x94, 0x1e, 0x92, 0x4f, 0xf3, 0x8a, 0x0b, 0x92, 0x93, 0x75, 0x53, 0x52, 0xcb, 0xf4,
|
||||
0xf3, 0x52, 0xf3, 0xd3, 0x8a, 0x75, 0xf3, 0xf2, 0x53, 0x52, 0xf5, 0x51, 0x83, 0x32, 0x89, 0x0d,
|
||||
0xec, 0x63, 0x63, 0x40, 0x00, 0x00, 0x00, 0xff, 0xff, 0xc8, 0x20, 0xb3, 0xa4, 0x63, 0x01, 0x00,
|
||||
0x00,
|
||||
}
|
||||
|
||||
func (m *ObjectMeta) Marshal() (dAtA []byte, err error) {
|
||||
size := m.Size()
|
||||
dAtA = make([]byte, size)
|
||||
n, err := m.MarshalToSizedBuffer(dAtA[:size])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return dAtA[:n], nil
|
||||
}
|
||||
|
||||
func (m *ObjectMeta) MarshalTo(dAtA []byte) (int, error) {
|
||||
size := m.Size()
|
||||
return m.MarshalToSizedBuffer(dAtA[:size])
|
||||
}
|
||||
|
||||
func (m *ObjectMeta) MarshalToSizedBuffer(dAtA []byte) (int, error) {
|
||||
i := len(dAtA)
|
||||
_ = i
|
||||
var l int
|
||||
_ = l
|
||||
if m.XXX_unrecognized != nil {
|
||||
i -= len(m.XXX_unrecognized)
|
||||
copy(dAtA[i:], m.XXX_unrecognized)
|
||||
}
|
||||
if m.StoreEpoch != 0 {
|
||||
i = encodeVarintLocalstore(dAtA, i, uint64(m.StoreEpoch))
|
||||
i--
|
||||
dAtA[i] = 0x20
|
||||
}
|
||||
if m.PayloadSize != 0 {
|
||||
i = encodeVarintLocalstore(dAtA, i, uint64(m.PayloadSize))
|
||||
i--
|
||||
dAtA[i] = 0x18
|
||||
}
|
||||
{
|
||||
size := m.PayloadHash.Size()
|
||||
i -= size
|
||||
if _, err := m.PayloadHash.MarshalTo(dAtA[i:]); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
i = encodeVarintLocalstore(dAtA, i, uint64(size))
|
||||
}
|
||||
i--
|
||||
dAtA[i] = 0x12
|
||||
if m.Object != nil {
|
||||
{
|
||||
size, err := m.Object.MarshalToSizedBuffer(dAtA[:i])
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
i -= size
|
||||
i = encodeVarintLocalstore(dAtA, i, uint64(size))
|
||||
}
|
||||
i--
|
||||
dAtA[i] = 0xa
|
||||
}
|
||||
return len(dAtA) - i, nil
|
||||
}
|
||||
|
||||
func encodeVarintLocalstore(dAtA []byte, offset int, v uint64) int {
|
||||
offset -= sovLocalstore(v)
|
||||
base := offset
|
||||
for v >= 1<<7 {
|
||||
dAtA[offset] = uint8(v&0x7f | 0x80)
|
||||
v >>= 7
|
||||
offset++
|
||||
}
|
||||
dAtA[offset] = uint8(v)
|
||||
return base
|
||||
}
|
||||
func (m *ObjectMeta) Size() (n int) {
|
||||
if m == nil {
|
||||
return 0
|
||||
}
|
||||
var l int
|
||||
_ = l
|
||||
if m.Object != nil {
|
||||
l = m.Object.Size()
|
||||
n += 1 + l + sovLocalstore(uint64(l))
|
||||
}
|
||||
l = m.PayloadHash.Size()
|
||||
n += 1 + l + sovLocalstore(uint64(l))
|
||||
if m.PayloadSize != 0 {
|
||||
n += 1 + sovLocalstore(uint64(m.PayloadSize))
|
||||
}
|
||||
if m.StoreEpoch != 0 {
|
||||
n += 1 + sovLocalstore(uint64(m.StoreEpoch))
|
||||
}
|
||||
if m.XXX_unrecognized != nil {
|
||||
n += len(m.XXX_unrecognized)
|
||||
}
|
||||
return n
|
||||
}
|
||||
|
||||
func sovLocalstore(x uint64) (n int) {
|
||||
return (math_bits.Len64(x|1) + 6) / 7
|
||||
}
|
||||
func sozLocalstore(x uint64) (n int) {
|
||||
return sovLocalstore(uint64((x << 1) ^ uint64((int64(x) >> 63))))
|
||||
}
|
||||
func (m *ObjectMeta) Unmarshal(dAtA []byte) error {
|
||||
l := len(dAtA)
|
||||
iNdEx := 0
|
||||
for iNdEx < l {
|
||||
preIndex := iNdEx
|
||||
var wire uint64
|
||||
for shift := uint(0); ; shift += 7 {
|
||||
if shift >= 64 {
|
||||
return ErrIntOverflowLocalstore
|
||||
}
|
||||
if iNdEx >= l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
b := dAtA[iNdEx]
|
||||
iNdEx++
|
||||
wire |= uint64(b&0x7F) << shift
|
||||
if b < 0x80 {
|
||||
break
|
||||
}
|
||||
}
|
||||
fieldNum := int32(wire >> 3)
|
||||
wireType := int(wire & 0x7)
|
||||
if wireType == 4 {
|
||||
return fmt.Errorf("proto: ObjectMeta: wiretype end group for non-group")
|
||||
}
|
||||
if fieldNum <= 0 {
|
||||
return fmt.Errorf("proto: ObjectMeta: illegal tag %d (wire type %d)", fieldNum, wire)
|
||||
}
|
||||
switch fieldNum {
|
||||
case 1:
|
||||
if wireType != 2 {
|
||||
return fmt.Errorf("proto: wrong wireType = %d for field Object", wireType)
|
||||
}
|
||||
var msglen int
|
||||
for shift := uint(0); ; shift += 7 {
|
||||
if shift >= 64 {
|
||||
return ErrIntOverflowLocalstore
|
||||
}
|
||||
if iNdEx >= l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
b := dAtA[iNdEx]
|
||||
iNdEx++
|
||||
msglen |= int(b&0x7F) << shift
|
||||
if b < 0x80 {
|
||||
break
|
||||
}
|
||||
}
|
||||
if msglen < 0 {
|
||||
return ErrInvalidLengthLocalstore
|
||||
}
|
||||
postIndex := iNdEx + msglen
|
||||
if postIndex < 0 {
|
||||
return ErrInvalidLengthLocalstore
|
||||
}
|
||||
if postIndex > l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
if m.Object == nil {
|
||||
m.Object = &object.Object{}
|
||||
}
|
||||
if err := m.Object.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
|
||||
return err
|
||||
}
|
||||
iNdEx = postIndex
|
||||
case 2:
|
||||
if wireType != 2 {
|
||||
return fmt.Errorf("proto: wrong wireType = %d for field PayloadHash", wireType)
|
||||
}
|
||||
var byteLen int
|
||||
for shift := uint(0); ; shift += 7 {
|
||||
if shift >= 64 {
|
||||
return ErrIntOverflowLocalstore
|
||||
}
|
||||
if iNdEx >= l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
b := dAtA[iNdEx]
|
||||
iNdEx++
|
||||
byteLen |= int(b&0x7F) << shift
|
||||
if b < 0x80 {
|
||||
break
|
||||
}
|
||||
}
|
||||
if byteLen < 0 {
|
||||
return ErrInvalidLengthLocalstore
|
||||
}
|
||||
postIndex := iNdEx + byteLen
|
||||
if postIndex < 0 {
|
||||
return ErrInvalidLengthLocalstore
|
||||
}
|
||||
if postIndex > l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
if err := m.PayloadHash.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
|
||||
return err
|
||||
}
|
||||
iNdEx = postIndex
|
||||
case 3:
|
||||
if wireType != 0 {
|
||||
return fmt.Errorf("proto: wrong wireType = %d for field PayloadSize", wireType)
|
||||
}
|
||||
m.PayloadSize = 0
|
||||
for shift := uint(0); ; shift += 7 {
|
||||
if shift >= 64 {
|
||||
return ErrIntOverflowLocalstore
|
||||
}
|
||||
if iNdEx >= l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
b := dAtA[iNdEx]
|
||||
iNdEx++
|
||||
m.PayloadSize |= uint64(b&0x7F) << shift
|
||||
if b < 0x80 {
|
||||
break
|
||||
}
|
||||
}
|
||||
case 4:
|
||||
if wireType != 0 {
|
||||
return fmt.Errorf("proto: wrong wireType = %d for field StoreEpoch", wireType)
|
||||
}
|
||||
m.StoreEpoch = 0
|
||||
for shift := uint(0); ; shift += 7 {
|
||||
if shift >= 64 {
|
||||
return ErrIntOverflowLocalstore
|
||||
}
|
||||
if iNdEx >= l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
b := dAtA[iNdEx]
|
||||
iNdEx++
|
||||
m.StoreEpoch |= uint64(b&0x7F) << shift
|
||||
if b < 0x80 {
|
||||
break
|
||||
}
|
||||
}
|
||||
default:
|
||||
iNdEx = preIndex
|
||||
skippy, err := skipLocalstore(dAtA[iNdEx:])
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if skippy < 0 {
|
||||
return ErrInvalidLengthLocalstore
|
||||
}
|
||||
if (iNdEx + skippy) < 0 {
|
||||
return ErrInvalidLengthLocalstore
|
||||
}
|
||||
if (iNdEx + skippy) > l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...)
|
||||
iNdEx += skippy
|
||||
}
|
||||
}
|
||||
|
||||
if iNdEx > l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
return nil
|
||||
}
|
||||
func skipLocalstore(dAtA []byte) (n int, err error) {
|
||||
l := len(dAtA)
|
||||
iNdEx := 0
|
||||
depth := 0
|
||||
for iNdEx < l {
|
||||
var wire uint64
|
||||
for shift := uint(0); ; shift += 7 {
|
||||
if shift >= 64 {
|
||||
return 0, ErrIntOverflowLocalstore
|
||||
}
|
||||
if iNdEx >= l {
|
||||
return 0, io.ErrUnexpectedEOF
|
||||
}
|
||||
b := dAtA[iNdEx]
|
||||
iNdEx++
|
||||
wire |= (uint64(b) & 0x7F) << shift
|
||||
if b < 0x80 {
|
||||
break
|
||||
}
|
||||
}
|
||||
wireType := int(wire & 0x7)
|
||||
switch wireType {
|
||||
case 0:
|
||||
for shift := uint(0); ; shift += 7 {
|
||||
if shift >= 64 {
|
||||
return 0, ErrIntOverflowLocalstore
|
||||
}
|
||||
if iNdEx >= l {
|
||||
return 0, io.ErrUnexpectedEOF
|
||||
}
|
||||
iNdEx++
|
||||
if dAtA[iNdEx-1] < 0x80 {
|
||||
break
|
||||
}
|
||||
}
|
||||
case 1:
|
||||
iNdEx += 8
|
||||
case 2:
|
||||
var length int
|
||||
for shift := uint(0); ; shift += 7 {
|
||||
if shift >= 64 {
|
||||
return 0, ErrIntOverflowLocalstore
|
||||
}
|
||||
if iNdEx >= l {
|
||||
return 0, io.ErrUnexpectedEOF
|
||||
}
|
||||
b := dAtA[iNdEx]
|
||||
iNdEx++
|
||||
length |= (int(b) & 0x7F) << shift
|
||||
if b < 0x80 {
|
||||
break
|
||||
}
|
||||
}
|
||||
if length < 0 {
|
||||
return 0, ErrInvalidLengthLocalstore
|
||||
}
|
||||
iNdEx += length
|
||||
case 3:
|
||||
depth++
|
||||
case 4:
|
||||
if depth == 0 {
|
||||
return 0, ErrUnexpectedEndOfGroupLocalstore
|
||||
}
|
||||
depth--
|
||||
case 5:
|
||||
iNdEx += 4
|
||||
default:
|
||||
return 0, fmt.Errorf("proto: illegal wireType %d", wireType)
|
||||
}
|
||||
if iNdEx < 0 {
|
||||
return 0, ErrInvalidLengthLocalstore
|
||||
}
|
||||
if depth == 0 {
|
||||
return iNdEx, nil
|
||||
}
|
||||
}
|
||||
return 0, io.ErrUnexpectedEOF
|
||||
}
|
||||
|
||||
var (
|
||||
ErrInvalidLengthLocalstore = fmt.Errorf("proto: negative length found during unmarshaling")
|
||||
ErrIntOverflowLocalstore = fmt.Errorf("proto: integer overflow")
|
||||
ErrUnexpectedEndOfGroupLocalstore = fmt.Errorf("proto: unexpected end of group")
|
||||
)
|
lib/localstore/localstore.proto (new file, 14 lines)
@@ -0,0 +1,14 @@
syntax = "proto3";
option go_package = "github.com/nspcc-dev/neofs-node/lib/localstore";

package localstore;

import "object/types.proto";
import "github.com/gogo/protobuf/gogoproto/gogo.proto";

message ObjectMeta {
    object.Object Object = 1;
    bytes PayloadHash = 2 [(gogoproto.nullable) = false, (gogoproto.customtype) = "Hash"];
    uint64 PayloadSize = 3;
    uint64 StoreEpoch = 4;
}

lib/localstore/localstore_test.go (new file, 501 lines)
@@ -0,0 +1,501 @@
package localstore
|
||||
|
||||
import (
|
||||
"context"
|
||||
"sync"
|
||||
"testing"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/nspcc-dev/neofs-api-go/container"
|
||||
"github.com/nspcc-dev/neofs-api-go/hash"
|
||||
"github.com/nspcc-dev/neofs-api-go/object"
|
||||
"github.com/nspcc-dev/neofs-api-go/refs"
|
||||
"github.com/nspcc-dev/neofs-node/lib/meta"
|
||||
"github.com/nspcc-dev/neofs-node/lib/metrics"
|
||||
"github.com/nspcc-dev/neofs-node/lib/test"
|
||||
"github.com/stretchr/testify/require"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
type (
|
||||
testBucket struct {
|
||||
sync.RWMutex
|
||||
items map[string][]byte
|
||||
}
|
||||
|
||||
fakeCollector struct {
|
||||
sync.Mutex
|
||||
items map[refs.CID]uint64
|
||||
}
|
||||
)
|
||||
|
||||
func (f *fakeCollector) Start(_ context.Context) { panic("implement me") }
|
||||
func (f *fakeCollector) UpdateSpaceUsage() { panic("implement me") }
|
||||
func (f *fakeCollector) SetIterator(_ meta.Iterator) { panic("implement me") }
|
||||
func (f *fakeCollector) SetCounter(counter metrics.ObjectCounter) { panic("implement me") }
|
||||
|
||||
func (f *fakeCollector) UpdateContainer(cid refs.CID, size uint64, op metrics.SpaceOp) {
|
||||
f.Lock()
|
||||
defer f.Unlock()
|
||||
|
||||
switch op {
|
||||
case metrics.AddSpace:
|
||||
f.items[cid] += size
|
||||
case metrics.RemSpace:
|
||||
if val, ok := f.items[cid]; !ok || val < size {
|
||||
return
|
||||
}
|
||||
|
||||
f.items[cid] -= size
|
||||
default:
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
func newCollector() metrics.Collector {
|
||||
return &fakeCollector{
|
||||
items: make(map[refs.CID]uint64),
|
||||
}
|
||||
}
|
||||
|
||||
func newTestBucket() *testBucket {
|
||||
return &testBucket{
|
||||
items: make(map[string][]byte),
|
||||
}
|
||||
}
|
||||
|
||||
//
|
||||
// func (t *testBucket) Get(key []byte) ([]byte, error) {
|
||||
// t.Lock()
|
||||
// defer t.Unlock()
|
||||
//
|
||||
// val, ok := t.items[base58.Encode(key)]
|
||||
// if !ok {
|
||||
// return nil, errors.New("item not found")
|
||||
// }
|
||||
//
|
||||
// return val, nil
|
||||
// }
|
||||
//
|
||||
// func (t *testBucket) Set(key, value []byte) error {
|
||||
// t.Lock()
|
||||
// defer t.Unlock()
|
||||
//
|
||||
// t.items[base58.Encode(key)] = value
|
||||
//
|
||||
// return nil
|
||||
// }
|
||||
//
|
||||
// func (t *testBucket) Del(key []byte) error {
|
||||
// t.RLock()
|
||||
// defer t.RUnlock()
|
||||
//
|
||||
// delete(t.items, base58.Encode(key))
|
||||
//
|
||||
// return nil
|
||||
// }
|
||||
//
|
||||
// func (t *testBucket) Has(key []byte) bool {
|
||||
// panic("implement me")
|
||||
// }
|
||||
//
|
||||
// func (t *testBucket) Size() int64 {
|
||||
// panic("implement me")
|
||||
// }
|
||||
//
|
||||
// func (t *testBucket) List() ([]core.BucketItem, error) {
|
||||
// t.Lock()
|
||||
// defer t.Unlock()
|
||||
//
|
||||
// res := make([]core.BucketItem, 0)
|
||||
//
|
||||
// for k, v := range t.items {
|
||||
// sk, err := base58.Decode(k)
|
||||
// if err != nil {
|
||||
// return nil, err
|
||||
// }
|
||||
//
|
||||
// res = append(res, core.BucketItem{
|
||||
// Key: sk,
|
||||
// Val: v,
|
||||
// })
|
||||
// }
|
||||
//
|
||||
// return res, nil
|
||||
// }
|
||||
//
|
||||
// func (t *testBucket) Filter(core.FilterHandler) ([]core.BucketItem, error) {
|
||||
// panic("implement me")
|
||||
// }
|
||||
//
|
||||
// func (t *testBucket) Close() error {
|
||||
// panic("implement me")
|
||||
// }
|
||||
//
|
||||
// func (t *testBucket) PRead(key []byte, rng object.Range) ([]byte, error) {
|
||||
// panic("implement me")
|
||||
// }
|
||||
|
||||
func testObject(t *testing.T) *Object {
|
||||
var (
|
||||
uid refs.UUID
|
||||
cid CID
|
||||
)
|
||||
|
||||
t.Run("Prepare object", func(t *testing.T) {
|
||||
cnr, err := container.NewTestContainer()
|
||||
require.NoError(t, err)
|
||||
|
||||
cid, err = cnr.ID()
|
||||
require.NoError(t, err)
|
||||
|
||||
id, err := uuid.NewRandom()
|
||||
uid = refs.UUID(id)
|
||||
require.NoError(t, err)
|
||||
})
|
||||
|
||||
obj := &Object{
|
||||
SystemHeader: object.SystemHeader{
|
||||
Version: 1,
|
||||
ID: uid,
|
||||
CID: cid,
|
||||
OwnerID: refs.OwnerID([refs.OwnerIDSize]byte{}), // TODO: avoid hardcode
|
||||
},
|
||||
Headers: []Header{
|
||||
{
|
||||
Value: &object.Header_UserHeader{
|
||||
UserHeader: &object.UserHeader{
|
||||
Key: "Profession",
|
||||
Value: "Developer",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Value: &object.Header_UserHeader{
|
||||
UserHeader: &object.UserHeader{
|
||||
Key: "Language",
|
||||
Value: "GO",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
return obj
|
||||
}
|
||||
|
||||
func newLocalstore(t *testing.T) Localstore {
|
||||
ls, err := New(Params{
|
||||
BlobBucket: test.Bucket(),
|
||||
MetaBucket: test.Bucket(),
|
||||
Logger: zap.L(),
|
||||
Collector: newCollector(),
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
return ls
|
||||
}
|
||||
|
||||
func TestNew(t *testing.T) {
|
||||
t.Run("New localstore", func(t *testing.T) {
|
||||
var err error
|
||||
|
||||
_, err = New(Params{})
|
||||
require.Error(t, err)
|
||||
|
||||
_, err = New(Params{
|
||||
BlobBucket: test.Bucket(),
|
||||
MetaBucket: test.Bucket(),
|
||||
Logger: zap.L(),
|
||||
Collector: newCollector(),
|
||||
})
|
||||
require.NoError(t, err)
|
||||
})
|
||||
}
|
||||
|
||||
func TestLocalstore_Del(t *testing.T) {
|
||||
t.Run("Del method", func(t *testing.T) {
|
||||
var (
|
||||
err error
|
||||
ls Localstore
|
||||
obj *Object
|
||||
)
|
||||
|
||||
ls = newLocalstore(t)
|
||||
|
||||
obj = testObject(t)
|
||||
obj.SetPayload([]byte("Hello, world"))
|
||||
|
||||
k := *obj.Address()
|
||||
|
||||
store, ok := ls.(*localstore)
|
||||
require.True(t, ok)
|
||||
require.NotNil(t, store)
|
||||
|
||||
metric, ok := store.col.(*fakeCollector)
|
||||
require.True(t, ok)
|
||||
require.NotNil(t, metric)
|
||||
|
||||
err = ls.Put(context.Background(), obj)
|
||||
require.NoError(t, err)
|
||||
require.NotEmpty(t, obj.Payload)
|
||||
require.Contains(t, metric.items, obj.SystemHeader.CID)
|
||||
require.Equal(t, obj.SystemHeader.PayloadLength, metric.items[obj.SystemHeader.CID])
|
||||
|
||||
err = ls.Del(k)
|
||||
require.NoError(t, err)
|
||||
require.Contains(t, metric.items, obj.SystemHeader.CID)
|
||||
require.Equal(t, uint64(0), metric.items[obj.SystemHeader.CID])
|
||||
|
||||
_, err = ls.Get(k)
|
||||
require.Error(t, err)
|
||||
})
|
||||
}
|
||||
|
||||
func TestLocalstore_Get(t *testing.T) {
|
||||
t.Run("Get method (default)", func(t *testing.T) {
|
||||
var (
|
||||
err error
|
||||
ls Localstore
|
||||
obj *Object
|
||||
)
|
||||
|
||||
ls = newLocalstore(t)
|
||||
|
||||
obj = testObject(t)
|
||||
|
||||
err = ls.Put(context.Background(), obj)
|
||||
require.NoError(t, err)
|
||||
|
||||
k := *obj.Address()
|
||||
|
||||
o, err := ls.Get(k)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, obj, o)
|
||||
})
|
||||
}
|
||||
|
||||
func TestLocalstore_Put(t *testing.T) {
|
||||
t.Run("Put method", func(t *testing.T) {
|
||||
var (
|
||||
err error
|
||||
ls Localstore
|
||||
obj *Object
|
||||
)
|
||||
|
||||
ls = newLocalstore(t)
|
||||
store, ok := ls.(*localstore)
|
||||
require.True(t, ok)
|
||||
require.NotNil(t, store)
|
||||
|
||||
metric, ok := store.col.(*fakeCollector)
|
||||
require.True(t, ok)
|
||||
require.NotNil(t, metric)
|
||||
|
||||
obj = testObject(t)
|
||||
|
||||
err = ls.Put(context.Background(), obj)
|
||||
require.NoError(t, err)
|
||||
require.Contains(t, metric.items, obj.SystemHeader.CID)
|
||||
require.Equal(t, obj.SystemHeader.PayloadLength, metric.items[obj.SystemHeader.CID])
|
||||
|
||||
o, err := ls.Get(*obj.Address())
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, obj, o)
|
||||
})
|
||||
}
|
||||
|
||||
func TestLocalstore_List(t *testing.T) {
|
||||
t.Run("List method (no filters)", func(t *testing.T) {
|
||||
var (
|
||||
err error
|
||||
ls Localstore
|
||||
objCount = 10
|
||||
objs = make([]Object, objCount)
|
||||
)
|
||||
|
||||
for i := range objs {
|
||||
objs[i] = *testObject(t)
|
||||
}
|
||||
|
||||
ls = newLocalstore(t)
|
||||
|
||||
for i := range objs {
|
||||
err = ls.Put(context.Background(), &objs[i])
|
||||
require.NoError(t, err)
|
||||
}
|
||||
|
||||
items, err := ListItems(ls, nil)
|
||||
require.NoError(t, err)
|
||||
require.Len(t, items, objCount)
|
||||
|
||||
for i := range items {
|
||||
require.Contains(t, objs, *items[i].Object)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("List method ('bad' filter)", func(t *testing.T) {
|
||||
var (
|
||||
err error
|
||||
ls Localstore
|
||||
objCount = 10
|
||||
objs = make([]Object, objCount)
|
||||
)
|
||||
|
||||
for i := range objs {
|
||||
objs[i] = *testObject(t)
|
||||
}
|
||||
|
||||
ls = newLocalstore(t)
|
||||
|
||||
for i := range objs {
|
||||
err = ls.Put(context.Background(), &objs[i])
|
||||
require.NoError(t, err)
|
||||
}
|
||||
|
||||
items, err := ListItems(ls, NewFilter(&FilterParams{
|
||||
FilterFunc: ContainerFilterFunc([]CID{}),
|
||||
}))
|
||||
require.NoError(t, err)
|
||||
require.Len(t, items, 0)
|
||||
})
|
||||
|
||||
t.Run("List method (filter by cid)", func(t *testing.T) {
|
||||
var (
|
||||
err error
|
||||
ls Localstore
|
||||
objCount = 10
|
||||
objs = make([]Object, objCount)
|
||||
)
|
||||
|
||||
for i := range objs {
|
||||
objs[i] = *testObject(t)
|
||||
}
|
||||
|
||||
ls = newLocalstore(t)
|
||||
|
||||
for i := range objs {
|
||||
err = ls.Put(context.Background(), &objs[i])
|
||||
require.NoError(t, err)
|
||||
}
|
||||
|
||||
cidVals := []CID{objs[0].SystemHeader.CID}
|
||||
|
||||
items, err := ListItems(ls, NewFilter(&FilterParams{
|
||||
FilterFunc: ContainerFilterFunc(cidVals),
|
||||
}))
|
||||
require.NoError(t, err)
|
||||
require.Len(t, items, 1)
|
||||
|
||||
for i := range items {
|
||||
require.Contains(t, objs, *items[i].Object)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("Filter stored earlier", func(t *testing.T) {
|
||||
var (
|
||||
err error
|
||||
ls Localstore
|
||||
objCount = 10
|
||||
objs = make([]Object, objCount)
|
||||
epoch uint64 = 100
|
||||
list []ListItem
|
||||
)
|
||||
|
||||
for i := range objs {
|
||||
objs[i] = *testObject(t)
|
||||
}
|
||||
|
||||
ls = newLocalstore(t)
|
||||
|
||||
ctx := context.WithValue(context.Background(), StoreEpochValue, epoch)
|
||||
|
||||
for i := range objs {
|
||||
err = ls.Put(ctx, &objs[i])
|
||||
require.NoError(t, err)
|
||||
}
|
||||
|
||||
list, err = ListItems(ls, NewFilter(&FilterParams{
|
||||
FilterFunc: StoredEarlierThanFilterFunc(epoch - 1),
|
||||
}))
|
||||
require.NoError(t, err)
|
||||
require.Empty(t, list)
|
||||
|
||||
list, err = ListItems(ls, NewFilter(&FilterParams{
|
||||
FilterFunc: StoredEarlierThanFilterFunc(epoch),
|
||||
}))
|
||||
require.NoError(t, err)
|
||||
require.Empty(t, list)
|
||||
|
||||
list, err = ListItems(ls, NewFilter(&FilterParams{
|
||||
FilterFunc: StoredEarlierThanFilterFunc(epoch + 1),
|
||||
}))
|
||||
require.NoError(t, err)
|
||||
require.Len(t, list, objCount)
|
||||
})
|
||||
|
||||
t.Run("Filter with complex filter", func(t *testing.T) {
|
||||
var (
|
||||
err error
|
||||
ls Localstore
|
||||
objCount = 10
|
||||
objs = make([]Object, objCount)
|
||||
)
|
||||
|
||||
for i := range objs {
|
||||
objs[i] = *testObject(t)
|
||||
}
|
||||
|
||||
ls = newLocalstore(t)
|
||||
|
||||
for i := range objs {
|
||||
err = ls.Put(context.Background(), &objs[i])
|
||||
require.NoError(t, err)
|
||||
}
|
||||
|
||||
cidVals := []CID{objs[0].SystemHeader.CID}
|
||||
|
||||
mainF, err := AllPassIncludingFilter("TEST_FILTER", &FilterParams{
|
||||
Name: "CID_LIST",
|
||||
FilterFunc: ContainerFilterFunc(cidVals),
|
||||
})
|
||||
|
||||
items, err := ListItems(ls, mainF)
|
||||
require.NoError(t, err)
|
||||
require.Len(t, items, 1)
|
||||
})
|
||||
|
||||
t.Run("Meta info", func(t *testing.T) {
|
||||
var (
|
||||
err error
|
||||
ls Localstore
|
||||
objCount = 10
|
||||
objs = make([]Object, objCount)
|
||||
epoch uint64 = 100
|
||||
)
|
||||
|
||||
for i := range objs {
|
||||
objs[i] = *testObject(t)
|
||||
}
|
||||
|
||||
ls = newLocalstore(t)
|
||||
|
||||
ctx := context.WithValue(context.Background(), StoreEpochValue, epoch)
|
||||
|
||||
for i := range objs {
|
||||
err = ls.Put(ctx, &objs[i])
|
||||
require.NoError(t, err)
|
||||
|
||||
meta, err := ls.Meta(*objs[i].Address())
|
||||
require.NoError(t, err)
|
||||
|
||||
noPayload := objs[i]
|
||||
noPayload.Payload = nil
|
||||
|
||||
require.Equal(t, *meta.Object, noPayload)
|
||||
require.Equal(t, meta.PayloadHash, hash.Sum(objs[i].Payload))
|
||||
require.Equal(t, meta.PayloadSize, uint64(len(objs[i].Payload)))
|
||||
require.Equal(t, epoch, meta.StoreEpoch)
|
||||
}
|
||||
})
|
||||
}
|
lib/localstore/meta.go (new file, 52 lines)
@@ -0,0 +1,52 @@
package localstore

import (
    "context"

    "github.com/nspcc-dev/neofs-api-go/hash"
    "github.com/nspcc-dev/neofs-api-go/refs"
    "github.com/pkg/errors"
)

// StoreEpochValue is a context key of object storing epoch number.
const StoreEpochValue = "store epoch"

func (l *localstore) Meta(key refs.Address) (*ObjectMeta, error) {
    var (
        err  error
        meta ObjectMeta
        k, v []byte
    )

    k, err = key.Hash()
    if err != nil {
        return nil, errors.Wrap(err, "Localstore Meta failed on key.Marshal")
    }

    v, err = l.metaBucket.Get(k)
    if err != nil {
        return nil, errors.Wrap(err, "Localstore Meta failed on metaBucket.Get")
    }

    if err := meta.Unmarshal(v); err != nil {
        return nil, errors.Wrap(err, "Localstore Meta failed on ObjectMeta.Unmarshal")
    }

    return &meta, nil
}

func metaFromObject(ctx context.Context, obj *Object) *ObjectMeta {
    meta := new(ObjectMeta)
    o := *obj
    meta.Object = &o
    meta.Object.Payload = nil
    meta.PayloadSize = uint64(len(obj.Payload))
    meta.PayloadHash = hash.Sum(obj.Payload)

    storeEpoch, ok := ctx.Value(StoreEpochValue).(uint64)
    if ok {
        meta.StoreEpoch = storeEpoch
    }

    return meta
}

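The storing epoch is read from the Put context, the same way the unit tests in this commit pass it. A minimal sketch (the helper function is illustrative; obj is assumed to be prepared by the caller):

package example

import (
    "context"

    "github.com/nspcc-dev/neofs-node/lib/localstore"
)

// putAtEpoch records the storing epoch in the object's meta record (illustrative helper).
func putAtEpoch(ls localstore.Localstore, obj *localstore.Object, epoch uint64) error {
    ctx := context.WithValue(context.Background(), localstore.StoreEpochValue, epoch)
    return ls.Put(ctx, obj)
}
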
lib/localstore/put.go (new file, 47 lines)
@@ -0,0 +1,47 @@
package localstore

import (
    "context"

    "github.com/nspcc-dev/neofs-api-go/refs"
    "github.com/nspcc-dev/neofs-node/lib/metrics"
    "github.com/pkg/errors"
)

func (l *localstore) Put(ctx context.Context, obj *Object) error {
    var (
        oa   refs.Address
        k, v []byte
        err  error
    )

    oa = *obj.Address()
    k, err = oa.Hash()

    if err != nil {
        return errors.Wrap(err, "Localstore Put failed on StorageKey.marshal")
    }

    if v, err = obj.Marshal(); err != nil {
        return errors.Wrap(err, "Localstore Put failed on blobValue")
    }

    if err = l.blobBucket.Set(k, v); err != nil {
        return errors.Wrap(err, "Localstore Put failed on BlobBucket.Set")
    }

    if v, err = metaFromObject(ctx, obj).Marshal(); err != nil {
        return errors.Wrap(err, "Localstore Put failed on metaValue")
    }

    if err = l.metaBucket.Set(k, v); err != nil {
        return errors.Wrap(err, "Localstore Put failed on MetaBucket.Set")
    }

    l.col.UpdateContainer(
        obj.SystemHeader.CID,
        obj.SystemHeader.PayloadLength,
        metrics.AddSpace)

    return nil
}

lib/localstore/range.go (new file, 36 lines)
@@ -0,0 +1,36 @@
package localstore

import (
    "context"

    "github.com/nspcc-dev/neofs-api-go/object"
    "github.com/pkg/errors"
)

func (l *localstore) PRead(ctx context.Context, key Address, rng object.Range) ([]byte, error) {
    var (
        err  error
        k, v []byte
        obj  Object
    )

    k, err = key.Hash()
    if err != nil {
        return nil, errors.Wrap(err, "Localstore Get failed on key.Marshal")
    }

    v, err = l.blobBucket.Get(k)
    if err != nil {
        return nil, errors.Wrap(err, "Localstore Get failed on blobBucket.Get")
    }

    if err := obj.Unmarshal(v); err != nil {
        return nil, errors.Wrap(err, "Localstore Get failed on object.Unmarshal")
    }

    if rng.Offset+rng.Length > uint64(len(obj.Payload)) {
        return nil, ErrOutOfRange
    }

    return obj.Payload[rng.Offset : rng.Offset+rng.Length], nil
}

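A small usage sketch of the range read (not part of the diff): the helper name is illustrative, and ranges past the payload end return ErrOutOfRange as shown above.

package example

import (
    "context"

    "github.com/nspcc-dev/neofs-api-go/object"
    "github.com/nspcc-dev/neofs-node/lib/localstore"
)

// readRange fetches length bytes of the object's payload starting at offset (illustrative helper).
func readRange(ls localstore.Localstore, addr localstore.Address, offset, length uint64) ([]byte, error) {
    return ls.PRead(context.Background(), addr, object.Range{
        Offset: offset,
        Length: length,
    })
}
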