forked from TrueCloudLab/frostfs-node
[#5] innerring: Use generic LRU cache
Signed-off-by: Evgenii Stratonikov <e.stratonikov@yadro.com>
parent 4155c1bdff
commit d0a0432a51
2 changed files with 5 additions and 5 deletions
@@ -42,10 +42,10 @@ func (np *Processor) processDeposit(deposit *frostfsEvent.Deposit) {
 	defer np.mintEmitLock.Unlock()
 
 	val, ok := np.mintEmitCache.Get(receiver.String())
-	if ok && val.(uint64)+np.mintEmitThreshold >= curEpoch {
+	if ok && val+np.mintEmitThreshold >= curEpoch {
 		np.log.Warn("double mint emission declined",
 			zap.String("receiver", receiver.String()),
-			zap.Uint64("last_emission", val.(uint64)),
+			zap.Uint64("last_emission", val),
 			zap.Uint64("current_epoch", curEpoch))
 
 		return
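The hunk above is the payoff of the generic cache: with golang-lru/v2 parameterized as Cache[string, uint64], Get returns a typed uint64, so the val.(uint64) assertions in processDeposit can be dropped. A minimal standalone sketch of that typed lookup follows; the key and value are illustrative, not data from frostfs-node.

// sketch: typed Get with a generic LRU cache (golang-lru/v2)
package main

import (
	"fmt"

	lru "github.com/hashicorp/golang-lru/v2"
)

func main() {
	// keys are receiver addresses (as strings), values are emission epochs
	cache, err := lru.New[string, uint64](10)
	if err != nil {
		panic(err)
	}

	cache.Add("receiver-1", 42) // hypothetical entry, for illustration only

	// val is already a uint64 here; no val.(uint64) assertion is needed
	if val, ok := cache.Get("receiver-1"); ok {
		fmt.Println("last emission epoch:", val)
	}
}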
@@ -12,7 +12,7 @@ import (
 	"github.com/TrueCloudLab/frostfs-node/pkg/morph/event"
 	frostfsEvent "github.com/TrueCloudLab/frostfs-node/pkg/morph/event/neofs"
 	"github.com/TrueCloudLab/frostfs-node/pkg/util/logger"
-	lru "github.com/hashicorp/golang-lru"
+	lru "github.com/hashicorp/golang-lru/v2"
 	"github.com/nspcc-dev/neo-go/pkg/encoding/fixedn"
 	"github.com/nspcc-dev/neo-go/pkg/util"
 	"github.com/panjf2000/ants/v2"
@@ -47,7 +47,7 @@ type (
 		alphabetState       AlphabetState
 		converter           PrecisionConverter
 		mintEmitLock        *sync.Mutex
-		mintEmitCache       *lru.Cache
+		mintEmitCache       *lru.Cache[string, uint64]
 		mintEmitThreshold   uint64
 		mintEmitValue       fixedn.Fixed8
 		gasBalanceThreshold int64
@@ -105,7 +105,7 @@ func New(p *Params) (*Processor, error) {
		return nil, fmt.Errorf("ir/frostfs: can't create worker pool: %w", err)
 	}
 
-	lruCache, err := lru.New(p.MintEmitCacheSize)
+	lruCache, err := lru.New[string, uint64](p.MintEmitCacheSize)
 	if err != nil {
		return nil, fmt.Errorf("ir/frostfs: can't create LRU cache for gas emission: %w", err)
 	}
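For context, a self-contained sketch of the v2 constructor pattern shown in the last hunk. The helper name and cache size are illustrative and not part of frostfs-node; the point is that lru.New now takes the key and value types as type parameters and still returns an error for an invalid (non-positive) size, so the surrounding error handling stays the same.

// sketch: constructing a generic LRU cache the way the processor does
package main

import (
	"fmt"

	lru "github.com/hashicorp/golang-lru/v2"
)

// newEmitCache is a hypothetical helper mirroring the constructor call above.
func newEmitCache(size int) (*lru.Cache[string, uint64], error) {
	c, err := lru.New[string, uint64](size)
	if err != nil {
		return nil, fmt.Errorf("can't create LRU cache for gas emission: %w", err)
	}
	return c, nil
}

func main() {
	cache, err := newEmitCache(1000)
	if err != nil {
		panic(err)
	}
	cache.Add("receiver-1", 7) // remember the epoch of the last emission for a receiver
	fmt.Println(cache.Len())   // prints 1
}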