state: prepare for NEP-11 data tracking, refactor/rename
There is a lot of similarity, so try reusing common code and use more neutral naming.
parent 3b08b5b19b
commit c63aeb38bb
5 changed files with 262 additions and 157 deletions
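The net effect of the rename is easiest to see from the caller's side. Below is a minimal, hypothetical sketch (not part of this commit; the example package and logBoth helper are made up) of how the renamed types in this diff are meant to be used: a single TokenTransferLog accepts either transfer kind through Append(io.Serializable), while iteration stays type-specific via ForEachNEP17/ForEachNEP11, so a given log is expected to hold entries of one kind only.

package example

import (
    "math/big"

    "github.com/nspcc-dev/neo-go/pkg/core/state"
    "github.com/nspcc-dev/neo-go/pkg/util"
)

// logBoth appends one NEP-17 and one NEP-11 transfer to separate logs and
// walks the NEP-11 one. Helper and values are illustrative only.
func logBoth() error {
    nep17 := new(state.TokenTransferLog)
    if err := nep17.Append(&state.NEP17Transfer{
        Asset:  1,
        From:   util.Uint160{1, 2, 3},
        To:     util.Uint160{4, 5, 6},
        Amount: *big.NewInt(100),
    }); err != nil {
        return err
    }

    nep11 := new(state.TokenTransferLog)
    if err := nep11.Append(&state.NEP11Transfer{
        NEP17Transfer: state.NEP17Transfer{Asset: 2, Amount: *big.NewInt(1)},
        ID:            []byte{0xaa},
    }); err != nil {
        return err
    }

    // Iteration goes newest-to-oldest; returning true continues the walk.
    _, err := nep11.ForEachNEP11(func(tr *state.NEP11Transfer) (bool, error) {
        _ = tr.ID
        return true, nil
    })
    return err
}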
@@ -194,8 +194,8 @@ type bcEvent struct {
 
 // transferData is used for transfer caching during storeBlock.
 type transferData struct {
-    Info state.NEP17TransferInfo
-    Log  state.NEP17TransferLog
+    Info state.TokenTransferInfo
+    Log  state.TokenTransferLog
 }
 
 // NewBlockchain returns a new blockchain object the will use the
@@ -1004,12 +1004,12 @@ func (bc *Blockchain) storeBlock(block *block.Block, txpool *mempool.Pool) error
             return
         }
         for acc, trData := range transCache {
-            err = kvcache.PutNEP17TransferInfo(acc, &trData.Info)
+            err = kvcache.PutTokenTransferInfo(acc, &trData.Info)
             if err != nil {
                 aerdone <- err
                 return
             }
-            err = kvcache.PutNEP17TransferLog(acc, trData.Info.NextTransferBatch, &trData.Log)
+            err = kvcache.PutTokenTransferLog(acc, trData.Info.NextNEP17Batch, &trData.Log)
             if err != nil {
                 aerdone <- err
                 return
@@ -1312,12 +1312,12 @@ func (bc *Blockchain) processNEP17Transfer(cache dao.DAO, transCache map[util.Ui
 func appendNEP17Transfer(cache dao.DAO, transCache map[util.Uint160]transferData, addr util.Uint160, transfer *state.NEP17Transfer) error {
     transferData, ok := transCache[addr]
     if !ok {
-        balances, err := cache.GetNEP17TransferInfo(addr)
+        balances, err := cache.GetTokenTransferInfo(addr)
         if err != nil {
             return err
         }
-        if !balances.NewBatch {
-            trLog, err := cache.GetNEP17TransferLog(addr, balances.NextTransferBatch)
+        if !balances.NewNEP17Batch {
+            trLog, err := cache.GetTokenTransferLog(addr, balances.NextNEP17Batch)
             if err != nil {
                 return err
             }
@@ -1330,14 +1330,14 @@ func appendNEP17Transfer(cache dao.DAO, transCache map[util.Uint160]transferData
         return err
     }
     transferData.Info.LastUpdated[transfer.Asset] = transfer.Block
-    transferData.Info.NewBatch = transferData.Log.Size() >= state.NEP17TransferBatchSize
-    if transferData.Info.NewBatch {
-        err = cache.PutNEP17TransferLog(addr, transferData.Info.NextTransferBatch, &transferData.Log)
+    transferData.Info.NewNEP17Batch = transferData.Log.Size() >= state.TokenTransferBatchSize
+    if transferData.Info.NewNEP17Batch {
+        err = cache.PutTokenTransferLog(addr, transferData.Info.NextNEP17Batch, &transferData.Log)
         if err != nil {
             return err
         }
-        transferData.Info.NextTransferBatch++
-        transferData.Log = state.NEP17TransferLog{}
+        transferData.Info.NextNEP17Batch++
+        transferData.Log = state.TokenTransferLog{}
     }
     transCache[addr] = transferData
     return nil
@@ -1345,16 +1345,16 @@ func appendNEP17Transfer(cache dao.DAO, transCache map[util.Uint160]transferData
 
 // ForEachNEP17Transfer executes f for each nep17 transfer in log.
 func (bc *Blockchain) ForEachNEP17Transfer(acc util.Uint160, f func(*state.NEP17Transfer) (bool, error)) error {
-    balances, err := bc.dao.GetNEP17TransferInfo(acc)
+    balances, err := bc.dao.GetTokenTransferInfo(acc)
     if err != nil {
         return nil
     }
-    for i := int(balances.NextTransferBatch); i >= 0; i-- {
-        lg, err := bc.dao.GetNEP17TransferLog(acc, uint32(i))
+    for i := int(balances.NextNEP17Batch); i >= 0; i-- {
+        lg, err := bc.dao.GetTokenTransferLog(acc, uint32(i))
         if err != nil {
             return nil
         }
-        cont, err := lg.ForEach(f)
+        cont, err := lg.ForEachNEP17(f)
         if err != nil {
             return err
         }
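For context, the public Blockchain.ForEachNEP17Transfer API keeps its signature; only the DAO calls underneath it are renamed. A hypothetical caller (the dumpTransfers helper and example package below are illustrative, not from the repository) would still look like this:

package example

import (
    "fmt"

    "github.com/nspcc-dev/neo-go/pkg/core"
    "github.com/nspcc-dev/neo-go/pkg/core/state"
    "github.com/nspcc-dev/neo-go/pkg/util"
)

// dumpTransfers prints every NEP-17 transfer recorded for acc.
func dumpTransfers(bc *core.Blockchain, acc util.Uint160) error {
    return bc.ForEachNEP17Transfer(acc, func(tr *state.NEP17Transfer) (bool, error) {
        fmt.Printf("block %d: asset %d, amount %s\n", tr.Block, tr.Asset, tr.Amount.String())
        return true, nil // true keeps iterating, newest batch first
    })
}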
@@ -1374,7 +1374,7 @@ func (bc *Blockchain) GetNEP17Contracts() []util.Uint160 {
 // block indexes. In case of an empty account, latest stored state synchronisation point
 // is returned under Math.MinInt32 key.
 func (bc *Blockchain) GetNEP17LastUpdated(acc util.Uint160) (map[int32]uint32, error) {
-    info, err := bc.dao.GetNEP17TransferInfo(acc)
+    info, err := bc.dao.GetTokenTransferInfo(acc)
     if err != nil {
         return nil, err
     }
@@ -43,8 +43,8 @@ type DAO interface {
     GetCurrentBlockHeight() (uint32, error)
     GetCurrentHeaderHeight() (i uint32, h util.Uint256, err error)
     GetHeaderHashes() ([]util.Uint256, error)
-    GetNEP17TransferInfo(acc util.Uint160) (*state.NEP17TransferInfo, error)
-    GetNEP17TransferLog(acc util.Uint160, index uint32) (*state.NEP17TransferLog, error)
+    GetTokenTransferInfo(acc util.Uint160) (*state.TokenTransferInfo, error)
+    GetTokenTransferLog(acc util.Uint160, index uint32) (*state.TokenTransferLog, error)
     GetStateSyncPoint() (uint32, error)
     GetStateSyncCurrentBlockHeight() (uint32, error)
     GetStorageItem(id int32, key []byte) state.StorageItem
@@ -58,8 +58,8 @@ type DAO interface {
     PutAppExecResult(aer *state.AppExecResult, buf *io.BufBinWriter) error
     PutContractID(id int32, hash util.Uint160) error
     PutCurrentHeader(hashAndIndex []byte) error
-    PutNEP17TransferInfo(acc util.Uint160, bs *state.NEP17TransferInfo) error
-    PutNEP17TransferLog(acc util.Uint160, index uint32, lg *state.NEP17TransferLog) error
+    PutTokenTransferInfo(acc util.Uint160, bs *state.TokenTransferInfo) error
+    PutTokenTransferLog(acc util.Uint160, index uint32, lg *state.TokenTransferLog) error
     PutStateSyncPoint(p uint32) error
     PutStateSyncCurrentBlockHeight(h uint32) error
     PutStorageItem(id int32, key []byte, si state.StorageItem) error
@@ -69,7 +69,7 @@ type DAO interface {
     StoreAsBlock(block *block.Block, buf *io.BufBinWriter) error
     StoreAsCurrentBlock(block *block.Block, buf *io.BufBinWriter) error
     StoreAsTransaction(tx *transaction.Transaction, index uint32, buf *io.BufBinWriter) error
-    putNEP17TransferInfo(acc util.Uint160, bs *state.NEP17TransferInfo, buf *io.BufBinWriter) error
+    putTokenTransferInfo(acc util.Uint160, bs *state.TokenTransferInfo, buf *io.BufBinWriter) error
 }
 
 // Simple is memCached wrapper around DB, simple DAO implementation.
@@ -152,10 +152,10 @@ func (dao *Simple) GetContractScriptHash(id int32) (util.Uint160, error) {
 
 // -- start nep17 transfer info.
 
-// GetNEP17TransferInfo retrieves nep17 transfer info from the cache.
-func (dao *Simple) GetNEP17TransferInfo(acc util.Uint160) (*state.NEP17TransferInfo, error) {
-    key := storage.AppendPrefix(storage.STNEP17TransferInfo, acc.BytesBE())
-    bs := state.NewNEP17TransferInfo()
+// GetTokenTransferInfo retrieves nep17 transfer info from the cache.
+func (dao *Simple) GetTokenTransferInfo(acc util.Uint160) (*state.TokenTransferInfo, error) {
+    key := storage.AppendPrefix(storage.STTokenTransferInfo, acc.BytesBE())
+    bs := state.NewTokenTransferInfo()
     err := dao.GetAndDecode(bs, key)
     if err != nil && err != storage.ErrKeyNotFound {
         return nil, err
@@ -163,13 +163,13 @@ func (dao *Simple) GetNEP17TransferInfo(acc util.Uint160) (*state.NEP17TransferI
     return bs, nil
 }
 
-// PutNEP17TransferInfo saves nep17 transfer info in the cache.
-func (dao *Simple) PutNEP17TransferInfo(acc util.Uint160, bs *state.NEP17TransferInfo) error {
-    return dao.putNEP17TransferInfo(acc, bs, io.NewBufBinWriter())
+// PutTokenTransferInfo saves nep17 transfer info in the cache.
+func (dao *Simple) PutTokenTransferInfo(acc util.Uint160, bs *state.TokenTransferInfo) error {
+    return dao.putTokenTransferInfo(acc, bs, io.NewBufBinWriter())
 }
 
-func (dao *Simple) putNEP17TransferInfo(acc util.Uint160, bs *state.NEP17TransferInfo, buf *io.BufBinWriter) error {
-    key := storage.AppendPrefix(storage.STNEP17TransferInfo, acc.BytesBE())
+func (dao *Simple) putTokenTransferInfo(acc util.Uint160, bs *state.TokenTransferInfo, buf *io.BufBinWriter) error {
+    key := storage.AppendPrefix(storage.STTokenTransferInfo, acc.BytesBE())
     return dao.putWithBuffer(bs, key, buf)
 }
 
@@ -177,7 +177,7 @@ func (dao *Simple) putNEP17TransferInfo(acc util.Uint160, bs *state.NEP17Transfe
 
 // -- start transfer log.
 
-func getNEP17TransferLogKey(acc util.Uint160, index uint32) []byte {
+func getTokenTransferLogKey(acc util.Uint160, index uint32) []byte {
     key := make([]byte, 1+util.Uint160Size+4)
     key[0] = byte(storage.STNEP17Transfers)
     copy(key[1:], acc.BytesBE())
@@ -185,34 +185,34 @@ func getNEP17TransferLogKey(acc util.Uint160, index uint32) []byte {
     return key
 }
 
-// GetNEP17TransferLog retrieves transfer log from the cache.
-func (dao *Simple) GetNEP17TransferLog(acc util.Uint160, index uint32) (*state.NEP17TransferLog, error) {
-    key := getNEP17TransferLogKey(acc, index)
+// GetTokenTransferLog retrieves transfer log from the cache.
+func (dao *Simple) GetTokenTransferLog(acc util.Uint160, index uint32) (*state.TokenTransferLog, error) {
+    key := getTokenTransferLogKey(acc, index)
     value, err := dao.Store.Get(key)
     if err != nil {
         if err == storage.ErrKeyNotFound {
-            return new(state.NEP17TransferLog), nil
+            return new(state.TokenTransferLog), nil
         }
         return nil, err
     }
-    return &state.NEP17TransferLog{Raw: value}, nil
+    return &state.TokenTransferLog{Raw: value}, nil
 }
 
-// PutNEP17TransferLog saves given transfer log in the cache.
-func (dao *Simple) PutNEP17TransferLog(acc util.Uint160, index uint32, lg *state.NEP17TransferLog) error {
-    key := getNEP17TransferLogKey(acc, index)
+// PutTokenTransferLog saves given transfer log in the cache.
+func (dao *Simple) PutTokenTransferLog(acc util.Uint160, index uint32, lg *state.TokenTransferLog) error {
+    key := getTokenTransferLogKey(acc, index)
     return dao.Store.Put(key, lg.Raw)
 }
 
 // AppendNEP17Transfer appends a single NEP17 transfer to a log.
 // First return value signalizes that log size has exceeded batch size.
 func (dao *Simple) AppendNEP17Transfer(acc util.Uint160, index uint32, isNew bool, tr *state.NEP17Transfer) (bool, error) {
-    var lg *state.NEP17TransferLog
+    var lg *state.TokenTransferLog
     if isNew {
-        lg = new(state.NEP17TransferLog)
+        lg = new(state.TokenTransferLog)
     } else {
         var err error
-        lg, err = dao.GetNEP17TransferLog(acc, index)
+        lg, err = dao.GetTokenTransferLog(acc, index)
         if err != nil {
             return false, err
         }
@@ -220,7 +220,7 @@ func (dao *Simple) AppendNEP17Transfer(acc util.Uint160, index uint32, isNew boo
     if err := lg.Append(tr); err != nil {
         return false, err
     }
-    return lg.Size() >= state.NEP17TransferBatchSize, dao.PutNEP17TransferLog(acc, index, lg)
+    return lg.Size() >= state.TokenTransferBatchSize, dao.PutTokenTransferLog(acc, index, lg)
 }
 
 // -- end transfer log.
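Taken together, the renamed DAO methods keep the same batching contract: AppendNEP17Transfer reports when the batch it just wrote is full, and the caller rotates the batch index in TokenTransferInfo. A hypothetical sketch of that flow outside the actual blockchain code (the recordNEP17 helper and example package are made up):

package example

import (
    "github.com/nspcc-dev/neo-go/pkg/core/dao"
    "github.com/nspcc-dev/neo-go/pkg/core/state"
    "github.com/nspcc-dev/neo-go/pkg/util"
)

// recordNEP17 appends one transfer for acc and advances the NEP-17 batch
// index when the current batch has reached TokenTransferBatchSize.
func recordNEP17(d *dao.Simple, acc util.Uint160, tr *state.NEP17Transfer) error {
    info, err := d.GetTokenTransferInfo(acc)
    if err != nil {
        return err
    }
    full, err := d.AppendNEP17Transfer(acc, info.NextNEP17Batch, info.NewNEP17Batch, tr)
    if err != nil {
        return err
    }
    info.LastUpdated[tr.Asset] = tr.Block
    info.NewNEP17Batch = full
    if full {
        info.NextNEP17Batch++ // next append starts a fresh log at this index
    }
    return d.PutTokenTransferInfo(acc, info)
}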
(deleted file, 84 lines)
@@ -1,84 +0,0 @@
-package state
-
-import (
-    "math/big"
-    "math/rand"
-    "testing"
-    "time"
-
-    "github.com/nspcc-dev/neo-go/internal/random"
-    "github.com/nspcc-dev/neo-go/internal/testserdes"
-    "github.com/nspcc-dev/neo-go/pkg/util"
-    "github.com/stretchr/testify/require"
-)
-
-func TestNEP17TransferLog_Append(t *testing.T) {
-    r := rand.New(rand.NewSource(time.Now().UnixNano()))
-    expected := []*NEP17Transfer{
-        randomTransfer(r),
-        randomTransfer(r),
-        randomTransfer(r),
-        randomTransfer(r),
-    }
-
-    lg := new(NEP17TransferLog)
-    for _, tr := range expected {
-        require.NoError(t, lg.Append(tr))
-    }
-
-    require.Equal(t, len(expected), lg.Size())
-
-    i := len(expected) - 1
-    cont, err := lg.ForEach(func(tr *NEP17Transfer) (bool, error) {
-        require.Equal(t, expected[i], tr)
-        i--
-        return true, nil
-    })
-    require.NoError(t, err)
-    require.True(t, cont)
-}
-
-func BenchmarkNEP17TransferLog_Append(b *testing.B) {
-    r := rand.New(rand.NewSource(time.Now().UnixNano()))
-    ts := make([]*NEP17Transfer, NEP17TransferBatchSize)
-    for i := range ts {
-        ts[i] = randomTransfer(r)
-    }
-
-    lg := new(NEP17TransferLog)
-    b.ResetTimer()
-    b.ReportAllocs()
-    for i := 0; i < b.N; i++ {
-        for _, tr := range ts {
-            err := lg.Append(tr)
-            if err != nil {
-                b.FailNow()
-            }
-        }
-    }
-}
-
-func TestNEP17Transfer_DecodeBinary(t *testing.T) {
-    expected := &NEP17Transfer{
-        Asset:     123,
-        From:      util.Uint160{5, 6, 7},
-        To:        util.Uint160{8, 9, 10},
-        Amount:    *big.NewInt(42),
-        Block:     12345,
-        Timestamp: 54321,
-        Tx:        util.Uint256{8, 5, 3},
-    }
-
-    testserdes.EncodeDecodeBinary(t, expected, new(NEP17Transfer))
-}
-
-func randomTransfer(r *rand.Rand) *NEP17Transfer {
-    return &NEP17Transfer{
-        Amount: *big.NewInt(int64(r.Uint64())),
-        Block:  r.Uint32(),
-        Asset:  int32(random.Int(10, 10000000)),
-        From:   random.Uint160(),
-        To:     random.Uint160(),
-        Tx:     random.Uint256(),
-    }
-}
@@ -4,16 +4,17 @@ import (
     "bytes"
     "math/big"
 
+    "github.com/nspcc-dev/neo-go/pkg/core/storage"
     "github.com/nspcc-dev/neo-go/pkg/encoding/bigint"
     "github.com/nspcc-dev/neo-go/pkg/io"
     "github.com/nspcc-dev/neo-go/pkg/util"
 )
 
-// NEP17TransferBatchSize is the maximum number of entries for NEP17TransferLog.
-const NEP17TransferBatchSize = 128
+// TokenTransferBatchSize is the maximum number of entries for TokenTransferLog.
+const TokenTransferBatchSize = 128
 
-// NEP17TransferLog is a log of NEP17 token transfers for the specific command.
-type NEP17TransferLog struct {
+// TokenTransferLog is a serialized log of token transfers.
+type TokenTransferLog struct {
     Raw []byte
 }
 
@@ -36,27 +37,41 @@ type NEP17Transfer struct {
     Tx util.Uint256
 }
 
-// NEP17TransferInfo stores map of the NEP17 contract IDs to the balance's last updated
-// block trackers along with information about NEP17 transfer batch.
-type NEP17TransferInfo struct {
-    LastUpdated map[int32]uint32
-    // NextTransferBatch stores an index of the next transfer batch.
-    NextTransferBatch uint32
-    // NewBatch is true if batch with the `NextTransferBatch` index should be created.
-    NewBatch bool
+// NEP11Transfer represents a single NEP-11 Transfer event.
+type NEP11Transfer struct {
+    NEP17Transfer
+
+    // ID is a NEP-11 token ID.
+    ID []byte
 }
 
-// NewNEP17TransferInfo returns new NEP17TransferInfo.
-func NewNEP17TransferInfo() *NEP17TransferInfo {
-    return &NEP17TransferInfo{
+// TokenTransferInfo stores map of the contract IDs to the balance's last updated
+// block trackers along with information about NEP-17 and NEP-11 transfer batch.
+type TokenTransferInfo struct {
+    LastUpdated map[int32]uint32
+    // NextNEP11Batch stores the index of the next NEP-11 transfer batch.
+    NextNEP11Batch uint32
+    // NextNEP17Batch stores the index of the next NEP-17 transfer batch.
+    NextNEP17Batch uint32
+    // NewNEP11Batch is true if batch with the `NextNEP11Batch` index should be created.
+    NewNEP11Batch bool
+    // NewNEP17Batch is true if batch with the `NextNEP17Batch` index should be created.
+    NewNEP17Batch bool
+}
+
+// NewTokenTransferInfo returns new TokenTransferInfo.
+func NewTokenTransferInfo() *TokenTransferInfo {
+    return &TokenTransferInfo{
         LastUpdated: make(map[int32]uint32),
     }
 }
 
 // DecodeBinary implements io.Serializable interface.
-func (bs *NEP17TransferInfo) DecodeBinary(r *io.BinReader) {
-    bs.NextTransferBatch = r.ReadU32LE()
-    bs.NewBatch = r.ReadBool()
+func (bs *TokenTransferInfo) DecodeBinary(r *io.BinReader) {
+    bs.NextNEP11Batch = r.ReadU32LE()
+    bs.NextNEP17Batch = r.ReadU32LE()
+    bs.NewNEP11Batch = r.ReadBool()
+    bs.NewNEP17Batch = r.ReadBool()
     lenBalances := r.ReadVarUint()
     m := make(map[int32]uint32, lenBalances)
     for i := 0; i < int(lenBalances); i++ {
@@ -67,9 +82,11 @@ func (bs *NEP17TransferInfo) DecodeBinary(r *io.BinReader) {
 }
 
 // EncodeBinary implements io.Serializable interface.
-func (bs *NEP17TransferInfo) EncodeBinary(w *io.BinWriter) {
-    w.WriteU32LE(bs.NextTransferBatch)
-    w.WriteBool(bs.NewBatch)
+func (bs *TokenTransferInfo) EncodeBinary(w *io.BinWriter) {
+    w.WriteU32LE(bs.NextNEP11Batch)
+    w.WriteU32LE(bs.NextNEP17Batch)
+    w.WriteBool(bs.NewNEP11Batch)
+    w.WriteBool(bs.NewNEP17Batch)
     w.WriteVarUint(uint64(len(bs.LastUpdated)))
     for k, v := range bs.LastUpdated {
         w.WriteU32LE(uint32(k))
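Note that the serialized layout changes here: TokenTransferInfo writes NextNEP11Batch, NextNEP17Batch, NewNEP11Batch, NewNEP17Batch and then the LastUpdated map, where NEP17TransferInfo wrote a single counter and flag, so previously stored records would not decode with the new code. A small, hypothetical round-trip sketch (the roundTrip helper and example package are made up; it assumes the neo-go io.BufBinWriter/BinReader helpers used elsewhere in this diff):

package example

import (
    "github.com/nspcc-dev/neo-go/pkg/core/state"
    "github.com/nspcc-dev/neo-go/pkg/io"
)

// roundTrip encodes a TokenTransferInfo and decodes it back, exercising the
// new field order.
func roundTrip(in *state.TokenTransferInfo) (*state.TokenTransferInfo, error) {
    w := io.NewBufBinWriter()
    in.EncodeBinary(w.BinWriter)
    if w.Err != nil {
        return nil, w.Err
    }
    out := state.NewTokenTransferInfo()
    r := io.NewBinReaderFromBuf(w.Bytes())
    out.DecodeBinary(r)
    return out, r.Err
}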
@@ -78,7 +95,7 @@ func (bs *NEP17TransferInfo) EncodeBinary(w *io.BinWriter) {
 }
 
 // Append appends single transfer to a log.
-func (lg *NEP17TransferLog) Append(tr *NEP17Transfer) error {
+func (lg *TokenTransferLog) Append(tr io.Serializable) error {
     // The first entry, set up counter.
     if len(lg.Raw) == 0 {
         lg.Raw = append(lg.Raw, 0)
@@ -96,8 +113,30 @@ func (lg *NEP17TransferLog) Append(tr *NEP17Transfer) error {
     return nil
 }
 
-// ForEach iterates over transfer log returning on first error.
-func (lg *NEP17TransferLog) ForEach(f func(*NEP17Transfer) (bool, error)) (bool, error) {
+// ForEachNEP11 iterates over transfer log returning on first error.
+func (lg *TokenTransferLog) ForEachNEP11(f func(*NEP11Transfer) (bool, error)) (bool, error) {
+    if lg == nil || len(lg.Raw) == 0 {
+        return true, nil
+    }
+    transfers := make([]NEP11Transfer, lg.Size())
+    r := io.NewBinReaderFromBuf(lg.Raw[1:])
+    for i := 0; i < lg.Size(); i++ {
+        transfers[i].DecodeBinary(r)
+    }
+    if r.Err != nil {
+        return false, r.Err
+    }
+    for i := len(transfers) - 1; i >= 0; i-- {
+        cont, err := f(&transfers[i])
+        if err != nil || !cont {
+            return false, err
+        }
+    }
+    return true, nil
+}
+
+// ForEachNEP17 iterates over transfer log returning on first error.
+func (lg *TokenTransferLog) ForEachNEP17(f func(*NEP17Transfer) (bool, error)) (bool, error) {
     if lg == nil || len(lg.Raw) == 0 {
         return true, nil
     }
@@ -111,18 +150,15 @@ func (lg *NEP17TransferLog) ForEach(f func(*NEP17Transfer) (bool, error)) (bool,
     }
     for i := len(transfers) - 1; i >= 0; i-- {
         cont, err := f(&transfers[i])
-        if err != nil {
+        if err != nil || !cont {
             return false, err
         }
-        if !cont {
-            return false, nil
-        }
     }
     return true, nil
 }
 
 // Size returns an amount of transfer written in log.
-func (lg *NEP17TransferLog) Size() int {
+func (lg *TokenTransferLog) Size() int {
     if len(lg.Raw) == 0 {
         return 0
     }
@@ -152,3 +188,15 @@ func (t *NEP17Transfer) DecodeBinary(r *io.BinReader) {
     amount := r.ReadVarBytes(bigint.MaxBytesLen)
     t.Amount = *bigint.FromBytes(amount)
 }
+
+// EncodeBinary implements io.Serializable interface.
+func (t *NEP11Transfer) EncodeBinary(w *io.BinWriter) {
+    t.NEP17Transfer.EncodeBinary(w)
+    w.WriteVarBytes(t.ID)
+}
+
+// DecodeBinary implements io.Serializable interface.
+func (t *NEP11Transfer) DecodeBinary(r *io.BinReader) {
+    t.NEP17Transfer.DecodeBinary(r)
+    t.ID = r.ReadVarBytes(storage.MaxStorageKeyLen)
+}
pkg/core/state/tokens_test.go (new file, 141 lines)
@@ -0,0 +1,141 @@
+package state
+
+import (
+    "math/big"
+    "math/rand"
+    "testing"
+    "time"
+
+    "github.com/nspcc-dev/neo-go/internal/random"
+    "github.com/nspcc-dev/neo-go/internal/testserdes"
+    "github.com/nspcc-dev/neo-go/pkg/util"
+    "github.com/stretchr/testify/require"
+)
+
+func TestTokenTransferLog_Append17(t *testing.T) {
+    r := rand.New(rand.NewSource(time.Now().UnixNano()))
+    expected := []*NEP17Transfer{
+        random17Transfer(r),
+        random17Transfer(r),
+        random17Transfer(r),
+        random17Transfer(r),
+    }
+
+    lg := new(TokenTransferLog)
+    for _, tr := range expected {
+        require.NoError(t, lg.Append(tr))
+    }
+
+    require.Equal(t, len(expected), lg.Size())
+
+    i := len(expected) - 1
+    cont, err := lg.ForEachNEP17(func(tr *NEP17Transfer) (bool, error) {
+        require.Equal(t, expected[i], tr)
+        i--
+        return true, nil
+    })
+    require.NoError(t, err)
+    require.True(t, cont)
+}
+
+func TestTokenTransferLog_Append(t *testing.T) {
+    r := rand.New(rand.NewSource(time.Now().UnixNano()))
+    expected := []*NEP11Transfer{
+        random11Transfer(r),
+        random11Transfer(r),
+        random11Transfer(r),
+        random11Transfer(r),
+    }
+
+    lg := new(TokenTransferLog)
+    for _, tr := range expected {
+        require.NoError(t, lg.Append(tr))
+    }
+
+    require.Equal(t, len(expected), lg.Size())
+
+    i := len(expected) - 1
+    cont, err := lg.ForEachNEP11(func(tr *NEP11Transfer) (bool, error) {
+        require.Equal(t, expected[i], tr)
+        i--
+        return true, nil
+    })
+    require.NoError(t, err)
+    require.True(t, cont)
+}
+
+func BenchmarkTokenTransferLog_Append(b *testing.B) {
+    r := rand.New(rand.NewSource(time.Now().UnixNano()))
+    ts := make([]*NEP17Transfer, TokenTransferBatchSize)
+    for i := range ts {
+        ts[i] = random17Transfer(r)
+    }
+
+    lg := new(TokenTransferLog)
+    b.ResetTimer()
+    b.ReportAllocs()
+    for i := 0; i < b.N; i++ {
+        for _, tr := range ts {
+            err := lg.Append(tr)
+            if err != nil {
+                b.FailNow()
+            }
+        }
+    }
+}
+
+func TestNEP17Transfer_DecodeBinary(t *testing.T) {
+    expected := &NEP17Transfer{
+        Asset:     123,
+        From:      util.Uint160{5, 6, 7},
+        To:        util.Uint160{8, 9, 10},
+        Amount:    *big.NewInt(42),
+        Block:     12345,
+        Timestamp: 54321,
+        Tx:        util.Uint256{8, 5, 3},
+    }
+
+    testserdes.EncodeDecodeBinary(t, expected, new(NEP17Transfer))
+}
+
+func TestNEP11Transfer_DecodeBinary(t *testing.T) {
+    expected := &NEP11Transfer{
+        NEP17Transfer: NEP17Transfer{
+            Asset:     123,
+            From:      util.Uint160{5, 6, 7},
+            To:        util.Uint160{8, 9, 10},
+            Amount:    *big.NewInt(42),
+            Block:     12345,
+            Timestamp: 54321,
+            Tx:        util.Uint256{8, 5, 3},
+        },
+        ID: []byte{42, 42, 42},
+    }
+
+    testserdes.EncodeDecodeBinary(t, expected, new(NEP11Transfer))
+}
+
+func random17Transfer(r *rand.Rand) *NEP17Transfer {
+    return &NEP17Transfer{
+        Amount: *big.NewInt(int64(r.Uint64())),
+        Block:  r.Uint32(),
+        Asset:  int32(random.Int(10, 10000000)),
+        From:   random.Uint160(),
+        To:     random.Uint160(),
+        Tx:     random.Uint256(),
+    }
+}
+
+func random11Transfer(r *rand.Rand) *NEP11Transfer {
+    return &NEP11Transfer{
+        NEP17Transfer: NEP17Transfer{
+            Amount: *big.NewInt(int64(r.Uint64())),
+            Block:  r.Uint32(),
+            Asset:  int32(random.Int(10, 10000000)),
+            From:   random.Uint160(),
+            To:     random.Uint160(),
+            Tx:     random.Uint256(),
+        },
+        ID: random.Uint256().BytesBE(),
+    }
+}