Merge pull request #2136 from nspcc-dev/micro-reduce-allocs

Drop some useless allocations
Roman Khimov 2021-08-20 16:48:14 +03:00 committed by GitHub
commit 37c92f5c2f
4 changed files with 45 additions and 58 deletions


@@ -107,9 +107,11 @@ func New(stateRootEnabled bool) *Block {
 // Notice that only the hashes of the transactions are stored.
 func (b *Block) Trim() ([]byte, error) {
     buf := io.NewBufBinWriter()
+    numTx := len(b.Transactions)
+    buf.Grow(b.GetExpectedBlockSizeWithoutTransactions(numTx) + util.Uint256Size*numTx)
     b.Header.EncodeBinary(buf.BinWriter)
-    buf.WriteVarUint(uint64(len(b.Transactions)))
+    buf.WriteVarUint(uint64(numTx))
     for _, tx := range b.Transactions {
         h := tx.Hash()
         h.EncodeBinary(buf.BinWriter)
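The Grow call pre-sizes the writer's buffer to the full trimmed-block size, so the hash writes that follow don't trigger repeated grow-and-copy cycles. A minimal stdlib-only sketch of the same pattern, with hashSize standing in for util.Uint256Size and the item count made up for illustration:

package main

import (
    "bytes"
    "encoding/binary"
    "fmt"
)

func main() {
    const hashSize = 32 // stand-in for util.Uint256Size
    hashes := make([][hashSize]byte, 512)

    var buf bytes.Buffer
    // One allocation up front instead of several doubling reallocations
    // as the hashes are appended.
    buf.Grow(binary.MaxVarintLen64 + hashSize*len(hashes))

    var lenPrefix [binary.MaxVarintLen64]byte
    n := binary.PutUvarint(lenPrefix[:], uint64(len(hashes)))
    buf.Write(lenPrefix[:n])
    for i := range hashes {
        buf.Write(hashes[i][:])
    }
    fmt.Println(buf.Len()) // total bytes written, no intermediate reallocation
}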


@@ -27,7 +27,6 @@ import (
     "github.com/nspcc-dev/neo-go/pkg/core/transaction"
     "github.com/nspcc-dev/neo-go/pkg/crypto/hash"
     "github.com/nspcc-dev/neo-go/pkg/crypto/keys"
-    "github.com/nspcc-dev/neo-go/pkg/encoding/bigint"
     "github.com/nspcc-dev/neo-go/pkg/encoding/fixedn"
     "github.com/nspcc-dev/neo-go/pkg/io"
     "github.com/nspcc-dev/neo-go/pkg/smartcontract"
@@ -1038,49 +1037,35 @@ func (bc *Blockchain) handleNotification(note *state.NotificationEvent, d dao.DA
     if !ok || len(arr) != 3 {
         return
     }
-    var from []byte
-    fromValue := arr[0].Value()
-    // we don't have `from` set when we are minting tokens
-    if fromValue != nil {
-        from, ok = fromValue.([]byte)
-        if !ok {
-            return
-        }
+    from, err := parseUint160(arr[0])
+    if err != nil {
+        return
     }
-    var to []byte
-    toValue := arr[1].Value()
-    // we don't have `to` set when we are burning tokens
-    if toValue != nil {
-        to, ok = toValue.([]byte)
-        if !ok {
-            return
-        }
+    to, err := parseUint160(arr[1])
+    if err != nil {
+        return
     }
-    amount, ok := arr[2].Value().(*big.Int)
-    if !ok {
-        bs, ok := arr[2].Value().([]byte)
-        if !ok {
-            return
-        }
-        if len(bs) > bigint.MaxBytesLen {
-            return // Not a proper number.
-        }
-        amount = bigint.FromBytes(bs)
+    amount, err := arr[2].TryInteger()
+    if err != nil {
+        return
     }
     bc.processNEP17Transfer(d, transCache, h, b, note.ScriptHash, from, to, amount)
 }
 
-func parseUint160(addr []byte) util.Uint160 {
-    if u, err := util.Uint160DecodeBytesBE(addr); err == nil {
-        return u
+func parseUint160(itm stackitem.Item) (util.Uint160, error) {
+    _, ok := itm.(stackitem.Null) // Minting or burning.
+    if ok {
+        return util.Uint160{}, nil
     }
-    return util.Uint160{}
+    bytes, err := itm.TryBytes()
+    if err != nil {
+        return util.Uint160{}, err
+    }
+    return util.Uint160DecodeBytesBE(bytes)
 }
 
 func (bc *Blockchain) processNEP17Transfer(cache dao.DAO, transCache map[util.Uint160]transferData,
-    h util.Uint256, b *block.Block, sc util.Uint160, from, to []byte, amount *big.Int) {
-    toAddr := parseUint160(to)
-    fromAddr := parseUint160(from)
+    h util.Uint256, b *block.Block, sc util.Uint160, from util.Uint160, to util.Uint160, amount *big.Int) {
     var id int32
     nativeContract := bc.contracts.ByHash(sc)
     if nativeContract != nil {
@@ -1094,21 +1079,21 @@ func (bc *Blockchain) processNEP17Transfer(cache dao.DAO, transCache map[util.Ui
     }
     transfer := &state.NEP17Transfer{
         Asset: id,
-        From: fromAddr,
-        To: toAddr,
+        From: from,
+        To: to,
         Block: b.Index,
         Timestamp: b.Timestamp,
         Tx: h,
     }
-    if !fromAddr.Equals(util.Uint160{}) {
+    if !from.Equals(util.Uint160{}) {
         _ = transfer.Amount.Neg(amount) // We already have the Int.
-        if appendNEP17Transfer(cache, transCache, fromAddr, transfer) != nil {
+        if appendNEP17Transfer(cache, transCache, from, transfer) != nil {
             return
         }
     }
-    if !toAddr.Equals(util.Uint160{}) {
+    if !to.Equals(util.Uint160{}) {
         _ = transfer.Amount.Set(amount) // We already have the Int.
-        _ = appendNEP17Transfer(cache, transCache, toAddr, transfer) // Nothing useful we can do.
+        _ = appendNEP17Transfer(cache, transCache, to, transfer) // Nothing useful we can do.
     }
 }
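For reference, the new helper can be exercised directly against the stackitem package: a Null item (how NEP-17 events encode the missing side of a mint or burn) yields the zero Uint160, a byte string must decode as a 20-byte BE value, and TryInteger covers both the Integer and ByteString encodings the old code handled by hand. A hedged usage sketch, assuming the pkg/vm/stackitem and pkg/util APIs of this neo-go version; the local parse function only mirrors the unexported helper from the diff:

package main

import (
    "fmt"

    "github.com/nspcc-dev/neo-go/pkg/util"
    "github.com/nspcc-dev/neo-go/pkg/vm/stackitem"
)

// parse mirrors the unexported parseUint160 above: Null means mint/burn,
// anything else must be a 20-byte big-endian script hash.
func parse(itm stackitem.Item) (util.Uint160, error) {
    if _, ok := itm.(stackitem.Null); ok {
        return util.Uint160{}, nil
    }
    b, err := itm.TryBytes()
    if err != nil {
        return util.Uint160{}, err
    }
    return util.Uint160DecodeBytesBE(b)
}

func main() {
    from, _ := parse(stackitem.Null{})
    fmt.Println(from) // zero hash, i.e. a mint

    _, err := parse(stackitem.Make([]byte{1, 2, 3}))
    fmt.Println(err) // rejected: not a 20-byte value

    amount, _ := stackitem.Make([]byte{0x40, 0x42, 0x0f}).TryInteger()
    fmt.Println(amount) // 1000000, little-endian byte strings are accepted too
}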


@@ -57,23 +57,19 @@ func NewContext(trigger trigger.Type, bc blockchainer.Blockchainer, d dao.DAO,
     block *block.Block, tx *transaction.Transaction, log *zap.Logger) *Context {
     baseExecFee := int64(DefaultBaseExecFee)
     dao := d.GetWrapped()
-    nes := make([]state.NotificationEvent, 0)
     if bc != nil && (block == nil || block.Index != 0) {
         baseExecFee = bc.GetPolicer().GetBaseExecFee()
     }
     return &Context{
         Chain: bc,
         Network: uint32(bc.GetConfig().Magic),
         Natives: natives,
         Trigger: trigger,
         Block: block,
         Tx: tx,
         DAO: dao,
-        Notifications: nes,
         Log: log,
-        // Functions is a slice of interops sorted by ID.
-        Functions: []Function{},
         getContract: getContract,
         baseExecFee: baseExecFee,
     }
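Dropping the two eager slice allocations is safe because a nil slice already behaves like an empty one for everything the context does with these fields: len, range and append all work without a prior make. A small illustrative sketch of that property (the type and field names here are generic, not the Context ones):

package main

import "fmt"

type event struct{ name string }

type ctx struct {
    notifications []event // left nil until the first notification
}

func main() {
    var c ctx
    fmt.Println(len(c.notifications), c.notifications == nil) // 0 true

    // append allocates on first use, exactly when it is actually needed.
    c.notifications = append(c.notifications, event{name: "Transfer"})
    fmt.Println(len(c.notifications)) // 1

    for _, e := range c.notifications {
        fmt.Println(e.name)
    }
}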


@@ -5,18 +5,22 @@ import (
     "errors"
 )
 
+// ErrDrained is returned on an attempt to use already drained write buffer.
+var ErrDrained = errors.New("buffer already drained")
+
 // BufBinWriter is an additional layer on top of BinWriter that
 // automatically creates buffer to write into that you can get after all
 // writes via Bytes().
 type BufBinWriter struct {
     *BinWriter
-    buf *bytes.Buffer
+    buf bytes.Buffer
 }
 
 // NewBufBinWriter makes a BufBinWriter with an empty byte buffer.
 func NewBufBinWriter() *BufBinWriter {
-    b := new(bytes.Buffer)
-    return &BufBinWriter{BinWriter: NewBinWriterFromIO(b), buf: b}
+    b := new(BufBinWriter)
+    b.BinWriter = NewBinWriterFromIO(&b.buf)
+    return b
 }
 
 // Len returns the number of bytes of the unread portion of the buffer.
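Keeping the bytes.Buffer by value means a single allocation covers both the wrapper and the buffer header, where the pointer field needed a second heap object just for the (initially empty) Buffer. A rough stdlib-only sketch of the two layouts, with made-up type names:

package main

import (
    "bytes"
    "fmt"
)

// pointerWriter mirrors the old layout: the Buffer lives in a separate heap object.
type pointerWriter struct {
    buf *bytes.Buffer
}

// valueWriter mirrors the new layout: the Buffer header is embedded in the
// wrapper, so constructing it is one allocation instead of two.
type valueWriter struct {
    buf bytes.Buffer
}

func newPointerWriter() *pointerWriter {
    return &pointerWriter{buf: new(bytes.Buffer)} // allocates wrapper + Buffer
}

func newValueWriter() *valueWriter {
    w := new(valueWriter) // allocates only the wrapper
    // Anything that needs an io.Writer can still take &w.buf.
    return w
}

func main() {
    p, v := newPointerWriter(), newValueWriter()
    p.buf.WriteString("old layout")
    v.buf.WriteString("new layout")
    fmt.Println(p.buf.String())
    fmt.Println(v.buf.String())
}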
@@ -29,7 +33,7 @@ func (bw *BufBinWriter) Bytes() []byte {
     if bw.Err != nil {
         return nil
     }
-    bw.Err = errors.New("buffer already drained")
+    bw.Err = ErrDrained
     return bw.buf.Bytes()
 }
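Hoisting the error into the package-level ErrDrained sentinel avoids allocating a fresh error value every time a drained writer's Bytes is called and, as a side effect, gives callers something to compare against with errors.Is instead of matching the message. A generic sketch of the pattern (the type and names below are illustrative, not the pkg/io ones):

package main

import (
    "errors"
    "fmt"
)

// Sentinel in the style of ErrDrained: allocated once at package init.
var errDrained = errors.New("buffer already drained")

type writer struct {
    err  error
    data []byte
}

// Bytes hands out the accumulated data exactly once.
func (w *writer) Bytes() []byte {
    if w.err != nil {
        return nil
    }
    w.err = errDrained // no per-call errors.New here
    return w.data
}

func main() {
    w := &writer{data: []byte("payload")}
    fmt.Printf("%s\n", w.Bytes())

    if w.Bytes() == nil && errors.Is(w.err, errDrained) {
        fmt.Println("second read rejected:", w.err)
    }
}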