transaction: don't recalculate size when decoding from buffer

name               old time/op    new time/op    delta
DecodeBinary-8       3.17µs ± 6%    3.14µs ± 5%     ~     (p=0.579 n=10+10)
DecodeJSON-8         12.8µs ± 3%    12.6µs ± 3%     ~     (p=0.105 n=10+10)
DecodeFromBytes-8    3.45µs ± 4%    2.73µs ± 2%  -20.70%  (p=0.000 n=10+9)

name               old alloc/op   new alloc/op   delta
DecodeBinary-8       1.82kB ± 0%    1.82kB ± 0%     ~     (all equal)
DecodeJSON-8         3.49kB ± 0%    3.49kB ± 0%     ~     (all equal)
DecodeFromBytes-8    1.82kB ± 0%    1.44kB ± 0%  -21.05%  (p=0.000 n=10+10)

name               old allocs/op  new allocs/op  delta
DecodeBinary-8         29.0 ± 0%      29.0 ± 0%     ~     (all equal)
DecodeJSON-8           58.0 ± 0%      58.0 ± 0%     ~     (all equal)
DecodeFromBytes-8      29.0 ± 0%      21.0 ± 0%  -27.59%  (p=0.000 n=10+10)
Roman Khimov 2021-08-04 23:13:58 +03:00
parent 5e18a6141e
commit d487b54612
2 changed files with 65 additions and 3 deletions


@@ -0,0 +1,55 @@
package transaction

import (
	"encoding/base64"
	"testing"

	"github.com/nspcc-dev/neo-go/pkg/io"
	"github.com/stretchr/testify/require"
)

// Some typical transfer tx from mainnet.
var (
	benchTx     []byte
	benchTxB64  = "AK9KzFu0P5gAAAAAAIjOEgAAAAAA7jAAAAGIDdjSt7aj2J+dktSobkC9j0/CJwEAWwsCAMLrCwwUtXfkIuockX9HAVMNeEuQMxMlYkMMFIgN2NK3tqPYn52S1KhuQL2PT8InFMAfDAh0cmFuc2ZlcgwUz3bii9AGLEpHjuNVYQETGfPPpNJBYn1bUjkBQgxAUiZNae4OTSu2EOGW+6fwslLIpVsczOAR9o6R796tFf2KG+nLzs709tCQ7NELZOQ7zUzfF19ADLvH/efNT4v9LygMIQNT96/wFdPSBO7NUI9Kpn9EffTRXsS6ZJ9PqRvbenijVEFW57Mn"
	benchTxJSON []byte
)

func init() {
	var err error
	benchTx, err = base64.StdEncoding.DecodeString(benchTxB64)
	if err != nil {
		panic(err)
	}
	t, err := NewTransactionFromBytes(benchTx)
	if err != nil {
		panic(err)
	}
	benchTxJSON, err = t.MarshalJSON()
	if err != nil {
		panic(err)
	}
}

func BenchmarkDecodeBinary(t *testing.B) {
	for n := 0; n < t.N; n++ {
		r := io.NewBinReaderFromBuf(benchTx)
		tx := &Transaction{}
		tx.DecodeBinary(r)
		require.NoError(t, r.Err)
	}
}

func BenchmarkDecodeJSON(t *testing.B) {
	for n := 0; n < t.N; n++ {
		tx := &Transaction{}
		require.NoError(t, tx.UnmarshalJSON(benchTxJSON))
	}
}

func BenchmarkDecodeFromBytes(t *testing.B) {
	for n := 0; n < t.N; n++ {
		_, err := NewTransactionFromBytes(benchTx)
		require.NoError(t, err)
	}
}
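
Of these three benchmarks, only BenchmarkDecodeFromBytes goes through NewTransactionFromBytes, which is the path changed below; that is why it is the only entry that moves in the benchstat tables above, while DecodeBinary and DecodeJSON stay flat.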


@@ -143,8 +143,7 @@ func (t *Transaction) decodeHashableFields(br *io.BinReader) {
	}
}

-// DecodeBinary implements Serializable interface.
-func (t *Transaction) DecodeBinary(br *io.BinReader) {
+func (t *Transaction) decodeBinaryNoSize(br *io.BinReader) {
	t.decodeHashableFields(br)
	if br.Err != nil {
		return
@@ -159,6 +158,14 @@ func (t *Transaction) DecodeBinary(br *io.BinReader) {
	// to do it anymore.
	if br.Err == nil {
		br.Err = t.createHash()
	}
}
+
+// DecodeBinary implements Serializable interface.
+func (t *Transaction) DecodeBinary(br *io.BinReader) {
+	t.decodeBinaryNoSize(br)
+	if br.Err == nil {
+		_ = t.Size()
+	}
+}
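
The _ = t.Size() call above is what makes stream decoding pay for the size up front; the "recalculate size" in the commit title refers to this lazily cached value, which is presumably measured from the encoded form. A minimal, self-contained sketch of that caching pattern follows; the lazySized type, its fields and its encode helper are illustrative assumptions, not the actual neo-go implementation.

package main

import "fmt"

// lazySized mimics the assumed pattern behind Transaction.Size(): the encoded
// size is computed once (here by actually encoding) and memoized in a field.
type lazySized struct {
	payload []byte
	size    int
}

// encode produces a toy serialization: one length byte plus the payload.
func (l *lazySized) encode() []byte {
	return append([]byte{byte(len(l.payload))}, l.payload...)
}

// Size re-encodes on first use and caches the result; this re-encoding is the
// work that DecodeBinary triggers eagerly and that a buffer-based caller can skip.
func (l *lazySized) Size() int {
	if l.size == 0 {
		l.size = len(l.encode())
	}
	return l.size
}

func main() {
	l := &lazySized{payload: []byte("abc")}
	fmt.Println(l.Size()) // 4: computed by encoding once, then served from cache
}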
@@ -240,7 +247,7 @@ func (t *Transaction) Bytes() []byte {
func NewTransactionFromBytes(b []byte) (*Transaction, error) {
	tx := &Transaction{}
	r := io.NewBinReaderFromBuf(b)
-	tx.DecodeBinary(r)
+	tx.decodeBinaryNoSize(r)
	if r.Err != nil {
		return nil, r.Err
	}
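
The hunk above is truncated, but the reason NewTransactionFromBytes can use decodeBinaryNoSize is that it already holds the whole encoding: the transaction's size is simply len(b). A hedged sketch of the idea is below; the Sketch-suffixed function name and the private size field are assumptions about code not shown in this diff, not a quote of it.

// Sketch only, assuming Transaction has a private size field that Size()
// caches into; decodeBinaryNoSize leaves it unset, so it is filled here
// from the input buffer length instead of by re-serializing.
func newTransactionFromBytesSketch(b []byte) (*Transaction, error) {
	tx := &Transaction{}
	r := io.NewBinReaderFromBuf(b)
	tx.decodeBinaryNoSize(r)
	if r.Err != nil {
		return nil, r.Err
	}
	tx.size = len(b) // the input already is the serialized form
	return tx, nil
}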