state: use more efficient encoding for amount
It's variable-length anyway, so wasting 8 bytes for what typically is 1 byte makes no sense.
parent 806b89db76
commit ff11a5f990

2 changed files with 11 additions and 10 deletions
@@ -148,9 +148,8 @@ func (t *NEP5Transfer) EncodeBinary(w *io.BinWriter) {
 	w.WriteBytes(t.To[:])
 	w.WriteU32LE(t.Block)
 	w.WriteU64LE(t.Timestamp)
-	amountBytes := bigint.ToBytes(&t.Amount)
-	w.WriteU64LE(uint64(len(amountBytes)))
-	w.WriteBytes(amountBytes)
+	amount := bigint.ToBytes(&t.Amount)
+	w.WriteVarBytes(amount)
 }
 
 // DecodeBinary implements io.Serializable interface.
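To see the saving the commit message refers to, here is a minimal, self-contained sketch (not the neo-go code itself) comparing the old fixed 8-byte length prefix with a 1-byte variable-length prefix for a typical 1-byte amount; the single-byte prefix is what WriteVarBytes is assumed to emit for such short slices:

package main

import (
	"encoding/binary"
	"fmt"
)

func main() {
	// A typical transfer amount such as 1 serializes to a single byte
	// in the little-endian bigint form.
	amountBytes := []byte{0x01}

	// Old layout: fixed uint64 little-endian length prefix + payload.
	var prefix [8]byte
	binary.LittleEndian.PutUint64(prefix[:], uint64(len(amountBytes)))
	oldEnc := append(prefix[:], amountBytes...)

	// New layout: variable-length prefix + payload; for lengths below
	// 0xFD the prefix is a single byte.
	newEnc := append([]byte{byte(len(amountBytes))}, amountBytes...)

	fmt.Println(len(oldEnc), len(newEnc)) // 9 vs 2 bytes per stored amount
}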
@@ -166,9 +165,7 @@ func (t *NEP5Transfer) DecodeBinaryReturnCount(r *io.BinReader) int {
 	r.ReadBytes(t.To[:])
 	t.Block = r.ReadU32LE()
 	t.Timestamp = r.ReadU64LE()
-	amountLen := r.ReadU64LE()
-	amountBytes := make([]byte, amountLen)
-	r.ReadBytes(amountBytes)
-	t.Amount = *bigint.FromBytes(amountBytes)
-	return 4 + util.Uint160Size*2 + 8 + 4 + (8 + len(amountBytes)) + +util.Uint256Size
+	amount := r.ReadVarBytes(bigint.MaxBytesLen)
+	t.Amount = *bigint.FromBytes(amount)
+	return 4 + util.Uint160Size*2 + 8 + 4 + (io.GetVarSize(len(amount)) + len(amount)) + +util.Uint256Size
 }
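The returned record size now charges io.GetVarSize(len(amount)) for the prefix instead of a constant 8. A small sketch of the usual var-length size rule (an assumption about what that call computes for a length, not the package's actual code):

package main

import "fmt"

// varSize returns how many bytes a length n occupies under the usual
// variable-length prefix rule: 1 byte below 0xFD, otherwise a marker
// byte followed by a 2-, 4- or 8-byte integer.
func varSize(n uint64) int {
	switch {
	case n < 0xFD:
		return 1
	case n <= 0xFFFF:
		return 3
	case n <= 0xFFFFFFFF:
		return 5
	default:
		return 9
	}
}

func main() {
	// For a 1-byte amount the prefix is 1 byte, so the per-record
	// overhead drops from a fixed 8 bytes to 1.
	fmt.Println(varSize(1))  // 1
	fmt.Println(varSize(33)) // 1 (even the 33-byte maximum still fits)
}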
@@ -6,8 +6,12 @@ import (
 	"math/bits"
 )
 
-// wordSizeBytes is a size of a big.Word (uint) in bytes.
-const wordSizeBytes = bits.UintSize / 8
+const (
+	// MaxBytesLen is the maximum length of serialized integer suitable for Neo VM.
+	MaxBytesLen = 33 // 32 bytes for 256-bit integer plus 1 if padding needed
+	// wordSizeBytes is a size of a big.Word (uint) in bytes.
+	wordSizeBytes = bits.UintSize / 8
+)
 
 // FromBytes converts data in little-endian format to
 // an integer.
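On the 33 itself: per the comment in the diff, a 256-bit integer takes 32 bytes of magnitude, and in a signed little-endian encoding a positive value whose top bit is set needs one extra padding byte so it is not read back as negative. A small sketch of that arithmetic (the needed helper is hypothetical, not part of the bigint package):

package main

import (
	"fmt"
	"math/big"
)

// needed returns how many bytes a non-negative x occupies in a signed
// (two's-complement) little-endian encoding: one bit is reserved for the
// sign, so a value whose bit length is a multiple of 8 gets a padding byte.
func needed(x *big.Int) int {
	if x.Sign() == 0 {
		return 0
	}
	return x.BitLen()/8 + 1
}

func main() {
	one := big.NewInt(1)
	top := new(big.Int).Lsh(big.NewInt(1), 255) // 2^255: top bit of byte 32 set

	fmt.Println(needed(one)) // 1
	fmt.Println(needed(top)) // 33, matching MaxBytesLen
}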