forked from TrueCloudLab/neoneo-go
vm: restrict comparable ByteArray length
MaxByteArrayComparableSize should equal 65535.
This commit is contained in:
parent
e550608ecd
commit
1f9b92c295
2 changed files with 51 additions and 13 deletions
|
@ -7,6 +7,7 @@ import (
|
|||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"math"
|
||||
"math/big"
|
||||
"reflect"
|
||||
"unicode/utf8"
|
||||
|
@ -16,14 +17,17 @@ import (
|
|||
"github.com/nspcc-dev/neo-go/pkg/util"
|
||||
)
|
||||
|
||||
// VM limits. These bound the size of items the VM will operate on so a
// script cannot exhaust memory or force pathologically expensive operations.
const (
	// MaxBigIntegerSizeBits is the maximum size of BigInt item in bits.
	MaxBigIntegerSizeBits = 32 * 8
	// MaxArraySize is the maximum array size allowed in the VM.
	MaxArraySize = 1024
	// MaxSize is the maximum item size allowed in the VM.
	MaxSize = 1024 * 1024
	// MaxByteArrayComparableSize is the maximum allowed length of ByteArray for Equals method.
	// It is set to be the maximum uint16 value.
	MaxByteArrayComparableSize = math.MaxUint16
)
|
||||
|
||||
// Item represents the "real" value that is pushed on the stack.
|
||||
type Item interface {
|
||||
|
@ -46,7 +50,10 @@ type Item interface {
|
|||
Convert(Type) (Item, error)
|
||||
}
|
||||
|
||||
// Sentinel errors used by item conversion and comparison.
var (
	// errInvalidConversion is returned when an item cannot be converted to the requested type.
	errInvalidConversion = errors.New("invalid conversion type")
	// errExceedingMaxComparableSize is panicked by Equals when either operand's
	// ByteArray exceeds MaxByteArrayComparableSize.
	errExceedingMaxComparableSize = errors.New("the operand exceeds the maximum comparable size")
)
|
||||
|
||||
// Make tries to make appropriate stack item from provided value.
|
||||
// It will panic if it's not possible.
|
||||
|
@ -536,13 +543,22 @@ func (i *ByteArray) TryInteger() (*big.Int, error) {
|
|||
|
||||
// Equals implements Item interface.
|
||||
func (i *ByteArray) Equals(s Item) bool {
|
||||
if len(i.value) > MaxByteArrayComparableSize {
|
||||
panic(errExceedingMaxComparableSize)
|
||||
}
|
||||
if i == s {
|
||||
return true
|
||||
} else if s == nil {
|
||||
return false
|
||||
}
|
||||
val, ok := s.(*ByteArray)
|
||||
return ok && bytes.Equal(i.value, val.value)
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
if len(val.value) > MaxByteArrayComparableSize {
|
||||
panic(errExceedingMaxComparableSize)
|
||||
}
|
||||
return bytes.Equal(i.value, val.value)
|
||||
}
|
||||
|
||||
// Dup implements Item interface.
|
||||
|
|
|
@ -143,6 +143,7 @@ var equalsTestCases = map[string][]struct {
|
|||
item1 Item
|
||||
item2 Item
|
||||
result bool
|
||||
panics bool
|
||||
}{
|
||||
"struct": {
|
||||
{
|
||||
|
@ -251,6 +252,21 @@ var equalsTestCases = map[string][]struct {
|
|||
item2: Make([]byte{1, 2, 3}),
|
||||
result: true,
|
||||
},
|
||||
{
|
||||
item1: NewByteArray(make([]byte, MaxByteArrayComparableSize+1)),
|
||||
item2: NewByteArray([]byte{1, 2, 3}),
|
||||
panics: true,
|
||||
},
|
||||
{
|
||||
item1: NewByteArray([]byte{1, 2, 3}),
|
||||
item2: NewByteArray(make([]byte, MaxByteArrayComparableSize+1)),
|
||||
panics: true,
|
||||
},
|
||||
{
|
||||
item1: NewByteArray(make([]byte, MaxByteArrayComparableSize+1)),
|
||||
item2: NewByteArray(make([]byte, MaxByteArrayComparableSize+1)),
|
||||
panics: true,
|
||||
},
|
||||
},
|
||||
"array": {
|
||||
{
|
||||
|
@ -350,9 +366,15 @@ func TestEquals(t *testing.T) {
|
|||
for name, testBatch := range equalsTestCases {
|
||||
for _, testCase := range testBatch {
|
||||
t.Run(name, func(t *testing.T) {
|
||||
if testCase.panics {
|
||||
assert.Panics(t, func() {
|
||||
testCase.item1.Equals(testCase.item2)
|
||||
})
|
||||
} else {
|
||||
assert.Equal(t, testCase.result, testCase.item1.Equals(testCase.item2))
|
||||
// Reference equals
|
||||
assert.Equal(t, true, testCase.item1.Equals(testCase.item1))
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
Loading…
Reference in a new issue