stackitem: limit deserialized arrays/maps

See neo-project/neo#2531; even though they use MaxStackSize there, the
resulting element is not valid unless it has <= MaxArraySize elements.
This commit is contained in:
Roman Khimov 2021-07-16 12:28:11 +03:00
parent 654c4a6589
commit fbe8bd2d9c
3 changed files with 34 additions and 0 deletions

View file

@ -63,6 +63,7 @@ var (
// value exceeds MaxSize. // value exceeds MaxSize.
ErrTooBig = errors.New("too big") ErrTooBig = errors.New("too big")
errTooBigArray = fmt.Errorf("%w: array", ErrTooBig)
errTooBigComparable = fmt.Errorf("%w: uncomparable", ErrTooBig) errTooBigComparable = fmt.Errorf("%w: uncomparable", ErrTooBig)
errTooBigInteger = fmt.Errorf("%w: integer", ErrTooBig) errTooBigInteger = fmt.Errorf("%w: integer", ErrTooBig)
errTooBigKey = fmt.Errorf("%w: map key", ErrTooBig) errTooBigKey = fmt.Errorf("%w: map key", ErrTooBig)

View file

@ -210,6 +210,10 @@ func decodeBinary(r *io.BinReader, allowInvalid bool) Item {
return NewBigInteger(num) return NewBigInteger(num)
case ArrayT, StructT: case ArrayT, StructT:
size := int(r.ReadVarUint()) size := int(r.ReadVarUint())
if size > MaxArraySize {
r.Err = errTooBigArray
return nil
}
arr := make([]Item, size) arr := make([]Item, size)
for i := 0; i < size; i++ { for i := 0; i < size; i++ {
arr[i] = decodeBinary(r, allowInvalid) arr[i] = decodeBinary(r, allowInvalid)
@ -221,6 +225,10 @@ func decodeBinary(r *io.BinReader, allowInvalid bool) Item {
return NewStruct(arr) return NewStruct(arr)
case MapT: case MapT:
size := int(r.ReadVarUint()) size := int(r.ReadVarUint())
if size > MaxArraySize {
r.Err = errTooBigArray
return nil
}
m := NewMap() m := NewMap()
for i := 0; i < size; i++ { for i := 0; i < size; i++ {
key := decodeBinary(r, allowInvalid) key := decodeBinary(r, allowInvalid)

View file

@ -39,6 +39,7 @@ func testSerialize(t *testing.T, expectedErr error, item Item) {
func TestSerialize(t *testing.T) { func TestSerialize(t *testing.T) {
bigByteArray := NewByteArray(make([]byte, MaxSize/2)) bigByteArray := NewByteArray(make([]byte, MaxSize/2))
smallByteArray := NewByteArray(make([]byte, MaxSize/4)) smallByteArray := NewByteArray(make([]byte, MaxSize/4))
zeroByteArray := NewByteArray(make([]byte, 0))
testArray := func(t *testing.T, newItem func([]Item) Item) { testArray := func(t *testing.T, newItem func([]Item) Item) {
arr := newItem([]Item{bigByteArray}) arr := newItem([]Item{bigByteArray})
testSerialize(t, nil, arr) testSerialize(t, nil, arr)
@ -50,6 +51,18 @@ func TestSerialize(t *testing.T) {
arr.Value().([]Item)[0] = arr arr.Value().([]Item)[0] = arr
testSerialize(t, ErrRecursive, arr) testSerialize(t, ErrRecursive, arr)
items := make([]Item, 0, MaxArraySize)
for i := 0; i < MaxArraySize; i++ {
items = append(items, zeroByteArray)
}
testSerialize(t, nil, newItem(items))
items = append(items, zeroByteArray)
data, err := Serialize(newItem(items))
require.NoError(t, err)
_, err = Deserialize(data)
require.True(t, errors.Is(err, ErrTooBig), err)
} }
t.Run("array", func(t *testing.T) { t.Run("array", func(t *testing.T) {
testArray(t, func(items []Item) Item { return NewArray(items) }) testArray(t, func(items []Item) Item { return NewArray(items) })
@ -126,6 +139,18 @@ func TestSerialize(t *testing.T) {
m.Add(Make(0), NewByteArray(make([]byte, MaxSize-MaxKeySize))) m.Add(Make(0), NewByteArray(make([]byte, MaxSize-MaxKeySize)))
m.Add(NewByteArray(make([]byte, MaxKeySize)), Make(1)) m.Add(NewByteArray(make([]byte, MaxKeySize)), Make(1))
testSerialize(t, ErrTooBig, m) testSerialize(t, ErrTooBig, m)
m = NewMap()
for i := 0; i < MaxArraySize; i++ {
m.Add(Make(i), zeroByteArray)
}
testSerialize(t, nil, m)
m.Add(Make(100500), zeroByteArray)
data, err := Serialize(m)
require.NoError(t, err)
_, err = Deserialize(data)
require.True(t, errors.Is(err, ErrTooBig), err)
}) })
} }