Merge pull request #505 from nspcc-dev/fix/readarray

io: restrict ReadArray max array size, fix #503.
This commit is contained in:
Roman Khimov 2019-11-16 10:31:33 +03:00 committed by GitHub
commit 394af5f777
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
2 changed files with 29 additions and 2 deletions

View file

@ -3,10 +3,15 @@ package io
import (
"bytes"
"encoding/binary"
"fmt"
"io"
"reflect"
)
// maxArraySize is the maximum size of an array which can be decoded.
// It is taken from https://github.com/neo-project/neo/blob/master/neo/IO/Helper.cs#L130
const maxArraySize = 0x1000000
// BinReader is a convenient wrapper around a io.Reader and err object.
// Used to simplify error handling when reading into a struct with many fields.
type BinReader struct {
@ -36,7 +41,7 @@ func (r *BinReader) ReadLE(v interface{}) {
// ReadArray reads array into value which must be
// a pointer to a slice.
func (r *BinReader) ReadArray(t interface{}) {
func (r *BinReader) ReadArray(t interface{}, maxSize ...int) {
value := reflect.ValueOf(t)
if value.Kind() != reflect.Ptr || value.Elem().Kind() != reflect.Slice {
panic(value.Type().String() + " is not a pointer to a slice")
@ -55,7 +60,18 @@ func (r *BinReader) ReadArray(t interface{}) {
return
}
l := int(r.ReadVarUint())
ms := maxArraySize
if len(maxSize) != 0 {
ms = maxSize[0]
}
lu := r.ReadVarUint()
if lu > uint64(ms) {
r.Err = fmt.Errorf("array is too big (%d)", lu)
return
}
l := int(lu)
arr := reflect.MakeSlice(sliceType, l, l)
for i := 0; i < l; i++ {

View file

@ -266,6 +266,17 @@ func TestBinReader_ReadArray(t *testing.T) {
require.NoError(t, r.Err)
require.Equal(t, elems, arrVal)
r = NewBinReaderFromBuf(data)
arrVal = []testSerializable{}
r.ReadArray(&arrVal, 3)
require.NoError(t, r.Err)
require.Equal(t, elems, arrVal)
r = NewBinReaderFromBuf(data)
arrVal = []testSerializable{}
r.ReadArray(&arrVal, 2)
require.Error(t, r.Err)
r = NewBinReaderFromBuf([]byte{0})
r.ReadArray(&arrVal)
require.NoError(t, r.Err)