stackitem: limit JSON size in ToJSONWithTypes

Also do not limit depth. It was introduced in e34fa2e915 as a simple
solution to OOM problem. In this commit we do exactly the refactoring
described there. Maximum size is the same as stack item size and
can be changed if needed without significant refactoring.
`1 MiB` seems sufficient, though.

Signed-off-by: Evgeniy Stratonikov <evgeniy@nspcc.ru>
This commit is contained in:
Evgeniy Stratonikov 2022-03-04 15:03:16 +03:00
parent 6ece74a7c7
commit a8d2df874f
2 changed files with 211 additions and 50 deletions

View file

@ -9,6 +9,7 @@ import (
gio "io" gio "io"
"math" "math"
"math/big" "math/big"
"strconv"
) )
// decoder is a wrapper around json.Decoder helping to mimic C# json decoder behaviour. // decoder is a wrapper around json.Decoder helping to mimic C# json decoder behaviour.
@ -260,72 +261,120 @@ func (d *decoder) decodeMap() (*Map, error) {
// ToJSONWithTypes serializes any stackitem to JSON in a lossless way. // ToJSONWithTypes serializes any stackitem to JSON in a lossless way.
func ToJSONWithTypes(item Item) ([]byte, error) { func ToJSONWithTypes(item Item) ([]byte, error) {
result, err := toJSONWithTypes(item, make(map[Item]bool, typicalNumOfItems)) return toJSONWithTypes(nil, item, make(map[Item]sliceNoPointer, typicalNumOfItems))
}
func toJSONWithTypes(data []byte, item Item, seen map[Item]sliceNoPointer) ([]byte, error) {
if item == nil {
return nil, fmt.Errorf("%w: nil", ErrUnserializable)
}
if old, ok := seen[item]; ok {
if old.end == 0 {
// Compound item marshaling which has not yet finished.
return nil, ErrRecursive
}
if len(data)+old.end-old.start > MaxSize {
return nil, errTooBigSize
}
return append(data, data[old.start:old.end]...), nil
}
var val string
var hasValue bool
switch item.(type) {
case Null:
val = `{"type":"Any"}`
case *Interop:
val = `{"type":"Interop"}`
default:
val = `{"type":"` + item.Type().String() + `","value":`
hasValue = true
}
if len(data)+len(val) > MaxSize {
return nil, errTooBigSize
}
start := len(data)
data = append(data, val...)
if !hasValue {
return data, nil
}
// Primitive stack items are appended after the switch
// to reduce the amount of size checks.
var primitive string
var isBuffer bool
var err error
switch it := item.(type) {
case *Array, *Struct:
seen[item] = sliceNoPointer{}
data = append(data, '[')
for i, elem := range it.Value().([]Item) {
if i != 0 {
data = append(data, ',')
}
data, err = toJSONWithTypes(data, elem, seen)
if err != nil { if err != nil {
return nil, err return nil, err
} }
return json.Marshal(result)
}
func toJSONWithTypes(item Item, seen map[Item]bool) (interface{}, error) {
if len(seen) > MaxJSONDepth {
return "", ErrTooDeep
} }
var value interface{}
switch it := item.(type) {
case *Array, *Struct:
if seen[item] {
return "", ErrRecursive
}
seen[item] = true
arr := []interface{}{}
for _, elem := range it.Value().([]Item) {
s, err := toJSONWithTypes(elem, seen)
if err != nil {
return "", err
}
arr = append(arr, s)
}
value = arr
delete(seen, item)
case Bool: case Bool:
value = bool(it) if it {
case *Buffer, *ByteArray: primitive = "true"
value = base64.StdEncoding.EncodeToString(it.Value().([]byte)) } else {
primitive = "false"
}
case *ByteArray:
primitive = `"` + base64.StdEncoding.EncodeToString(it.Value().([]byte)) + `"`
case *Buffer:
isBuffer = true
primitive = `"` + base64.StdEncoding.EncodeToString(it.Value().([]byte)) + `"`
case *BigInteger: case *BigInteger:
value = it.Big().String() primitive = `"` + it.Big().String() + `"`
case *Map: case *Map:
if seen[item] { seen[item] = sliceNoPointer{}
return "", ErrRecursive data = append(data, '[')
}
seen[item] = true
arr := []interface{}{}
for i := range it.value { for i := range it.value {
// map keys are primitive types and can always be converted to json if i != 0 {
key, _ := toJSONWithTypes(it.value[i].Key, seen) data = append(data, ',')
val, err := toJSONWithTypes(it.value[i].Value, seen) }
data = append(data, `{"key":`...)
data, err = toJSONWithTypes(data, it.value[i].Key, seen)
if err != nil { if err != nil {
return "", err return nil, err
} }
arr = append(arr, map[string]interface{}{ data = append(data, `,"value":`...)
"key": key, data, err = toJSONWithTypes(data, it.value[i].Value, seen)
"value": val, if err != nil {
}) return nil, err
}
data = append(data, '}')
} }
value = arr
delete(seen, item)
case *Pointer: case *Pointer:
value = it.pos primitive = strconv.Itoa(it.pos)
case nil:
return "", fmt.Errorf("%w: nil", ErrUnserializable)
} }
result := map[string]interface{}{ if len(primitive) != 0 {
"type": item.Type().String(), if len(data)+len(primitive)+1 > MaxSize {
return nil, errTooBigSize
} }
if value != nil { data = append(data, primitive...)
result["value"] = value data = append(data, '}')
if isBuffer {
seen[item] = sliceNoPointer{start, len(data)}
} }
return result, nil } else {
if len(data)+2 > MaxSize { // also take care of '}'
return nil, errTooBigSize
}
data = append(data, ']', '}')
seen[item] = sliceNoPointer{start, len(data)}
}
return data, nil
} }
type ( type (

View file

@ -234,6 +234,8 @@ func TestToJSONWithTypes(t *testing.T) {
{"Map", NewMapWithValue([]MapElement{{Key: NewBigInteger(big.NewInt(42)), Value: NewBool(false)}}), {"Map", NewMapWithValue([]MapElement{{Key: NewBigInteger(big.NewInt(42)), Value: NewBool(false)}}),
`{"type":"Map","value":[{"key":{"type":"Integer","value":"42"},` + `{"type":"Map","value":[{"key":{"type":"Integer","value":"42"},` +
`"value":{"type":"Boolean","value":false}}]}`}, `"value":{"type":"Boolean","value":false}}]}`},
{"Interop", NewInterop(nil),
`{"type":"Interop"}`},
} }
for _, tc := range testCases { for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) { t.Run(tc.name, func(t *testing.T) {
@ -247,6 +249,40 @@ func TestToJSONWithTypes(t *testing.T) {
}) })
} }
t.Run("shared sub struct", func(t *testing.T) {
t.Run("Buffer", func(t *testing.T) {
shared := NewBuffer([]byte{1, 2, 3})
a := NewArray([]Item{shared, shared})
data, err := ToJSONWithTypes(a)
require.NoError(t, err)
expected := `{"type":"Array","value":[` +
`{"type":"Buffer","value":"AQID"},{"type":"Buffer","value":"AQID"}]}`
require.Equal(t, expected, string(data))
})
t.Run("Array", func(t *testing.T) {
shared := NewArray([]Item{})
a := NewArray([]Item{shared, shared})
data, err := ToJSONWithTypes(a)
require.NoError(t, err)
expected := `{"type":"Array","value":[` +
`{"type":"Array","value":[]},{"type":"Array","value":[]}]}`
require.Equal(t, expected, string(data))
})
t.Run("Map", func(t *testing.T) {
shared := NewMap()
m := NewMapWithValue([]MapElement{
{NewBool(true), shared},
{NewBool(false), shared},
})
data, err := ToJSONWithTypes(m)
require.NoError(t, err)
expected := `{"type":"Map","value":[` +
`{"key":{"type":"Boolean","value":true},"value":{"type":"Map","value":[]}},` +
`{"key":{"type":"Boolean","value":false},"value":{"type":"Map","value":[]}}]}`
require.Equal(t, expected, string(data))
})
})
t.Run("Invalid", func(t *testing.T) { t.Run("Invalid", func(t *testing.T) {
t.Run("RecursiveArray", func(t *testing.T) { t.Run("RecursiveArray", func(t *testing.T) {
arr := NewArray(nil) arr := NewArray(nil)
@ -266,6 +302,82 @@ func TestToJSONWithTypes(t *testing.T) {
}) })
} }
func TestToJSONWithTypesBadCases(t *testing.T) {
bigBuf := make([]byte, MaxSize)
t.Run("issue 2385", func(t *testing.T) {
const maxStackSize = 2 * 1024
items := make([]Item, maxStackSize)
for i := range items {
items[i] = NewBuffer(bigBuf)
}
_, err := ToJSONWithTypes(NewArray(items))
require.True(t, errors.Is(err, errTooBigSize), "got: %v", err)
})
t.Run("overflow on primitive item", func(t *testing.T) {
_, err := ToJSONWithTypes(NewBuffer(bigBuf))
require.True(t, errors.Is(err, errTooBigSize), "got: %v", err)
})
t.Run("overflow on array element", func(t *testing.T) {
b := NewBuffer(bigBuf[:MaxSize/2])
_, err := ToJSONWithTypes(NewArray([]Item{b, b}))
require.True(t, errors.Is(err, errTooBigSize), "got: %v", err)
})
t.Run("overflow on map key", func(t *testing.T) {
m := NewMapWithValue([]MapElement{
{NewBool(true), NewBool(true)},
{NewByteArray(bigBuf), NewBool(true)},
})
_, err := ToJSONWithTypes(m)
require.True(t, errors.Is(err, errTooBigSize), "got: %v", err)
})
t.Run("overflow on the last byte of array", func(t *testing.T) {
// Construct big enough buffer and pad with integer digits
// until the necessary branch is covered #ididthemath.
arr := NewArray([]Item{
NewByteArray(bigBuf[:MaxSize/4*3-70]),
NewBigInteger(big.NewInt(1234)),
})
_, err := ToJSONWithTypes(arr)
require.True(t, errors.Is(err, errTooBigSize), "got: %v", err)
})
t.Run("overflow on the item prefix", func(t *testing.T) {
arr := NewArray([]Item{
NewByteArray(bigBuf[:MaxSize/4*3-60]),
NewBool(true),
})
_, err := ToJSONWithTypes(arr)
require.True(t, errors.Is(err, errTooBigSize), "got: %v", err)
})
t.Run("overflow on null", func(t *testing.T) {
arr := NewArray([]Item{
NewByteArray(bigBuf[:MaxSize/4*3-52]),
Null{},
})
_, err := ToJSONWithTypes(arr)
require.True(t, errors.Is(err, errTooBigSize), "got: %v", err)
})
t.Run("overflow on interop", func(t *testing.T) {
arr := NewArray([]Item{
NewByteArray(bigBuf[:MaxSize/4*3-52]),
NewInterop(42),
})
_, err := ToJSONWithTypes(arr)
require.True(t, errors.Is(err, errTooBigSize), "got: %v", err)
})
t.Run("overflow on cached item", func(t *testing.T) {
b := NewArray([]Item{NewByteArray(bigBuf[:MaxSize/2])})
arr := NewArray([]Item{b, b})
_, err := ToJSONWithTypes(arr)
require.True(t, errors.Is(err, errTooBigSize), "got: %v", err)
})
t.Run("invalid type", func(t *testing.T) {
_, err := ToJSONWithTypes(nil)
require.True(t, errors.Is(err, ErrUnserializable), "got: %v", err)
})
}
func TestFromJSONWithTypes(t *testing.T) { func TestFromJSONWithTypes(t *testing.T) {
testCases := []struct { testCases := []struct {
name string name string