Merge pull request #2386 from nspcc-dev/json-types-limit

stackitem: limit JSON size in `ToJSONWithTypes`
This commit is contained in:
Roman Khimov 2022-03-09 11:22:44 +03:00 committed by GitHub
commit 036111d95c
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
2 changed files with 211 additions and 50 deletions

View file

@@ -9,6 +9,7 @@ import (
gio "io" gio "io"
"math" "math"
"math/big" "math/big"
"strconv"
) )
// decoder is a wrapper around json.Decoder helping to mimic C# json decoder behaviour. // decoder is a wrapper around json.Decoder helping to mimic C# json decoder behaviour.
@@ -260,72 +261,120 @@ func (d *decoder) decodeMap() (*Map, error) {
// ToJSONWithTypes serializes any stackitem to JSON in a lossless way. // ToJSONWithTypes serializes any stackitem to JSON in a lossless way.
func ToJSONWithTypes(item Item) ([]byte, error) { func ToJSONWithTypes(item Item) ([]byte, error) {
result, err := toJSONWithTypes(item, make(map[Item]bool, typicalNumOfItems)) return toJSONWithTypes(nil, item, make(map[Item]sliceNoPointer, typicalNumOfItems))
if err != nil {
return nil, err
}
return json.Marshal(result)
} }
func toJSONWithTypes(item Item, seen map[Item]bool) (interface{}, error) { func toJSONWithTypes(data []byte, item Item, seen map[Item]sliceNoPointer) ([]byte, error) {
if len(seen) > MaxJSONDepth { if item == nil {
return "", ErrTooDeep return nil, fmt.Errorf("%w: nil", ErrUnserializable)
} }
var value interface{} if old, ok := seen[item]; ok {
if old.end == 0 {
// Compound item marshaling which has not yet finished.
return nil, ErrRecursive
}
if len(data)+old.end-old.start > MaxSize {
return nil, errTooBigSize
}
return append(data, data[old.start:old.end]...), nil
}
var val string
var hasValue bool
switch item.(type) {
case Null:
val = `{"type":"Any"}`
case *Interop:
val = `{"type":"Interop"}`
default:
val = `{"type":"` + item.Type().String() + `","value":`
hasValue = true
}
if len(data)+len(val) > MaxSize {
return nil, errTooBigSize
}
start := len(data)
data = append(data, val...)
if !hasValue {
return data, nil
}
// Primitive stack items are appended after the switch
// to reduce the amount of size checks.
var primitive string
var isBuffer bool
var err error
switch it := item.(type) { switch it := item.(type) {
case *Array, *Struct: case *Array, *Struct:
if seen[item] { seen[item] = sliceNoPointer{}
return "", ErrRecursive data = append(data, '[')
} for i, elem := range it.Value().([]Item) {
seen[item] = true if i != 0 {
arr := []interface{}{} data = append(data, ',')
for _, elem := range it.Value().([]Item) { }
s, err := toJSONWithTypes(elem, seen) data, err = toJSONWithTypes(data, elem, seen)
if err != nil { if err != nil {
return "", err return nil, err
} }
arr = append(arr, s)
} }
value = arr
delete(seen, item)
case Bool: case Bool:
value = bool(it) if it {
case *Buffer, *ByteArray: primitive = "true"
value = base64.StdEncoding.EncodeToString(it.Value().([]byte)) } else {
primitive = "false"
}
case *ByteArray:
primitive = `"` + base64.StdEncoding.EncodeToString(it.Value().([]byte)) + `"`
case *Buffer:
isBuffer = true
primitive = `"` + base64.StdEncoding.EncodeToString(it.Value().([]byte)) + `"`
case *BigInteger: case *BigInteger:
value = it.Big().String() primitive = `"` + it.Big().String() + `"`
case *Map: case *Map:
if seen[item] { seen[item] = sliceNoPointer{}
return "", ErrRecursive data = append(data, '[')
}
seen[item] = true
arr := []interface{}{}
for i := range it.value { for i := range it.value {
// map keys are primitive types and can always be converted to json if i != 0 {
key, _ := toJSONWithTypes(it.value[i].Key, seen) data = append(data, ',')
val, err := toJSONWithTypes(it.value[i].Value, seen)
if err != nil {
return "", err
} }
arr = append(arr, map[string]interface{}{ data = append(data, `{"key":`...)
"key": key, data, err = toJSONWithTypes(data, it.value[i].Key, seen)
"value": val, if err != nil {
}) return nil, err
}
data = append(data, `,"value":`...)
data, err = toJSONWithTypes(data, it.value[i].Value, seen)
if err != nil {
return nil, err
}
data = append(data, '}')
} }
value = arr
delete(seen, item)
case *Pointer: case *Pointer:
value = it.pos primitive = strconv.Itoa(it.pos)
case nil:
return "", fmt.Errorf("%w: nil", ErrUnserializable)
} }
result := map[string]interface{}{ if len(primitive) != 0 {
"type": item.Type().String(), if len(data)+len(primitive)+1 > MaxSize {
return nil, errTooBigSize
}
data = append(data, primitive...)
data = append(data, '}')
if isBuffer {
seen[item] = sliceNoPointer{start, len(data)}
}
} else {
if len(data)+2 > MaxSize { // also take care of '}'
return nil, errTooBigSize
}
data = append(data, ']', '}')
seen[item] = sliceNoPointer{start, len(data)}
} }
if value != nil { return data, nil
result["value"] = value
}
return result, nil
} }
type ( type (

View file

@@ -234,6 +234,8 @@ func TestToJSONWithTypes(t *testing.T) {
{"Map", NewMapWithValue([]MapElement{{Key: NewBigInteger(big.NewInt(42)), Value: NewBool(false)}}), {"Map", NewMapWithValue([]MapElement{{Key: NewBigInteger(big.NewInt(42)), Value: NewBool(false)}}),
`{"type":"Map","value":[{"key":{"type":"Integer","value":"42"},` + `{"type":"Map","value":[{"key":{"type":"Integer","value":"42"},` +
`"value":{"type":"Boolean","value":false}}]}`}, `"value":{"type":"Boolean","value":false}}]}`},
{"Interop", NewInterop(nil),
`{"type":"Interop"}`},
} }
for _, tc := range testCases { for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) { t.Run(tc.name, func(t *testing.T) {
@@ -247,6 +249,40 @@ func TestToJSONWithTypes(t *testing.T) {
}) })
} }
// A single item referenced twice from a container must serialize without a
// false recursion error and produce identical JSON for both references.
t.Run("shared sub struct", func(t *testing.T) {
t.Run("Buffer", func(t *testing.T) {
shared := NewBuffer([]byte{1, 2, 3})
a := NewArray([]Item{shared, shared})
data, err := ToJSONWithTypes(a)
require.NoError(t, err)
// "AQID" is base64 for bytes {1, 2, 3}.
expected := `{"type":"Array","value":[` +
`{"type":"Buffer","value":"AQID"},{"type":"Buffer","value":"AQID"}]}`
require.Equal(t, expected, string(data))
})
t.Run("Array", func(t *testing.T) {
shared := NewArray([]Item{})
a := NewArray([]Item{shared, shared})
data, err := ToJSONWithTypes(a)
require.NoError(t, err)
expected := `{"type":"Array","value":[` +
`{"type":"Array","value":[]},{"type":"Array","value":[]}]}`
require.Equal(t, expected, string(data))
})
t.Run("Map", func(t *testing.T) {
// The same empty map used as the value of two different keys.
shared := NewMap()
m := NewMapWithValue([]MapElement{
{NewBool(true), shared},
{NewBool(false), shared},
})
data, err := ToJSONWithTypes(m)
require.NoError(t, err)
expected := `{"type":"Map","value":[` +
`{"key":{"type":"Boolean","value":true},"value":{"type":"Map","value":[]}},` +
`{"key":{"type":"Boolean","value":false},"value":{"type":"Map","value":[]}}]}`
require.Equal(t, expected, string(data))
})
})
t.Run("Invalid", func(t *testing.T) { t.Run("Invalid", func(t *testing.T) {
t.Run("RecursiveArray", func(t *testing.T) { t.Run("RecursiveArray", func(t *testing.T) {
arr := NewArray(nil) arr := NewArray(nil)
@@ -266,6 +302,82 @@ func TestToJSONWithTypes(t *testing.T) {
}) })
} }
// TestToJSONWithTypesBadCases checks that ToJSONWithTypes fails with
// errTooBigSize whenever the serialized output would exceed MaxSize,
// covering every size-check branch, and with ErrUnserializable for nil.
// NOTE(review): the byte-array lengths below are tuned to land the overflow
// on a specific branch (prefix, primitive body, last byte, cached copy) —
// do not change them without re-deriving the arithmetic.
func TestToJSONWithTypesBadCases(t *testing.T) {
bigBuf := make([]byte, MaxSize)
t.Run("issue 2385", func(t *testing.T) {
// A full stack of max-size buffers must be rejected, not OOM.
const maxStackSize = 2 * 1024
items := make([]Item, maxStackSize)
for i := range items {
items[i] = NewBuffer(bigBuf)
}
_, err := ToJSONWithTypes(NewArray(items))
require.True(t, errors.Is(err, errTooBigSize), "got: %v", err)
})
t.Run("overflow on primitive item", func(t *testing.T) {
_, err := ToJSONWithTypes(NewBuffer(bigBuf))
require.True(t, errors.Is(err, errTooBigSize), "got: %v", err)
})
t.Run("overflow on array element", func(t *testing.T) {
b := NewBuffer(bigBuf[:MaxSize/2])
_, err := ToJSONWithTypes(NewArray([]Item{b, b}))
require.True(t, errors.Is(err, errTooBigSize), "got: %v", err)
})
t.Run("overflow on map key", func(t *testing.T) {
m := NewMapWithValue([]MapElement{
{NewBool(true), NewBool(true)},
{NewByteArray(bigBuf), NewBool(true)},
})
_, err := ToJSONWithTypes(m)
require.True(t, errors.Is(err, errTooBigSize), "got: %v", err)
})
t.Run("overflow on the last byte of array", func(t *testing.T) {
// Construct big enough buffer and pad with integer digits
// until the necessary branch is covered #ididthemath.
arr := NewArray([]Item{
NewByteArray(bigBuf[:MaxSize/4*3-70]),
NewBigInteger(big.NewInt(1234)),
})
_, err := ToJSONWithTypes(arr)
require.True(t, errors.Is(err, errTooBigSize), "got: %v", err)
})
t.Run("overflow on the item prefix", func(t *testing.T) {
arr := NewArray([]Item{
NewByteArray(bigBuf[:MaxSize/4*3-60]),
NewBool(true),
})
_, err := ToJSONWithTypes(arr)
require.True(t, errors.Is(err, errTooBigSize), "got: %v", err)
})
t.Run("overflow on null", func(t *testing.T) {
arr := NewArray([]Item{
NewByteArray(bigBuf[:MaxSize/4*3-52]),
Null{},
})
_, err := ToJSONWithTypes(arr)
require.True(t, errors.Is(err, errTooBigSize), "got: %v", err)
})
t.Run("overflow on interop", func(t *testing.T) {
arr := NewArray([]Item{
NewByteArray(bigBuf[:MaxSize/4*3-52]),
NewInterop(42),
})
_, err := ToJSONWithTypes(arr)
require.True(t, errors.Is(err, errTooBigSize), "got: %v", err)
})
t.Run("overflow on cached item", func(t *testing.T) {
// The second reference is served from the seen-cache; the copy
// itself must also respect MaxSize.
b := NewArray([]Item{NewByteArray(bigBuf[:MaxSize/2])})
arr := NewArray([]Item{b, b})
_, err := ToJSONWithTypes(arr)
require.True(t, errors.Is(err, errTooBigSize), "got: %v", err)
})
t.Run("invalid type", func(t *testing.T) {
_, err := ToJSONWithTypes(nil)
require.True(t, errors.Is(err, ErrUnserializable), "got: %v", err)
})
}
func TestFromJSONWithTypes(t *testing.T) { func TestFromJSONWithTypes(t *testing.T) {
testCases := []struct { testCases := []struct {
name string name string