stackitem: limit JSON size in ToJSONWithTypes
Also do not limit depth. It was introduced in e34fa2e as a simple
solution to the OOM problem. In this commit we do exactly the
refactoring described there. The maximum size is the same as the stack
item size and can be changed if needed without significant refactoring.
`1 MiB` seems sufficient, though.

Signed-off-by: Evgeniy Stratonikov <evgeniy@nspcc.ru>
fyrchik committed Mar 9, 2022
1 parent 6ece74a commit a8d2df8
Showing 2 changed files with 209 additions and 48 deletions.
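For context, here is a minimal usage sketch of the API this commit changes. It is a hypothetical example, not part of the commit; it assumes the neo-go import path for the pkg/vm/stackitem package, the exported MaxSize limit referenced in the diff, and the constructors used in the tests below.

	package main

	import (
		"fmt"

		"github.com/nspcc-dev/neo-go/pkg/vm/stackitem"
	)

	func main() {
		// A small item marshals as before, in a lossless typed form.
		data, err := stackitem.ToJSONWithTypes(stackitem.NewBuffer([]byte{1, 2, 3}))
		fmt.Println(string(data), err) // {"type":"Buffer","value":"AQID"} <nil>

		// An item whose JSON form would exceed MaxSize is now rejected with an
		// error instead of being limited by nesting depth (or exhausting memory).
		huge := stackitem.NewBuffer(make([]byte, stackitem.MaxSize))
		_, err = stackitem.ToJSONWithTypes(huge)
		fmt.Println(err != nil) // true
	}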
145 changes: 97 additions & 48 deletions pkg/vm/stackitem/json.go
@@ -9,6 +9,7 @@ import (
 	gio "io"
 	"math"
 	"math/big"
+	"strconv"
 )
 
 // decoder is a wrapper around json.Decoder helping to mimic C# json decoder behaviour.
@@ -260,72 +261,120 @@ func (d *decoder) decodeMap() (*Map, error) {

 // ToJSONWithTypes serializes any stackitem to JSON in a lossless way.
 func ToJSONWithTypes(item Item) ([]byte, error) {
-	result, err := toJSONWithTypes(item, make(map[Item]bool, typicalNumOfItems))
-	if err != nil {
-		return nil, err
-	}
-	return json.Marshal(result)
+	return toJSONWithTypes(nil, item, make(map[Item]sliceNoPointer, typicalNumOfItems))
 }
 
-func toJSONWithTypes(item Item, seen map[Item]bool) (interface{}, error) {
-	if len(seen) > MaxJSONDepth {
-		return "", ErrTooDeep
+func toJSONWithTypes(data []byte, item Item, seen map[Item]sliceNoPointer) ([]byte, error) {
+	if item == nil {
+		return nil, fmt.Errorf("%w: nil", ErrUnserializable)
 	}
+	if old, ok := seen[item]; ok {
+		if old.end == 0 {
+			// Compound item marshaling which has not yet finished.
+			return nil, ErrRecursive
+		}
+		if len(data)+old.end-old.start > MaxSize {
+			return nil, errTooBigSize
+		}
+		return append(data, data[old.start:old.end]...), nil
+	}
+
+	var val string
+	var hasValue bool
+	switch item.(type) {
+	case Null:
+		val = `{"type":"Any"}`
+	case *Interop:
+		val = `{"type":"Interop"}`
+	default:
+		val = `{"type":"` + item.Type().String() + `","value":`
+		hasValue = true
+	}
+
+	if len(data)+len(val) > MaxSize {
+		return nil, errTooBigSize
+	}
-	var value interface{}
+
+	start := len(data)
+
+	data = append(data, val...)
+	if !hasValue {
+		return data, nil
+	}
+
+	// Primitive stack items are appended after the switch
+	// to reduce the amount of size checks.
+	var primitive string
+	var isBuffer bool
+	var err error
+
 	switch it := item.(type) {
 	case *Array, *Struct:
-		if seen[item] {
-			return "", ErrRecursive
-		}
-		seen[item] = true
-		arr := []interface{}{}
-		for _, elem := range it.Value().([]Item) {
-			s, err := toJSONWithTypes(elem, seen)
+		seen[item] = sliceNoPointer{}
+		data = append(data, '[')
+		for i, elem := range it.Value().([]Item) {
+			if i != 0 {
+				data = append(data, ',')
+			}
+			data, err = toJSONWithTypes(data, elem, seen)
 			if err != nil {
-				return "", err
+				return nil, err
 			}
-			arr = append(arr, s)
 		}
-		value = arr
-		delete(seen, item)
 	case Bool:
-		value = bool(it)
-	case *Buffer, *ByteArray:
-		value = base64.StdEncoding.EncodeToString(it.Value().([]byte))
+		if it {
+			primitive = "true"
+		} else {
+			primitive = "false"
+		}
+	case *ByteArray:
+		primitive = `"` + base64.StdEncoding.EncodeToString(it.Value().([]byte)) + `"`
+	case *Buffer:
+		isBuffer = true
+		primitive = `"` + base64.StdEncoding.EncodeToString(it.Value().([]byte)) + `"`
 	case *BigInteger:
-		value = it.Big().String()
+		primitive = `"` + it.Big().String() + `"`
 	case *Map:
-		if seen[item] {
-			return "", ErrRecursive
-		}
-		seen[item] = true
-		arr := []interface{}{}
+		seen[item] = sliceNoPointer{}
+		data = append(data, '[')
 		for i := range it.value {
-			// map keys are primitive types and can always be converted to json
-			key, _ := toJSONWithTypes(it.value[i].Key, seen)
-			val, err := toJSONWithTypes(it.value[i].Value, seen)
+			if i != 0 {
+				data = append(data, ',')
+			}
+			data = append(data, `{"key":`...)
+			data, err = toJSONWithTypes(data, it.value[i].Key, seen)
+			if err != nil {
+				return nil, err
+			}
+			data = append(data, `,"value":`...)
+			data, err = toJSONWithTypes(data, it.value[i].Value, seen)
 			if err != nil {
-				return "", err
+				return nil, err
 			}
-			arr = append(arr, map[string]interface{}{
-				"key":   key,
-				"value": val,
-			})
+			data = append(data, '}')
 		}
-		value = arr
-		delete(seen, item)
 	case *Pointer:
-		value = it.pos
-	case nil:
-		return "", fmt.Errorf("%w: nil", ErrUnserializable)
-	}
-	result := map[string]interface{}{
-		"type": item.Type().String(),
+		primitive = strconv.Itoa(it.pos)
 	}
-	if value != nil {
-		result["value"] = value
+	if len(primitive) != 0 {
+		if len(data)+len(primitive)+1 > MaxSize {
+			return nil, errTooBigSize
+		}
+		data = append(data, primitive...)
+		data = append(data, '}')
+
+		if isBuffer {
+			seen[item] = sliceNoPointer{start, len(data)}
+		}
+	} else {
+		if len(data)+2 > MaxSize { // also take care of '}'
+			return nil, errTooBigSize
+		}
+		data = append(data, ']', '}')
+
+		seen[item] = sliceNoPointer{start, len(data)}
 	}
-	return result, nil
+	return data, nil
 }
 
 type (
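The `type (` block above is truncated in this view. Judging only by the fields used in the new code (the `old.end == 0` recursion check and the `data[old.start:old.end]` copy), the `sliceNoPointer` entry it declares is presumably just the byte range of an already-marshaled item, along these lines (a sketch inferred from usage, not the verbatim declaration):

	// sliceNoPointer records where an already-serialized item sits in the
	// output buffer, so a repeated reference can be copied from
	// data[start:end] instead of being marshaled again; end == 0 means the
	// compound item is still being marshaled, i.e. the reference is recursive.
	type sliceNoPointer struct {
		start, end int
	}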
112 changes: 112 additions & 0 deletions pkg/vm/stackitem/json_test.go
@@ -234,6 +234,8 @@ func TestToJSONWithTypes(t *testing.T) {
 		{"Map", NewMapWithValue([]MapElement{{Key: NewBigInteger(big.NewInt(42)), Value: NewBool(false)}}),
 			`{"type":"Map","value":[{"key":{"type":"Integer","value":"42"},` +
 				`"value":{"type":"Boolean","value":false}}]}`},
+		{"Interop", NewInterop(nil),
+			`{"type":"Interop"}`},
 	}
 	for _, tc := range testCases {
 		t.Run(tc.name, func(t *testing.T) {
@@ -247,6 +249,40 @@ func TestToJSONWithTypes(t *testing.T) {
 		})
 	}
 
+	t.Run("shared sub struct", func(t *testing.T) {
+		t.Run("Buffer", func(t *testing.T) {
+			shared := NewBuffer([]byte{1, 2, 3})
+			a := NewArray([]Item{shared, shared})
+			data, err := ToJSONWithTypes(a)
+			require.NoError(t, err)
+			expected := `{"type":"Array","value":[` +
+				`{"type":"Buffer","value":"AQID"},{"type":"Buffer","value":"AQID"}]}`
+			require.Equal(t, expected, string(data))
+		})
+		t.Run("Array", func(t *testing.T) {
+			shared := NewArray([]Item{})
+			a := NewArray([]Item{shared, shared})
+			data, err := ToJSONWithTypes(a)
+			require.NoError(t, err)
+			expected := `{"type":"Array","value":[` +
+				`{"type":"Array","value":[]},{"type":"Array","value":[]}]}`
+			require.Equal(t, expected, string(data))
+		})
+		t.Run("Map", func(t *testing.T) {
+			shared := NewMap()
+			m := NewMapWithValue([]MapElement{
+				{NewBool(true), shared},
+				{NewBool(false), shared},
+			})
+			data, err := ToJSONWithTypes(m)
+			require.NoError(t, err)
+			expected := `{"type":"Map","value":[` +
+				`{"key":{"type":"Boolean","value":true},"value":{"type":"Map","value":[]}},` +
+				`{"key":{"type":"Boolean","value":false},"value":{"type":"Map","value":[]}}]}`
+			require.Equal(t, expected, string(data))
+		})
+	})
+
 	t.Run("Invalid", func(t *testing.T) {
 		t.Run("RecursiveArray", func(t *testing.T) {
 			arr := NewArray(nil)
@@ -266,6 +302,82 @@ func TestToJSONWithTypes(t *testing.T) {
 	})
 }
 
+func TestToJSONWithTypesBadCases(t *testing.T) {
+	bigBuf := make([]byte, MaxSize)
+
+	t.Run("issue 2385", func(t *testing.T) {
+		const maxStackSize = 2 * 1024
+
+		items := make([]Item, maxStackSize)
+		for i := range items {
+			items[i] = NewBuffer(bigBuf)
+		}
+		_, err := ToJSONWithTypes(NewArray(items))
+		require.True(t, errors.Is(err, errTooBigSize), "got: %v", err)
+	})
+	t.Run("overflow on primitive item", func(t *testing.T) {
+		_, err := ToJSONWithTypes(NewBuffer(bigBuf))
+		require.True(t, errors.Is(err, errTooBigSize), "got: %v", err)
+	})
+	t.Run("overflow on array element", func(t *testing.T) {
+		b := NewBuffer(bigBuf[:MaxSize/2])
+		_, err := ToJSONWithTypes(NewArray([]Item{b, b}))
+		require.True(t, errors.Is(err, errTooBigSize), "got: %v", err)
+	})
+	t.Run("overflow on map key", func(t *testing.T) {
+		m := NewMapWithValue([]MapElement{
+			{NewBool(true), NewBool(true)},
+			{NewByteArray(bigBuf), NewBool(true)},
+		})
+		_, err := ToJSONWithTypes(m)
+		require.True(t, errors.Is(err, errTooBigSize), "got: %v", err)
+	})
+	t.Run("overflow on the last byte of array", func(t *testing.T) {
+		// Construct big enough buffer and pad with integer digits
+		// until the necessary branch is covered #ididthemath.
+		arr := NewArray([]Item{
+			NewByteArray(bigBuf[:MaxSize/4*3-70]),
+			NewBigInteger(big.NewInt(1234)),
+		})
+		_, err := ToJSONWithTypes(arr)
+		require.True(t, errors.Is(err, errTooBigSize), "got: %v", err)
+	})
+	t.Run("overflow on the item prefix", func(t *testing.T) {
+		arr := NewArray([]Item{
+			NewByteArray(bigBuf[:MaxSize/4*3-60]),
+			NewBool(true),
+		})
+		_, err := ToJSONWithTypes(arr)
+		require.True(t, errors.Is(err, errTooBigSize), "got: %v", err)
+	})
+	t.Run("overflow on null", func(t *testing.T) {
+		arr := NewArray([]Item{
+			NewByteArray(bigBuf[:MaxSize/4*3-52]),
+			Null{},
+		})
+		_, err := ToJSONWithTypes(arr)
+		require.True(t, errors.Is(err, errTooBigSize), "got: %v", err)
+	})
+	t.Run("overflow on interop", func(t *testing.T) {
+		arr := NewArray([]Item{
+			NewByteArray(bigBuf[:MaxSize/4*3-52]),
+			NewInterop(42),
+		})
+		_, err := ToJSONWithTypes(arr)
+		require.True(t, errors.Is(err, errTooBigSize), "got: %v", err)
+	})
+	t.Run("overflow on cached item", func(t *testing.T) {
+		b := NewArray([]Item{NewByteArray(bigBuf[:MaxSize/2])})
+		arr := NewArray([]Item{b, b})
+		_, err := ToJSONWithTypes(arr)
+		require.True(t, errors.Is(err, errTooBigSize), "got: %v", err)
+	})
+	t.Run("invalid type", func(t *testing.T) {
+		_, err := ToJSONWithTypes(nil)
+		require.True(t, errors.Is(err, ErrUnserializable), "got: %v", err)
+	})
+}
+
 func TestFromJSONWithTypes(t *testing.T) {
 	testCases := []struct {
 		name string
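A side note on the `MaxSize/4*3` slicing in the overflow sub-tests above: base64 turns 3 input bytes into 4 output characters, so a payload of three quarters of the limit already fills it before the JSON type prefix is appended. A quick check of that arithmetic (assuming MaxSize is 1 MiB, as the commit message suggests):

	package main

	import (
		"encoding/base64"
		"fmt"
	)

	func main() {
		const maxSize = 1 << 20 // assumed 1 MiB limit
		payload := maxSize / 4 * 3
		// 786432 input bytes encode to exactly 1048576 base64 characters, i.e.
		// maxSize; the -52/-60/-70 adjustments in the tests leave just enough
		// headroom for the next element's JSON wrapper to hit each overflow branch.
		fmt.Println(base64.StdEncoding.EncodedLen(payload) == maxSize) // true
	}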
