implement filter codec
@@ -1,856 +0,0 @@
// Copyright 2020 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

//go:build goexperiment.jsonv2

package jsontext

import (
	"bytes"
	"errors"
	"io"
	"math"
	"math/rand"
	"path"
	"reflect"
	"strings"
	"testing"

	"encoding/json/internal/jsontest"
	"encoding/json/internal/jsonwire"
)

func E(err error) *SyntacticError {
	return &SyntacticError{Err: err}
}

func newInvalidCharacterError(prefix, where string) *SyntacticError {
	return E(jsonwire.NewInvalidCharacterError(prefix, where))
}

func newInvalidEscapeSequenceError(what string) *SyntacticError {
	return E(jsonwire.NewInvalidEscapeSequenceError(what))
}

func (e *SyntacticError) withPos(prefix string, pointer Pointer) *SyntacticError {
	e.ByteOffset = int64(len(prefix))
	e.JSONPointer = pointer
	return e
}

func equalError(x, y error) bool {
	return reflect.DeepEqual(x, y)
}

var (
	zeroToken Token
	zeroValue Value
)

// tokOrVal is either a Token or a Value.
type tokOrVal interface{ Kind() Kind }

type coderTestdataEntry struct {
	name             jsontest.CaseName
	in               string
	outCompacted     string
	outEscaped       string // outCompacted if empty; escapes all runes in a string
	outIndented      string // outCompacted if empty; uses " " for indent prefix and "\t" for indent
	outCanonicalized string // outCompacted if empty
	tokens           []Token
	pointers         []Pointer
}
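
// Illustrative sketch, not part of the original file: the helpers above are
// how expected errors are written in the encoder error tables later in this
// commit. The prefix argument encodes the byte offset of the failure and the
// pointer encodes its location; exampleWantErr is a hypothetical name.
var exampleWantErr = E(io.ErrUnexpectedEOF).withPos(`{"X":`, "/X")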
|
||||
|
||||
var coderTestdata = []coderTestdataEntry{{
|
||||
name: jsontest.Name("Null"),
|
||||
in: ` null `,
|
||||
outCompacted: `null`,
|
||||
tokens: []Token{Null},
|
||||
pointers: []Pointer{""},
|
||||
}, {
|
||||
name: jsontest.Name("False"),
|
||||
in: ` false `,
|
||||
outCompacted: `false`,
|
||||
tokens: []Token{False},
|
||||
}, {
|
||||
name: jsontest.Name("True"),
|
||||
in: ` true `,
|
||||
outCompacted: `true`,
|
||||
tokens: []Token{True},
|
||||
}, {
|
||||
name: jsontest.Name("EmptyString"),
|
||||
in: ` "" `,
|
||||
outCompacted: `""`,
|
||||
tokens: []Token{String("")},
|
||||
}, {
|
||||
name: jsontest.Name("SimpleString"),
|
||||
in: ` "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ" `,
|
||||
outCompacted: `"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"`,
|
||||
outEscaped: `"\u0061\u0062\u0063\u0064\u0065\u0066\u0067\u0068\u0069\u006a\u006b\u006c\u006d\u006e\u006f\u0070\u0071\u0072\u0073\u0074\u0075\u0076\u0077\u0078\u0079\u007a\u0041\u0042\u0043\u0044\u0045\u0046\u0047\u0048\u0049\u004a\u004b\u004c\u004d\u004e\u004f\u0050\u0051\u0052\u0053\u0054\u0055\u0056\u0057\u0058\u0059\u005a"`,
|
||||
tokens: []Token{String("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ")},
|
||||
}, {
|
||||
name: jsontest.Name("ComplicatedString"),
|
||||
in: " \"Hello, 世界 🌟★☆✩🌠 " + "\u0080\u00f6\u20ac\ud799\ue000\ufb33\ufffd\U0001f602" + ` \ud800\udead \"\\\/\b\f\n\r\t \u0022\u005c\u002f\u0008\u000c\u000a\u000d\u0009" `,
|
||||
outCompacted: "\"Hello, 世界 🌟★☆✩🌠 " + "\u0080\u00f6\u20ac\ud799\ue000\ufb33\ufffd\U0001f602" + " 𐊭 \\\"\\\\/\\b\\f\\n\\r\\t \\\"\\\\/\\b\\f\\n\\r\\t\"",
|
||||
outEscaped: `"\u0048\u0065\u006c\u006c\u006f\u002c\u0020\u4e16\u754c\u0020\ud83c\udf1f\u2605\u2606\u2729\ud83c\udf20\u0020\u0080\u00f6\u20ac\ud799\ue000\ufb33\ufffd\ud83d\ude02\u0020\ud800\udead\u0020\u0022\u005c\u002f\u0008\u000c\u000a\u000d\u0009\u0020\u0022\u005c\u002f\u0008\u000c\u000a\u000d\u0009"`,
|
||||
outCanonicalized: `"Hello, 世界 🌟★☆✩🌠 ö€힙דּ<EE8080>😂 𐊭 \"\\/\b\f\n\r\t \"\\/\b\f\n\r\t"`,
|
||||
tokens: []Token{rawToken("\"Hello, 世界 🌟★☆✩🌠 " + "\u0080\u00f6\u20ac\ud799\ue000\ufb33\ufffd\U0001f602" + " 𐊭 \\\"\\\\/\\b\\f\\n\\r\\t \\\"\\\\/\\b\\f\\n\\r\\t\"")},
|
||||
}, {
|
||||
name: jsontest.Name("ZeroNumber"),
|
||||
in: ` 0 `,
|
||||
outCompacted: `0`,
|
||||
tokens: []Token{Uint(0)},
|
||||
}, {
|
||||
name: jsontest.Name("SimpleNumber"),
|
||||
in: ` 123456789 `,
|
||||
outCompacted: `123456789`,
|
||||
tokens: []Token{Uint(123456789)},
|
||||
}, {
|
||||
name: jsontest.Name("NegativeNumber"),
|
||||
in: ` -123456789 `,
|
||||
outCompacted: `-123456789`,
|
||||
tokens: []Token{Int(-123456789)},
|
||||
}, {
|
||||
name: jsontest.Name("FractionalNumber"),
|
||||
in: " 0.123456789 ",
|
||||
outCompacted: `0.123456789`,
|
||||
tokens: []Token{Float(0.123456789)},
|
||||
}, {
|
||||
name: jsontest.Name("ExponentNumber"),
|
||||
in: " 0e12456789 ",
|
||||
outCompacted: `0e12456789`,
|
||||
outCanonicalized: `0`,
|
||||
tokens: []Token{rawToken(`0e12456789`)},
|
||||
}, {
|
||||
name: jsontest.Name("ExponentNumberP"),
|
||||
in: " 0e+12456789 ",
|
||||
outCompacted: `0e+12456789`,
|
||||
outCanonicalized: `0`,
|
||||
tokens: []Token{rawToken(`0e+12456789`)},
|
||||
}, {
|
||||
name: jsontest.Name("ExponentNumberN"),
|
||||
in: " 0e-12456789 ",
|
||||
outCompacted: `0e-12456789`,
|
||||
outCanonicalized: `0`,
|
||||
tokens: []Token{rawToken(`0e-12456789`)},
|
||||
}, {
|
||||
name: jsontest.Name("ComplicatedNumber"),
|
||||
in: ` -123456789.987654321E+0123456789 `,
|
||||
outCompacted: `-123456789.987654321E+0123456789`,
|
||||
outCanonicalized: `-1.7976931348623157e+308`,
|
||||
tokens: []Token{rawToken(`-123456789.987654321E+0123456789`)},
|
||||
}, {
|
||||
name: jsontest.Name("Numbers"),
|
||||
in: ` [
|
||||
0, -0, 0.0, -0.0, 1.00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001, 1e1000,
|
||||
-5e-324, 1e+100, 1.7976931348623157e+308,
|
||||
9007199254740990, 9007199254740991, 9007199254740992, 9007199254740993, 9007199254740994,
|
||||
-9223372036854775808, 9223372036854775807, 0, 18446744073709551615
|
||||
] `,
|
||||
outCompacted: "[0,-0,0.0,-0.0,1.00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001,1e1000,-5e-324,1e+100,1.7976931348623157e+308,9007199254740990,9007199254740991,9007199254740992,9007199254740993,9007199254740994,-9223372036854775808,9223372036854775807,0,18446744073709551615]",
|
||||
outIndented: `[
|
||||
0,
|
||||
-0,
|
||||
0.0,
|
||||
-0.0,
|
||||
1.00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001,
|
||||
1e1000,
|
||||
-5e-324,
|
||||
1e+100,
|
||||
1.7976931348623157e+308,
|
||||
9007199254740990,
|
||||
9007199254740991,
|
||||
9007199254740992,
|
||||
9007199254740993,
|
||||
9007199254740994,
|
||||
-9223372036854775808,
|
||||
9223372036854775807,
|
||||
0,
|
||||
18446744073709551615
|
||||
]`,
|
||||
outCanonicalized: `[0,0,0,0,1,1.7976931348623157e+308,-5e-324,1e+100,1.7976931348623157e+308,9007199254740990,9007199254740991,9007199254740992,9007199254740992,9007199254740994,-9223372036854776000,9223372036854776000,0,18446744073709552000]`,
|
||||
tokens: []Token{
|
||||
BeginArray,
|
||||
Float(0), Float(math.Copysign(0, -1)), rawToken(`0.0`), rawToken(`-0.0`), rawToken(`1.00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001`), rawToken(`1e1000`),
|
||||
Float(-5e-324), Float(1e100), Float(1.7976931348623157e+308),
|
||||
Float(9007199254740990), Float(9007199254740991), Float(9007199254740992), rawToken(`9007199254740993`), rawToken(`9007199254740994`),
|
||||
Int(minInt64), Int(maxInt64), Uint(minUint64), Uint(maxUint64),
|
||||
EndArray,
|
||||
},
|
||||
pointers: []Pointer{
|
||||
"", "/0", "/1", "/2", "/3", "/4", "/5", "/6", "/7", "/8", "/9", "/10", "/11", "/12", "/13", "/14", "/15", "/16", "/17", "",
|
||||
},
|
||||
}, {
|
||||
name: jsontest.Name("ObjectN0"),
|
||||
in: ` { } `,
|
||||
outCompacted: `{}`,
|
||||
tokens: []Token{BeginObject, EndObject},
|
||||
pointers: []Pointer{"", ""},
|
||||
}, {
|
||||
name: jsontest.Name("ObjectN1"),
|
||||
in: ` { "0" : 0 } `,
|
||||
outCompacted: `{"0":0}`,
|
||||
outEscaped: `{"\u0030":0}`,
|
||||
outIndented: `{
|
||||
"0": 0
|
||||
}`,
|
||||
tokens: []Token{BeginObject, String("0"), Uint(0), EndObject},
|
||||
pointers: []Pointer{"", "/0", "/0", ""},
|
||||
}, {
|
||||
name: jsontest.Name("ObjectN2"),
|
||||
in: ` { "0" : 0 , "1" : 1 } `,
|
||||
outCompacted: `{"0":0,"1":1}`,
|
||||
outEscaped: `{"\u0030":0,"\u0031":1}`,
|
||||
outIndented: `{
|
||||
"0": 0,
|
||||
"1": 1
|
||||
}`,
|
||||
tokens: []Token{BeginObject, String("0"), Uint(0), String("1"), Uint(1), EndObject},
|
||||
pointers: []Pointer{"", "/0", "/0", "/1", "/1", ""},
|
||||
}, {
|
||||
name: jsontest.Name("ObjectNested"),
|
||||
in: ` { "0" : { "1" : { "2" : { "3" : { "4" : { } } } } } } `,
|
||||
outCompacted: `{"0":{"1":{"2":{"3":{"4":{}}}}}}`,
|
||||
outEscaped: `{"\u0030":{"\u0031":{"\u0032":{"\u0033":{"\u0034":{}}}}}}`,
|
||||
outIndented: `{
|
||||
"0": {
|
||||
"1": {
|
||||
"2": {
|
||||
"3": {
|
||||
"4": {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}`,
|
||||
tokens: []Token{BeginObject, String("0"), BeginObject, String("1"), BeginObject, String("2"), BeginObject, String("3"), BeginObject, String("4"), BeginObject, EndObject, EndObject, EndObject, EndObject, EndObject, EndObject},
|
||||
pointers: []Pointer{
|
||||
"",
|
||||
"/0", "/0",
|
||||
"/0/1", "/0/1",
|
||||
"/0/1/2", "/0/1/2",
|
||||
"/0/1/2/3", "/0/1/2/3",
|
||||
"/0/1/2/3/4", "/0/1/2/3/4",
|
||||
"/0/1/2/3/4",
|
||||
"/0/1/2/3",
|
||||
"/0/1/2",
|
||||
"/0/1",
|
||||
"/0",
|
||||
"",
|
||||
},
|
||||
}, {
|
||||
name: jsontest.Name("ObjectSuperNested"),
|
||||
in: `{"": {
|
||||
"44444": {
|
||||
"6666666": "ccccccc",
|
||||
"77777777": "bb",
|
||||
"555555": "aaaa"
|
||||
},
|
||||
"0": {
|
||||
"3333": "bbb",
|
||||
"11": "",
|
||||
"222": "aaaaa"
|
||||
}
|
||||
}}`,
|
||||
outCompacted: `{"":{"44444":{"6666666":"ccccccc","77777777":"bb","555555":"aaaa"},"0":{"3333":"bbb","11":"","222":"aaaaa"}}}`,
|
||||
outEscaped: `{"":{"\u0034\u0034\u0034\u0034\u0034":{"\u0036\u0036\u0036\u0036\u0036\u0036\u0036":"\u0063\u0063\u0063\u0063\u0063\u0063\u0063","\u0037\u0037\u0037\u0037\u0037\u0037\u0037\u0037":"\u0062\u0062","\u0035\u0035\u0035\u0035\u0035\u0035":"\u0061\u0061\u0061\u0061"},"\u0030":{"\u0033\u0033\u0033\u0033":"\u0062\u0062\u0062","\u0031\u0031":"","\u0032\u0032\u0032":"\u0061\u0061\u0061\u0061\u0061"}}}`,
|
||||
outIndented: `{
|
||||
"": {
|
||||
"44444": {
|
||||
"6666666": "ccccccc",
|
||||
"77777777": "bb",
|
||||
"555555": "aaaa"
|
||||
},
|
||||
"0": {
|
||||
"3333": "bbb",
|
||||
"11": "",
|
||||
"222": "aaaaa"
|
||||
}
|
||||
}
|
||||
}`,
|
||||
outCanonicalized: `{"":{"0":{"11":"","222":"aaaaa","3333":"bbb"},"44444":{"555555":"aaaa","6666666":"ccccccc","77777777":"bb"}}}`,
|
||||
tokens: []Token{
|
||||
BeginObject,
|
||||
String(""),
|
||||
BeginObject,
|
||||
String("44444"),
|
||||
BeginObject,
|
||||
String("6666666"), String("ccccccc"),
|
||||
String("77777777"), String("bb"),
|
||||
String("555555"), String("aaaa"),
|
||||
EndObject,
|
||||
String("0"),
|
||||
BeginObject,
|
||||
String("3333"), String("bbb"),
|
||||
String("11"), String(""),
|
||||
String("222"), String("aaaaa"),
|
||||
EndObject,
|
||||
EndObject,
|
||||
EndObject,
|
||||
},
|
||||
pointers: []Pointer{
|
||||
"",
|
||||
"/", "/",
|
||||
"//44444", "//44444",
|
||||
"//44444/6666666", "//44444/6666666",
|
||||
"//44444/77777777", "//44444/77777777",
|
||||
"//44444/555555", "//44444/555555",
|
||||
"//44444",
|
||||
"//0", "//0",
|
||||
"//0/3333", "//0/3333",
|
||||
"//0/11", "//0/11",
|
||||
"//0/222", "//0/222",
|
||||
"//0",
|
||||
"/",
|
||||
"",
|
||||
},
|
||||
}, {
|
||||
name: jsontest.Name("ArrayN0"),
|
||||
in: ` [ ] `,
|
||||
outCompacted: `[]`,
|
||||
tokens: []Token{BeginArray, EndArray},
|
||||
pointers: []Pointer{"", ""},
|
||||
}, {
|
||||
name: jsontest.Name("ArrayN1"),
|
||||
in: ` [ 0 ] `,
|
||||
outCompacted: `[0]`,
|
||||
outIndented: `[
|
||||
0
|
||||
]`,
|
||||
tokens: []Token{BeginArray, Uint(0), EndArray},
|
||||
pointers: []Pointer{"", "/0", ""},
|
||||
}, {
|
||||
name: jsontest.Name("ArrayN2"),
|
||||
in: ` [ 0 , 1 ] `,
|
||||
outCompacted: `[0,1]`,
|
||||
outIndented: `[
|
||||
0,
|
||||
1
|
||||
]`,
|
||||
tokens: []Token{BeginArray, Uint(0), Uint(1), EndArray},
|
||||
}, {
|
||||
name: jsontest.Name("ArrayNested"),
|
||||
in: ` [ [ [ [ [ ] ] ] ] ] `,
|
||||
outCompacted: `[[[[[]]]]]`,
|
||||
outIndented: `[
|
||||
[
|
||||
[
|
||||
[
|
||||
[]
|
||||
]
|
||||
]
|
||||
]
|
||||
]`,
|
||||
tokens: []Token{BeginArray, BeginArray, BeginArray, BeginArray, BeginArray, EndArray, EndArray, EndArray, EndArray, EndArray},
|
||||
pointers: []Pointer{
|
||||
"",
|
||||
"/0",
|
||||
"/0/0",
|
||||
"/0/0/0",
|
||||
"/0/0/0/0",
|
||||
"/0/0/0/0",
|
||||
"/0/0/0",
|
||||
"/0/0",
|
||||
"/0",
|
||||
"",
|
||||
},
|
||||
}, {
|
||||
name: jsontest.Name("Everything"),
|
||||
in: ` {
|
||||
"literals" : [ null , false , true ],
|
||||
"string" : "Hello, 世界" ,
|
||||
"number" : 3.14159 ,
|
||||
"arrayN0" : [ ] ,
|
||||
"arrayN1" : [ 0 ] ,
|
||||
"arrayN2" : [ 0 , 1 ] ,
|
||||
"objectN0" : { } ,
|
||||
"objectN1" : { "0" : 0 } ,
|
||||
"objectN2" : { "0" : 0 , "1" : 1 }
|
||||
} `,
|
||||
outCompacted: `{"literals":[null,false,true],"string":"Hello, 世界","number":3.14159,"arrayN0":[],"arrayN1":[0],"arrayN2":[0,1],"objectN0":{},"objectN1":{"0":0},"objectN2":{"0":0,"1":1}}`,
|
||||
outEscaped: `{"\u006c\u0069\u0074\u0065\u0072\u0061\u006c\u0073":[null,false,true],"\u0073\u0074\u0072\u0069\u006e\u0067":"\u0048\u0065\u006c\u006c\u006f\u002c\u0020\u4e16\u754c","\u006e\u0075\u006d\u0062\u0065\u0072":3.14159,"\u0061\u0072\u0072\u0061\u0079\u004e\u0030":[],"\u0061\u0072\u0072\u0061\u0079\u004e\u0031":[0],"\u0061\u0072\u0072\u0061\u0079\u004e\u0032":[0,1],"\u006f\u0062\u006a\u0065\u0063\u0074\u004e\u0030":{},"\u006f\u0062\u006a\u0065\u0063\u0074\u004e\u0031":{"\u0030":0},"\u006f\u0062\u006a\u0065\u0063\u0074\u004e\u0032":{"\u0030":0,"\u0031":1}}`,
|
||||
outIndented: `{
|
||||
"literals": [
|
||||
null,
|
||||
false,
|
||||
true
|
||||
],
|
||||
"string": "Hello, 世界",
|
||||
"number": 3.14159,
|
||||
"arrayN0": [],
|
||||
"arrayN1": [
|
||||
0
|
||||
],
|
||||
"arrayN2": [
|
||||
0,
|
||||
1
|
||||
],
|
||||
"objectN0": {},
|
||||
"objectN1": {
|
||||
"0": 0
|
||||
},
|
||||
"objectN2": {
|
||||
"0": 0,
|
||||
"1": 1
|
||||
}
|
||||
}`,
|
||||
outCanonicalized: `{"arrayN0":[],"arrayN1":[0],"arrayN2":[0,1],"literals":[null,false,true],"number":3.14159,"objectN0":{},"objectN1":{"0":0},"objectN2":{"0":0,"1":1},"string":"Hello, 世界"}`,
|
||||
tokens: []Token{
|
||||
BeginObject,
|
||||
String("literals"), BeginArray, Null, False, True, EndArray,
|
||||
String("string"), String("Hello, 世界"),
|
||||
String("number"), Float(3.14159),
|
||||
String("arrayN0"), BeginArray, EndArray,
|
||||
String("arrayN1"), BeginArray, Uint(0), EndArray,
|
||||
String("arrayN2"), BeginArray, Uint(0), Uint(1), EndArray,
|
||||
String("objectN0"), BeginObject, EndObject,
|
||||
String("objectN1"), BeginObject, String("0"), Uint(0), EndObject,
|
||||
String("objectN2"), BeginObject, String("0"), Uint(0), String("1"), Uint(1), EndObject,
|
||||
EndObject,
|
||||
},
|
||||
pointers: []Pointer{
|
||||
"",
|
||||
"/literals", "/literals",
|
||||
"/literals/0",
|
||||
"/literals/1",
|
||||
"/literals/2",
|
||||
"/literals",
|
||||
"/string", "/string",
|
||||
"/number", "/number",
|
||||
"/arrayN0", "/arrayN0", "/arrayN0",
|
||||
"/arrayN1", "/arrayN1",
|
||||
"/arrayN1/0",
|
||||
"/arrayN1",
|
||||
"/arrayN2", "/arrayN2",
|
||||
"/arrayN2/0",
|
||||
"/arrayN2/1",
|
||||
"/arrayN2",
|
||||
"/objectN0", "/objectN0", "/objectN0",
|
||||
"/objectN1", "/objectN1",
|
||||
"/objectN1/0", "/objectN1/0",
|
||||
"/objectN1",
|
||||
"/objectN2", "/objectN2",
|
||||
"/objectN2/0", "/objectN2/0",
|
||||
"/objectN2/1", "/objectN2/1",
|
||||
"/objectN2",
|
||||
"",
|
||||
},
|
||||
}}

// TestCoderInterleaved tests that we can interleave calls that operate on
// tokens and raw values. The only error condition is trying to operate on a
// raw value when the next token is an end of object or array.
func TestCoderInterleaved(t *testing.T) {
	for _, td := range coderTestdata {
		// In TokenFirst and ValueFirst, alternate between tokens and values.
		// In TokenDelims, only use tokens for object and array delimiters.
		for _, modeName := range []string{"TokenFirst", "ValueFirst", "TokenDelims"} {
			t.Run(path.Join(td.name.Name, modeName), func(t *testing.T) {
				testCoderInterleaved(t, td.name.Where, modeName, td)
			})
		}
	}
}
func testCoderInterleaved(t *testing.T, where jsontest.CasePos, modeName string, td coderTestdataEntry) {
	src := strings.NewReader(td.in)
	dst := new(bytes.Buffer)
	dec := NewDecoder(src)
	enc := NewEncoder(dst)
	tickTock := modeName == "TokenFirst"
	for {
		if modeName == "TokenDelims" {
			switch dec.PeekKind() {
			case '{', '}', '[', ']':
				tickTock = true // as token
			default:
				tickTock = false // as value
			}
		}
		if tickTock {
			tok, err := dec.ReadToken()
			if err != nil {
				if err == io.EOF {
					break
				}
				t.Fatalf("%s: Decoder.ReadToken error: %v", where, err)
			}
			if err := enc.WriteToken(tok); err != nil {
				t.Fatalf("%s: Encoder.WriteToken error: %v", where, err)
			}
		} else {
			val, err := dec.ReadValue()
			if err != nil {
				// It is a syntactic error to call ReadValue
				// at the end of an object or array.
				// Retry as a ReadToken call.
				expectError := dec.PeekKind() == '}' || dec.PeekKind() == ']'
				if expectError {
					if !errors.As(err, new(*SyntacticError)) {
						t.Fatalf("%s: Decoder.ReadToken error is %T, want %T", where, err, new(SyntacticError))
					}
					tickTock = !tickTock
					continue
				}

				if err == io.EOF {
					break
				}
				t.Fatalf("%s: Decoder.ReadValue error: %v", where, err)
			}
			if err := enc.WriteValue(val); err != nil {
				t.Fatalf("%s: Encoder.WriteValue error: %v", where, err)
			}
		}
		tickTock = !tickTock
	}

	got := dst.String()
	want := td.outCompacted + "\n"
	if got != want {
		t.Fatalf("%s: output mismatch:\ngot %q\nwant %q", where, got, want)
	}
}
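
// Illustrative sketch, not part of the original file: interleaving token and
// value reads on one Decoder, as the test above does. Reading a raw value is
// a syntactic error when the next token is an end delimiter, so the caller
// must fall back to ReadToken for '}' and ']'.
func sketchInterleavedReads() {
	dec := NewDecoder(strings.NewReader(`{"fizz":["buzz"]}`))
	dec.ReadToken() // '{'
	dec.ReadValue() // "fizz" as a raw object name
	dec.ReadToken() // '['
	dec.ReadValue() // "buzz"
	_, err := dec.ReadValue() // fails: the next token is ']', a *SyntacticError
	_ = err                   // fall back to ReadToken instead
	dec.ReadToken() // ']'
	dec.ReadToken() // '}'
}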

func TestCoderStackPointer(t *testing.T) {
	tests := []struct {
		token Token
		want  Pointer
	}{
		{Null, ""},

		{BeginArray, ""},
		{EndArray, ""},

		{BeginArray, ""},
		{Bool(true), "/0"},
		{EndArray, ""},

		{BeginArray, ""},
		{String("hello"), "/0"},
		{String("goodbye"), "/1"},
		{EndArray, ""},

		{BeginObject, ""},
		{EndObject, ""},

		{BeginObject, ""},
		{String("hello"), "/hello"},
		{String("goodbye"), "/hello"},
		{EndObject, ""},

		{BeginObject, ""},
		{String(""), "/"},
		{Null, "/"},
		{String("0"), "/0"},
		{Null, "/0"},
		{String("~"), "/~0"},
		{Null, "/~0"},
		{String("/"), "/~1"},
		{Null, "/~1"},
		{String("a//b~/c/~d~~e"), "/a~1~1b~0~1c~1~0d~0~0e"},
		{Null, "/a~1~1b~0~1c~1~0d~0~0e"},
		{String(" \r\n\t"), "/ \r\n\t"},
		{Null, "/ \r\n\t"},
		{EndObject, ""},

		{BeginArray, ""},
		{BeginObject, "/0"},
		{String(""), "/0/"},
		{BeginArray, "/0/"},
		{BeginObject, "/0//0"},
		{String("#"), "/0//0/#"},
		{Null, "/0//0/#"},
		{EndObject, "/0//0"},
		{EndArray, "/0/"},
		{EndObject, "/0"},
		{EndArray, ""},
	}

	for _, allowDupes := range []bool{false, true} {
		var name string
		switch allowDupes {
		case false:
			name = "RejectDuplicateNames"
		case true:
			name = "AllowDuplicateNames"
		}

		t.Run(name, func(t *testing.T) {
			bb := new(bytes.Buffer)

			enc := NewEncoder(bb, AllowDuplicateNames(allowDupes))
			for i, tt := range tests {
				if err := enc.WriteToken(tt.token); err != nil {
					t.Fatalf("%d: Encoder.WriteToken error: %v", i, err)
				}
				if got := enc.StackPointer(); got != tests[i].want {
					t.Fatalf("%d: Encoder.StackPointer = %v, want %v", i, got, tests[i].want)
				}
			}

			dec := NewDecoder(bb, AllowDuplicateNames(allowDupes))
			for i := range tests {
				if _, err := dec.ReadToken(); err != nil {
					t.Fatalf("%d: Decoder.ReadToken error: %v", i, err)
				}
				if got := dec.StackPointer(); got != tests[i].want {
					t.Fatalf("%d: Decoder.StackPointer = %v, want %v", i, got, tests[i].want)
				}
			}
		})
	}
}
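
// Illustrative sketch, not part of the original file: the pointers checked
// above follow RFC 6901, where "~" escapes to "~0" and "/" escapes to "~1".
// escapePointerToken is a hypothetical helper showing that mapping.
func escapePointerToken(name string) string {
	name = strings.ReplaceAll(name, "~", "~0") // must run before the "/" rule
	name = strings.ReplaceAll(name, "/", "~1")
	return "/" + name // e.g. "a//b~/c/~d~~e" -> "/a~1~1b~0~1c~1~0d~0~0e"
}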

func TestCoderMaxDepth(t *testing.T) {
	trimArray := func(b []byte) []byte { return b[len(`[`) : len(b)-len(`]`)] }
	maxArrays := []byte(strings.Repeat(`[`, maxNestingDepth+1) + strings.Repeat(`]`, maxNestingDepth+1))
	trimObject := func(b []byte) []byte { return b[len(`{"":`) : len(b)-len(`}`)] }
	maxObjects := []byte(strings.Repeat(`{"":`, maxNestingDepth+1) + `""` + strings.Repeat(`}`, maxNestingDepth+1))

	t.Run("Decoder", func(t *testing.T) {
		var dec Decoder
		checkReadToken := func(t *testing.T, wantKind Kind, wantErr error) {
			t.Helper()
			if tok, err := dec.ReadToken(); tok.Kind() != wantKind || !equalError(err, wantErr) {
				t.Fatalf("Decoder.ReadToken = (%q, %v), want (%q, %v)", byte(tok.Kind()), err, byte(wantKind), wantErr)
			}
		}
		checkReadValue := func(t *testing.T, wantLen int, wantErr error) {
			t.Helper()
			if val, err := dec.ReadValue(); len(val) != wantLen || !equalError(err, wantErr) {
				t.Fatalf("Decoder.ReadValue = (%d, %v), want (%d, %v)", len(val), err, wantLen, wantErr)
			}
		}

		t.Run("ArraysValid/SingleValue", func(t *testing.T) {
			dec.s.reset(trimArray(maxArrays), nil)
			checkReadValue(t, maxNestingDepth*len(`[]`), nil)
		})
		t.Run("ArraysValid/TokenThenValue", func(t *testing.T) {
			dec.s.reset(trimArray(maxArrays), nil)
			checkReadToken(t, '[', nil)
			checkReadValue(t, (maxNestingDepth-1)*len(`[]`), nil)
			checkReadToken(t, ']', nil)
		})
		t.Run("ArraysValid/AllTokens", func(t *testing.T) {
			dec.s.reset(trimArray(maxArrays), nil)
			for range maxNestingDepth {
				checkReadToken(t, '[', nil)
			}
			for range maxNestingDepth {
				checkReadToken(t, ']', nil)
			}
		})

		wantErr := &SyntacticError{
			ByteOffset:  maxNestingDepth,
			JSONPointer: Pointer(strings.Repeat("/0", maxNestingDepth)),
			Err:         errMaxDepth,
		}
		t.Run("ArraysInvalid/SingleValue", func(t *testing.T) {
			dec.s.reset(maxArrays, nil)
			checkReadValue(t, 0, wantErr)
		})
		t.Run("ArraysInvalid/TokenThenValue", func(t *testing.T) {
			dec.s.reset(maxArrays, nil)
			checkReadToken(t, '[', nil)
			checkReadValue(t, 0, wantErr)
		})
		t.Run("ArraysInvalid/AllTokens", func(t *testing.T) {
			dec.s.reset(maxArrays, nil)
			for range maxNestingDepth {
				checkReadToken(t, '[', nil)
			}
			checkReadValue(t, 0, wantErr)
		})

		t.Run("ObjectsValid/SingleValue", func(t *testing.T) {
			dec.s.reset(trimObject(maxObjects), nil)
			checkReadValue(t, maxNestingDepth*len(`{"":}`)+len(`""`), nil)
		})
		t.Run("ObjectsValid/TokenThenValue", func(t *testing.T) {
			dec.s.reset(trimObject(maxObjects), nil)
			checkReadToken(t, '{', nil)
			checkReadToken(t, '"', nil)
			checkReadValue(t, (maxNestingDepth-1)*len(`{"":}`)+len(`""`), nil)
			checkReadToken(t, '}', nil)
		})
		t.Run("ObjectsValid/AllTokens", func(t *testing.T) {
			dec.s.reset(trimObject(maxObjects), nil)
			for range maxNestingDepth {
				checkReadToken(t, '{', nil)
				checkReadToken(t, '"', nil)
			}
			checkReadToken(t, '"', nil)
			for range maxNestingDepth {
				checkReadToken(t, '}', nil)
			}
		})

		wantErr = &SyntacticError{
			ByteOffset:  maxNestingDepth * int64(len(`{"":`)),
			JSONPointer: Pointer(strings.Repeat("/", maxNestingDepth)),
			Err:         errMaxDepth,
		}
		t.Run("ObjectsInvalid/SingleValue", func(t *testing.T) {
			dec.s.reset(maxObjects, nil)
			checkReadValue(t, 0, wantErr)
		})
		t.Run("ObjectsInvalid/TokenThenValue", func(t *testing.T) {
			dec.s.reset(maxObjects, nil)
			checkReadToken(t, '{', nil)
			checkReadToken(t, '"', nil)
			checkReadValue(t, 0, wantErr)
		})
		t.Run("ObjectsInvalid/AllTokens", func(t *testing.T) {
			dec.s.reset(maxObjects, nil)
			for range maxNestingDepth {
				checkReadToken(t, '{', nil)
				checkReadToken(t, '"', nil)
			}
			checkReadToken(t, 0, wantErr)
		})
	})

	t.Run("Encoder", func(t *testing.T) {
		var enc Encoder
		checkWriteToken := func(t *testing.T, tok Token, wantErr error) {
			t.Helper()
			if err := enc.WriteToken(tok); !equalError(err, wantErr) {
				t.Fatalf("Encoder.WriteToken = %v, want %v", err, wantErr)
			}
		}
		checkWriteValue := func(t *testing.T, val Value, wantErr error) {
			t.Helper()
			if err := enc.WriteValue(val); !equalError(err, wantErr) {
				t.Fatalf("Encoder.WriteValue = %v, want %v", err, wantErr)
			}
		}

		wantErr := &SyntacticError{
			ByteOffset:  maxNestingDepth,
			JSONPointer: Pointer(strings.Repeat("/0", maxNestingDepth)),
			Err:         errMaxDepth,
		}
		t.Run("Arrays/SingleValue", func(t *testing.T) {
			enc.s.reset(enc.s.Buf[:0], nil)
			checkWriteValue(t, maxArrays, wantErr)
			checkWriteValue(t, trimArray(maxArrays), nil)
		})
		t.Run("Arrays/TokenThenValue", func(t *testing.T) {
			enc.s.reset(enc.s.Buf[:0], nil)
			checkWriteToken(t, BeginArray, nil)
			checkWriteValue(t, trimArray(maxArrays), wantErr)
			checkWriteValue(t, trimArray(trimArray(maxArrays)), nil)
			checkWriteToken(t, EndArray, nil)
		})
		t.Run("Arrays/AllTokens", func(t *testing.T) {
			enc.s.reset(enc.s.Buf[:0], nil)
			for range maxNestingDepth {
				checkWriteToken(t, BeginArray, nil)
			}
			checkWriteToken(t, BeginArray, wantErr)
			for range maxNestingDepth {
				checkWriteToken(t, EndArray, nil)
			}
		})

		wantErr = &SyntacticError{
			ByteOffset:  maxNestingDepth * int64(len(`{"":`)),
			JSONPointer: Pointer(strings.Repeat("/", maxNestingDepth)),
			Err:         errMaxDepth,
		}
		t.Run("Objects/SingleValue", func(t *testing.T) {
			enc.s.reset(enc.s.Buf[:0], nil)
			checkWriteValue(t, maxObjects, wantErr)
			checkWriteValue(t, trimObject(maxObjects), nil)
		})
		t.Run("Objects/TokenThenValue", func(t *testing.T) {
			enc.s.reset(enc.s.Buf[:0], nil)
			checkWriteToken(t, BeginObject, nil)
			checkWriteToken(t, String(""), nil)
			checkWriteValue(t, trimObject(maxObjects), wantErr)
			checkWriteValue(t, trimObject(trimObject(maxObjects)), nil)
			checkWriteToken(t, EndObject, nil)
		})
		t.Run("Objects/AllTokens", func(t *testing.T) {
			enc.s.reset(enc.s.Buf[:0], nil)
			for range maxNestingDepth - 1 {
				checkWriteToken(t, BeginObject, nil)
				checkWriteToken(t, String(""), nil)
			}
			checkWriteToken(t, BeginObject, nil)
			checkWriteToken(t, String(""), nil)
			checkWriteToken(t, BeginObject, wantErr)
			checkWriteToken(t, String(""), nil)
			for range maxNestingDepth {
				checkWriteToken(t, EndObject, nil)
			}
		})
	})
}

// FaultyBuffer implements io.Reader and io.Writer.
// It may process fewer bytes than the provided buffer
// and may randomly return an error.
type FaultyBuffer struct {
	B []byte

	// MaxBytes is the maximum number of bytes read/written.
	// A random number of bytes within [0, MaxBytes] are processed.
	// A non-positive value is treated as infinity.
	MaxBytes int

	// MayError specifies whether to randomly provide this error.
	// Even if an error is returned, no bytes are dropped.
	MayError error

	// Rand to use for pseudo-random behavior.
	// If nil, it will be initialized with rand.NewSource(0).
	Rand rand.Source
}

func (p *FaultyBuffer) Read(b []byte) (int, error) {
	b = b[:copy(b[:p.mayTruncate(len(b))], p.B)]
	p.B = p.B[len(b):]
	if len(p.B) == 0 && (len(b) == 0 || p.randN(2) == 0) {
		return len(b), io.EOF
	}
	return len(b), p.mayError()
}

func (p *FaultyBuffer) Write(b []byte) (int, error) {
	b2 := b[:p.mayTruncate(len(b))]
	p.B = append(p.B, b2...)
	if len(b2) < len(b) {
		return len(b2), io.ErrShortWrite
	}
	return len(b2), p.mayError()
}

// mayTruncate may return a value between [0, n].
func (p *FaultyBuffer) mayTruncate(n int) int {
	if p.MaxBytes > 0 {
		if n > p.MaxBytes {
			n = p.MaxBytes
		}
		return p.randN(n + 1)
	}
	return n
}

// mayError may return a non-nil error.
func (p *FaultyBuffer) mayError() error {
	if p.MayError != nil && p.randN(2) == 0 {
		return p.MayError
	}
	return nil
}

func (p *FaultyBuffer) randN(n int) int {
	if p.Rand == nil {
		p.Rand = rand.NewSource(0)
	}
	return int(p.Rand.Int63() % int64(n))
}
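
// Illustrative sketch, not part of the original file: FaultyBuffer can back a
// Decoder or Encoder to exercise partial reads and writes. TestFaultyEncoder
// later in this commit uses it as a faulty io.Writer; a Reader-side use might
// look like this (sketchFaultyBufferRead is a hypothetical name).
func sketchFaultyBufferRead() error {
	src := &FaultyBuffer{B: []byte(`{"fizz":"buzz"}`), MaxBytes: 1}
	dec := NewDecoder(src)
	_, err := dec.ReadValue() // still reads the whole object despite 1-byte reads
	return err
}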
File diff suppressed because it is too large
@@ -1,737 +0,0 @@
// Copyright 2020 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

//go:build goexperiment.jsonv2

package jsontext

import (
	"bytes"
	"errors"
	"io"
	"path"
	"slices"
	"testing"

	"encoding/json/internal/jsonflags"
	"encoding/json/internal/jsontest"
	"encoding/json/internal/jsonwire"
)

// TestEncoder tests whether we can produce JSON with either tokens or raw values.
func TestEncoder(t *testing.T) {
	for _, td := range coderTestdata {
		for _, formatName := range []string{"Compact", "Indented"} {
			for _, typeName := range []string{"Token", "Value", "TokenDelims"} {
				t.Run(path.Join(td.name.Name, typeName, formatName), func(t *testing.T) {
					testEncoder(t, td.name.Where, formatName, typeName, td)
				})
			}
		}
	}
}
func testEncoder(t *testing.T, where jsontest.CasePos, formatName, typeName string, td coderTestdataEntry) {
	var want string
	var opts []Options
	dst := new(bytes.Buffer)
	opts = append(opts, jsonflags.OmitTopLevelNewline|1)
	want = td.outCompacted
	switch formatName {
	case "Indented":
		opts = append(opts, Multiline(true))
		opts = append(opts, WithIndentPrefix("\t"))
		opts = append(opts, WithIndent("    "))
		if td.outIndented != "" {
			want = td.outIndented
		}
	}
	enc := NewEncoder(dst, opts...)

	switch typeName {
	case "Token":
		var pointers []Pointer
		for _, tok := range td.tokens {
			if err := enc.WriteToken(tok); err != nil {
				t.Fatalf("%s: Encoder.WriteToken error: %v", where, err)
			}
			if td.pointers != nil {
				pointers = append(pointers, enc.StackPointer())
			}
		}
		if !slices.Equal(pointers, td.pointers) {
			t.Fatalf("%s: pointers mismatch:\ngot %q\nwant %q", where, pointers, td.pointers)
		}
	case "Value":
		if err := enc.WriteValue(Value(td.in)); err != nil {
			t.Fatalf("%s: Encoder.WriteValue error: %v", where, err)
		}
	case "TokenDelims":
		// Use WriteToken for object/array delimiters, WriteValue otherwise.
		for _, tok := range td.tokens {
			switch tok.Kind() {
			case '{', '}', '[', ']':
				if err := enc.WriteToken(tok); err != nil {
					t.Fatalf("%s: Encoder.WriteToken error: %v", where, err)
				}
			default:
				val := Value(tok.String())
				if tok.Kind() == '"' {
					val, _ = jsonwire.AppendQuote(nil, tok.String(), &jsonflags.Flags{})
				}
				if err := enc.WriteValue(val); err != nil {
					t.Fatalf("%s: Encoder.WriteValue error: %v", where, err)
				}
			}
		}
	}

	got := dst.String()
	if got != want {
		t.Errorf("%s: output mismatch:\ngot %q\nwant %q", where, got, want)
	}
}

// TestFaultyEncoder tests that temporary I/O errors are not fatal.
func TestFaultyEncoder(t *testing.T) {
	for _, td := range coderTestdata {
		for _, typeName := range []string{"Token", "Value"} {
			t.Run(path.Join(td.name.Name, typeName), func(t *testing.T) {
				testFaultyEncoder(t, td.name.Where, typeName, td)
			})
		}
	}
}
func testFaultyEncoder(t *testing.T, where jsontest.CasePos, typeName string, td coderTestdataEntry) {
	b := &FaultyBuffer{
		MaxBytes: 1,
		MayError: io.ErrShortWrite,
	}

	// Write all the tokens.
	// Even if the underlying io.Writer may be faulty,
	// writing a valid token or value is guaranteed to at least
	// be appended to the internal buffer.
	// In other words, syntactic errors occur before I/O errors.
	enc := NewEncoder(b)
	switch typeName {
	case "Token":
		for i, tok := range td.tokens {
			err := enc.WriteToken(tok)
			if err != nil && !errors.Is(err, io.ErrShortWrite) {
				t.Fatalf("%s: %d: Encoder.WriteToken error: %v", where, i, err)
			}
		}
	case "Value":
		err := enc.WriteValue(Value(td.in))
		if err != nil && !errors.Is(err, io.ErrShortWrite) {
			t.Fatalf("%s: Encoder.WriteValue error: %v", where, err)
		}
	}
	gotOutput := string(append(b.B, enc.s.unflushedBuffer()...))
	wantOutput := td.outCompacted + "\n"
	if gotOutput != wantOutput {
		t.Fatalf("%s: output mismatch:\ngot %s\nwant %s", where, gotOutput, wantOutput)
	}
}
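
// Illustrative sketch, not part of the original file: the equivalence that
// TestEncoder exercises, where a token-by-token stream and a single raw value
// produce the same serialized output (sketchTokensVersusValue is a
// hypothetical name).
func sketchTokensVersusValue() bool {
	var tokBuf, valBuf bytes.Buffer
	enc := NewEncoder(&tokBuf)
	for _, tok := range []Token{BeginObject, String("fizz"), String("buzz"), EndObject} {
		if err := enc.WriteToken(tok); err != nil {
			return false
		}
	}
	if err := NewEncoder(&valBuf).WriteValue(Value(`{"fizz":"buzz"}`)); err != nil {
		return false
	}
	return tokBuf.String() == valBuf.String() // both are `{"fizz":"buzz"}` plus a newline
}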
|
||||
|
||||
type encoderMethodCall struct {
|
||||
in tokOrVal
|
||||
wantErr error
|
||||
wantPointer Pointer
|
||||
}
|
||||
|
||||
var encoderErrorTestdata = []struct {
|
||||
name jsontest.CaseName
|
||||
opts []Options
|
||||
calls []encoderMethodCall
|
||||
wantOut string
|
||||
}{{
|
||||
name: jsontest.Name("InvalidToken"),
|
||||
calls: []encoderMethodCall{
|
||||
{zeroToken, E(errInvalidToken), ""},
|
||||
},
|
||||
}, {
|
||||
name: jsontest.Name("InvalidValue"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(`#`), newInvalidCharacterError("#", "at start of value"), ""},
|
||||
},
|
||||
}, {
|
||||
name: jsontest.Name("InvalidValue/DoubleZero"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(`00`), newInvalidCharacterError("0", "after top-level value").withPos(`0`, ""), ""},
|
||||
},
|
||||
}, {
|
||||
name: jsontest.Name("TruncatedValue"),
|
||||
calls: []encoderMethodCall{
|
||||
{zeroValue, E(io.ErrUnexpectedEOF).withPos("", ""), ""},
|
||||
},
|
||||
}, {
|
||||
name: jsontest.Name("TruncatedNull"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(`nul`), E(io.ErrUnexpectedEOF).withPos("nul", ""), ""},
|
||||
},
|
||||
}, {
|
||||
name: jsontest.Name("InvalidNull"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(`nulL`), newInvalidCharacterError("L", "in literal null (expecting 'l')").withPos(`nul`, ""), ""},
|
||||
},
|
||||
}, {
|
||||
name: jsontest.Name("TruncatedFalse"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(`fals`), E(io.ErrUnexpectedEOF).withPos("fals", ""), ""},
|
||||
},
|
||||
}, {
|
||||
name: jsontest.Name("InvalidFalse"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(`falsE`), newInvalidCharacterError("E", "in literal false (expecting 'e')").withPos(`fals`, ""), ""},
|
||||
},
|
||||
}, {
|
||||
name: jsontest.Name("TruncatedTrue"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(`tru`), E(io.ErrUnexpectedEOF).withPos(`tru`, ""), ""},
|
||||
},
|
||||
}, {
|
||||
name: jsontest.Name("InvalidTrue"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(`truE`), newInvalidCharacterError("E", "in literal true (expecting 'e')").withPos(`tru`, ""), ""},
|
||||
},
|
||||
}, {
|
||||
name: jsontest.Name("TruncatedString"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(`"star`), E(io.ErrUnexpectedEOF).withPos(`"star`, ""), ""},
|
||||
},
|
||||
}, {
|
||||
name: jsontest.Name("InvalidString"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(`"ok` + "\x00"), newInvalidCharacterError("\x00", `in string (expecting non-control character)`).withPos(`"ok`, ""), ""},
|
||||
},
|
||||
}, {
|
||||
name: jsontest.Name("ValidString/AllowInvalidUTF8/Token"),
|
||||
opts: []Options{AllowInvalidUTF8(true)},
|
||||
calls: []encoderMethodCall{
|
||||
{String("living\xde\xad\xbe\xef"), nil, ""},
|
||||
},
|
||||
wantOut: "\"living\xde\xad\ufffd\ufffd\"\n",
|
||||
}, {
|
||||
name: jsontest.Name("ValidString/AllowInvalidUTF8/Value"),
|
||||
opts: []Options{AllowInvalidUTF8(true)},
|
||||
calls: []encoderMethodCall{
|
||||
{Value("\"living\xde\xad\xbe\xef\""), nil, ""},
|
||||
},
|
||||
wantOut: "\"living\xde\xad\ufffd\ufffd\"\n",
|
||||
}, {
|
||||
name: jsontest.Name("InvalidString/RejectInvalidUTF8"),
|
||||
opts: []Options{AllowInvalidUTF8(false)},
|
||||
calls: []encoderMethodCall{
|
||||
{String("living\xde\xad\xbe\xef"), E(jsonwire.ErrInvalidUTF8), ""},
|
||||
{Value("\"living\xde\xad\xbe\xef\""), E(jsonwire.ErrInvalidUTF8).withPos("\"living\xde\xad", ""), ""},
|
||||
{BeginObject, nil, ""},
|
||||
{String("name"), nil, ""},
|
||||
{BeginArray, nil, ""},
|
||||
{String("living\xde\xad\xbe\xef"), E(jsonwire.ErrInvalidUTF8).withPos(`{"name":[`, "/name/0"), ""},
|
||||
{Value("\"living\xde\xad\xbe\xef\""), E(jsonwire.ErrInvalidUTF8).withPos("{\"name\":[\"living\xde\xad", "/name/0"), ""},
|
||||
},
|
||||
wantOut: `{"name":[`,
|
||||
}, {
|
||||
name: jsontest.Name("TruncatedNumber"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(`0.`), E(io.ErrUnexpectedEOF).withPos("0", ""), ""},
|
||||
},
|
||||
}, {
|
||||
name: jsontest.Name("InvalidNumber"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(`0.e`), newInvalidCharacterError("e", "in number (expecting digit)").withPos(`0.`, ""), ""},
|
||||
},
|
||||
}, {
|
||||
name: jsontest.Name("TruncatedObject/AfterStart"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(`{`), E(io.ErrUnexpectedEOF).withPos("{", ""), ""},
|
||||
},
|
||||
}, {
|
||||
name: jsontest.Name("TruncatedObject/AfterName"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(`{"X"`), E(io.ErrUnexpectedEOF).withPos(`{"X"`, "/X"), ""},
|
||||
},
|
||||
}, {
|
||||
name: jsontest.Name("TruncatedObject/AfterColon"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(`{"X":`), E(io.ErrUnexpectedEOF).withPos(`{"X":`, "/X"), ""},
|
||||
},
|
||||
}, {
|
||||
name: jsontest.Name("TruncatedObject/AfterValue"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(`{"0":0`), E(io.ErrUnexpectedEOF).withPos(`{"0":0`, ""), ""},
|
||||
},
|
||||
}, {
|
||||
name: jsontest.Name("TruncatedObject/AfterComma"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(`{"0":0,`), E(io.ErrUnexpectedEOF).withPos(`{"0":0,`, ""), ""},
|
||||
},
|
||||
}, {
|
||||
name: jsontest.Name("InvalidObject/MissingColon"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(` { "fizz" "buzz" } `), newInvalidCharacterError("\"", "after object name (expecting ':')").withPos(` { "fizz" `, "/fizz"), ""},
|
||||
{Value(` { "fizz" , "buzz" } `), newInvalidCharacterError(",", "after object name (expecting ':')").withPos(` { "fizz" `, "/fizz"), ""},
|
||||
},
|
||||
}, {
|
||||
name: jsontest.Name("InvalidObject/MissingComma"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(` { "fizz" : "buzz" "gazz" } `), newInvalidCharacterError("\"", "after object value (expecting ',' or '}')").withPos(` { "fizz" : "buzz" `, ""), ""},
|
||||
{Value(` { "fizz" : "buzz" : "gazz" } `), newInvalidCharacterError(":", "after object value (expecting ',' or '}')").withPos(` { "fizz" : "buzz" `, ""), ""},
|
||||
},
|
||||
}, {
|
||||
name: jsontest.Name("InvalidObject/ExtraComma"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(` { , } `), newInvalidCharacterError(",", `at start of string (expecting '"')`).withPos(` { `, ""), ""},
|
||||
{Value(` { "fizz" : "buzz" , } `), newInvalidCharacterError("}", `at start of string (expecting '"')`).withPos(` { "fizz" : "buzz" , `, ""), ""},
|
||||
},
|
||||
}, {
|
||||
name: jsontest.Name("InvalidObject/InvalidName"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(`{ null }`), newInvalidCharacterError("n", `at start of string (expecting '"')`).withPos(`{ `, ""), ""},
|
||||
{Value(`{ false }`), newInvalidCharacterError("f", `at start of string (expecting '"')`).withPos(`{ `, ""), ""},
|
||||
{Value(`{ true }`), newInvalidCharacterError("t", `at start of string (expecting '"')`).withPos(`{ `, ""), ""},
|
||||
{Value(`{ 0 }`), newInvalidCharacterError("0", `at start of string (expecting '"')`).withPos(`{ `, ""), ""},
|
||||
{Value(`{ {} }`), newInvalidCharacterError("{", `at start of string (expecting '"')`).withPos(`{ `, ""), ""},
|
||||
{Value(`{ [] }`), newInvalidCharacterError("[", `at start of string (expecting '"')`).withPos(`{ `, ""), ""},
|
||||
{BeginObject, nil, ""},
|
||||
{Null, E(ErrNonStringName).withPos(`{`, ""), ""},
|
||||
{Value(`null`), E(ErrNonStringName).withPos(`{`, ""), ""},
|
||||
{False, E(ErrNonStringName).withPos(`{`, ""), ""},
|
||||
{Value(`false`), E(ErrNonStringName).withPos(`{`, ""), ""},
|
||||
{True, E(ErrNonStringName).withPos(`{`, ""), ""},
|
||||
{Value(`true`), E(ErrNonStringName).withPos(`{`, ""), ""},
|
||||
{Uint(0), E(ErrNonStringName).withPos(`{`, ""), ""},
|
||||
{Value(`0`), E(ErrNonStringName).withPos(`{`, ""), ""},
|
||||
{BeginObject, E(ErrNonStringName).withPos(`{`, ""), ""},
|
||||
{Value(`{}`), E(ErrNonStringName).withPos(`{`, ""), ""},
|
||||
{BeginArray, E(ErrNonStringName).withPos(`{`, ""), ""},
|
||||
{Value(`[]`), E(ErrNonStringName).withPos(`{`, ""), ""},
|
||||
{EndObject, nil, ""},
|
||||
},
|
||||
wantOut: "{}\n",
|
||||
}, {
|
||||
name: jsontest.Name("InvalidObject/InvalidValue"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(`{ "0": x }`), newInvalidCharacterError("x", `at start of value`).withPos(`{ "0": `, "/0"), ""},
|
||||
},
|
||||
}, {
|
||||
name: jsontest.Name("InvalidObject/MismatchingDelim"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(` { ] `), newInvalidCharacterError("]", `at start of string (expecting '"')`).withPos(` { `, ""), ""},
|
||||
{Value(` { "0":0 ] `), newInvalidCharacterError("]", `after object value (expecting ',' or '}')`).withPos(` { "0":0 `, ""), ""},
|
||||
{BeginObject, nil, ""},
|
||||
{EndArray, E(errMismatchDelim).withPos(`{`, ""), ""},
|
||||
{Value(`]`), newInvalidCharacterError("]", "at start of value").withPos(`{`, ""), ""},
|
||||
{EndObject, nil, ""},
|
||||
},
|
||||
wantOut: "{}\n",
|
||||
}, {
|
||||
name: jsontest.Name("ValidObject/UniqueNames"),
|
||||
calls: []encoderMethodCall{
|
||||
{BeginObject, nil, ""},
|
||||
{String("0"), nil, ""},
|
||||
{Uint(0), nil, ""},
|
||||
{String("1"), nil, ""},
|
||||
{Uint(1), nil, ""},
|
||||
{EndObject, nil, ""},
|
||||
{Value(` { "0" : 0 , "1" : 1 } `), nil, ""},
|
||||
},
|
||||
wantOut: `{"0":0,"1":1}` + "\n" + `{"0":0,"1":1}` + "\n",
|
||||
}, {
|
||||
name: jsontest.Name("ValidObject/DuplicateNames"),
|
||||
opts: []Options{AllowDuplicateNames(true)},
|
||||
calls: []encoderMethodCall{
|
||||
{BeginObject, nil, ""},
|
||||
{String("0"), nil, ""},
|
||||
{Uint(0), nil, ""},
|
||||
{String("0"), nil, ""},
|
||||
{Uint(0), nil, ""},
|
||||
{EndObject, nil, ""},
|
||||
{Value(` { "0" : 0 , "0" : 0 } `), nil, ""},
|
||||
},
|
||||
wantOut: `{"0":0,"0":0}` + "\n" + `{"0":0,"0":0}` + "\n",
|
||||
}, {
|
||||
name: jsontest.Name("InvalidObject/DuplicateNames"),
|
||||
calls: []encoderMethodCall{
|
||||
{BeginObject, nil, ""},
|
||||
{String("X"), nil, ""},
|
||||
{BeginObject, nil, ""},
|
||||
{EndObject, nil, ""},
|
||||
{String("X"), E(ErrDuplicateName).withPos(`{"X":{},`, "/X"), "/X"},
|
||||
{Value(`"X"`), E(ErrDuplicateName).withPos(`{"X":{},`, "/X"), "/X"},
|
||||
{String("Y"), nil, ""},
|
||||
{BeginObject, nil, ""},
|
||||
{EndObject, nil, ""},
|
||||
{String("X"), E(ErrDuplicateName).withPos(`{"X":{},"Y":{},`, "/X"), "/Y"},
|
||||
{Value(`"X"`), E(ErrDuplicateName).withPos(`{"X":{},"Y":{},`, "/X"), "/Y"},
|
||||
{String("Y"), E(ErrDuplicateName).withPos(`{"X":{},"Y":{},`, "/Y"), "/Y"},
|
||||
{Value(`"Y"`), E(ErrDuplicateName).withPos(`{"X":{},"Y":{},`, "/Y"), "/Y"},
|
||||
{EndObject, nil, ""},
|
||||
{Value(` { "X" : 0 , "Y" : 1 , "X" : 0 } `), E(ErrDuplicateName).withPos(`{"X":{},"Y":{}}`+"\n"+` { "X" : 0 , "Y" : 1 , `, "/X"), ""},
|
||||
},
|
||||
wantOut: `{"X":{},"Y":{}}` + "\n",
|
||||
}, {
|
||||
name: jsontest.Name("TruncatedArray/AfterStart"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(`[`), E(io.ErrUnexpectedEOF).withPos(`[`, ""), ""},
|
||||
},
|
||||
}, {
|
||||
name: jsontest.Name("TruncatedArray/AfterValue"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(`[0`), E(io.ErrUnexpectedEOF).withPos(`[0`, ""), ""},
|
||||
},
|
||||
}, {
|
||||
name: jsontest.Name("TruncatedArray/AfterComma"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(`[0,`), E(io.ErrUnexpectedEOF).withPos(`[0,`, ""), ""},
|
||||
},
|
||||
}, {
|
||||
name: jsontest.Name("TruncatedArray/MissingComma"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(` [ "fizz" "buzz" ] `), newInvalidCharacterError("\"", "after array value (expecting ',' or ']')").withPos(` [ "fizz" `, ""), ""},
|
||||
},
|
||||
}, {
|
||||
name: jsontest.Name("InvalidArray/MismatchingDelim"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(` [ } `), newInvalidCharacterError("}", `at start of value`).withPos(` [ `, "/0"), ""},
|
||||
{BeginArray, nil, ""},
|
||||
{EndObject, E(errMismatchDelim).withPos(`[`, "/0"), ""},
|
||||
{Value(`}`), newInvalidCharacterError("}", "at start of value").withPos(`[`, "/0"), ""},
|
||||
{EndArray, nil, ""},
|
||||
},
|
||||
wantOut: "[]\n",
|
||||
}, {
|
||||
name: jsontest.Name("Format/Object/SpaceAfterColon"),
|
||||
opts: []Options{SpaceAfterColon(true)},
|
||||
calls: []encoderMethodCall{{Value(`{"fizz":"buzz","wizz":"wuzz"}`), nil, ""}},
|
||||
wantOut: "{\"fizz\": \"buzz\",\"wizz\": \"wuzz\"}\n",
|
||||
}, {
|
||||
name: jsontest.Name("Format/Object/SpaceAfterComma"),
|
||||
opts: []Options{SpaceAfterComma(true)},
|
||||
calls: []encoderMethodCall{{Value(`{"fizz":"buzz","wizz":"wuzz"}`), nil, ""}},
|
||||
wantOut: "{\"fizz\":\"buzz\", \"wizz\":\"wuzz\"}\n",
|
||||
}, {
|
||||
name: jsontest.Name("Format/Object/SpaceAfterColonAndComma"),
|
||||
opts: []Options{SpaceAfterColon(true), SpaceAfterComma(true)},
|
||||
calls: []encoderMethodCall{{Value(`{"fizz":"buzz","wizz":"wuzz"}`), nil, ""}},
|
||||
wantOut: "{\"fizz\": \"buzz\", \"wizz\": \"wuzz\"}\n",
|
||||
}, {
|
||||
name: jsontest.Name("Format/Object/NoSpaceAfterColon+SpaceAfterComma+Multiline"),
|
||||
opts: []Options{SpaceAfterColon(false), SpaceAfterComma(true), Multiline(true)},
|
||||
calls: []encoderMethodCall{{Value(`{"fizz":"buzz","wizz":"wuzz"}`), nil, ""}},
|
||||
wantOut: "{\n\t\"fizz\":\"buzz\", \n\t\"wizz\":\"wuzz\"\n}\n",
|
||||
}, {
|
||||
name: jsontest.Name("Format/Array/SpaceAfterComma"),
|
||||
opts: []Options{SpaceAfterComma(true)},
|
||||
calls: []encoderMethodCall{{Value(`["fizz","buzz"]`), nil, ""}},
|
||||
wantOut: "[\"fizz\", \"buzz\"]\n",
|
||||
}, {
|
||||
name: jsontest.Name("Format/Array/NoSpaceAfterComma+Multiline"),
|
||||
opts: []Options{SpaceAfterComma(false), Multiline(true)},
|
||||
calls: []encoderMethodCall{{Value(`["fizz","buzz"]`), nil, ""}},
|
||||
wantOut: "[\n\t\"fizz\",\n\t\"buzz\"\n]\n",
|
||||
}, {
|
||||
name: jsontest.Name("Format/ReorderWithWhitespace"),
|
||||
opts: []Options{
|
||||
AllowDuplicateNames(true),
|
||||
AllowInvalidUTF8(true),
|
||||
ReorderRawObjects(true),
|
||||
SpaceAfterComma(true),
|
||||
SpaceAfterColon(false),
|
||||
Multiline(true),
|
||||
WithIndentPrefix(" "),
|
||||
WithIndent("\t"),
|
||||
PreserveRawStrings(true),
|
||||
},
|
||||
calls: []encoderMethodCall{
|
||||
{BeginArray, nil, ""},
|
||||
{BeginArray, nil, ""},
|
||||
{Value(` { "fizz" : "buzz" ,
|
||||
"zip" : {
|
||||
"x` + "\xfd" + `x" : 123 , "x` + "\xff" + `x" : 123, "x` + "\xfe" + `x" : 123
|
||||
},
|
||||
"zap" : {
|
||||
"xxx" : 333, "xxx": 1, "xxx": 22
|
||||
},
|
||||
"alpha" : "bravo" } `), nil, ""},
|
||||
{EndArray, nil, ""},
|
||||
{EndArray, nil, ""},
|
||||
},
|
||||
wantOut: "[\n \t[\n \t\t{\n \t\t\t\"alpha\":\"bravo\", \n \t\t\t\"fizz\":\"buzz\", \n \t\t\t\"zap\":{\n \t\t\t\t\"xxx\":1, \n \t\t\t\t\"xxx\":22, \n \t\t\t\t\"xxx\":333\n \t\t\t}, \n \t\t\t\"zip\":{\n \t\t\t\t\"x\xfdx\":123, \n \t\t\t\t\"x\xfex\":123, \n \t\t\t\t\"x\xffx\":123\n \t\t\t}\n \t\t}\n \t]\n ]\n",
|
||||
}, {
|
||||
name: jsontest.Name("Format/CanonicalizeRawInts"),
|
||||
opts: []Options{CanonicalizeRawInts(true), SpaceAfterComma(true)},
|
||||
calls: []encoderMethodCall{
|
||||
{Value(`[0.100,5.0,1E6,-9223372036854775808,-10,-1,-0,0,1,10,9223372036854775807]`), nil, ""},
|
||||
},
|
||||
wantOut: "[0.100, 5.0, 1E6, -9223372036854776000, -10, -1, 0, 0, 1, 10, 9223372036854776000]\n",
|
||||
}, {
|
||||
name: jsontest.Name("Format/CanonicalizeRawFloats"),
|
||||
opts: []Options{CanonicalizeRawFloats(true), SpaceAfterComma(true)},
|
||||
calls: []encoderMethodCall{
|
||||
{Value(`[0.100,5.0,1E6,-9223372036854775808,-10,-1,-0,0,1,10,9223372036854775807]`), nil, ""},
|
||||
},
|
||||
wantOut: "[0.1, 5, 1000000, -9223372036854775808, -10, -1, 0, 0, 1, 10, 9223372036854775807]\n",
|
||||
}, {
|
||||
name: jsontest.Name("ErrorPosition"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(` "a` + "\xff" + `0" `), E(jsonwire.ErrInvalidUTF8).withPos(` "a`, ""), ""},
|
||||
{String(`a` + "\xff" + `0`), E(jsonwire.ErrInvalidUTF8).withPos(``, ""), ""},
|
||||
},
|
||||
}, {
|
||||
name: jsontest.Name("ErrorPosition/0"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(` [ "a` + "\xff" + `1" ] `), E(jsonwire.ErrInvalidUTF8).withPos(` [ "a`, "/0"), ""},
|
||||
{BeginArray, nil, ""},
|
||||
{Value(` "a` + "\xff" + `1" `), E(jsonwire.ErrInvalidUTF8).withPos(`[ "a`, "/0"), ""},
|
||||
{String(`a` + "\xff" + `1`), E(jsonwire.ErrInvalidUTF8).withPos(`[`, "/0"), ""},
|
||||
},
|
||||
wantOut: `[`,
|
||||
}, {
|
||||
name: jsontest.Name("ErrorPosition/1"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(` [ "a1" , "b` + "\xff" + `1" ] `), E(jsonwire.ErrInvalidUTF8).withPos(` [ "a1" , "b`, "/1"), ""},
|
||||
{BeginArray, nil, ""},
|
||||
{String("a1"), nil, ""},
|
||||
{Value(` "b` + "\xff" + `1" `), E(jsonwire.ErrInvalidUTF8).withPos(`["a1", "b`, "/1"), ""},
|
||||
{String(`b` + "\xff" + `1`), E(jsonwire.ErrInvalidUTF8).withPos(`["a1",`, "/1"), ""},
|
||||
},
|
||||
wantOut: `["a1"`,
|
||||
}, {
|
||||
name: jsontest.Name("ErrorPosition/0/0"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(` [ [ "a` + "\xff" + `2" ] ] `), E(jsonwire.ErrInvalidUTF8).withPos(` [ [ "a`, "/0/0"), ""},
|
||||
{BeginArray, nil, ""},
|
||||
{Value(` [ "a` + "\xff" + `2" ] `), E(jsonwire.ErrInvalidUTF8).withPos(`[ [ "a`, "/0/0"), ""},
|
||||
{BeginArray, nil, "/0"},
|
||||
{Value(` "a` + "\xff" + `2" `), E(jsonwire.ErrInvalidUTF8).withPos(`[[ "a`, "/0/0"), "/0"},
|
||||
{String(`a` + "\xff" + `2`), E(jsonwire.ErrInvalidUTF8).withPos(`[[`, "/0/0"), "/0"},
|
||||
},
|
||||
wantOut: `[[`,
|
||||
}, {
|
||||
name: jsontest.Name("ErrorPosition/1/0"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(` [ "a1" , [ "a` + "\xff" + `2" ] ] `), E(jsonwire.ErrInvalidUTF8).withPos(` [ "a1" , [ "a`, "/1/0"), ""},
|
||||
{BeginArray, nil, ""},
|
||||
{String("a1"), nil, "/0"},
|
||||
{Value(` [ "a` + "\xff" + `2" ] `), E(jsonwire.ErrInvalidUTF8).withPos(`["a1", [ "a`, "/1/0"), ""},
|
||||
{BeginArray, nil, "/1"},
|
||||
{Value(` "a` + "\xff" + `2" `), E(jsonwire.ErrInvalidUTF8).withPos(`["a1",[ "a`, "/1/0"), "/1"},
|
||||
{String(`a` + "\xff" + `2`), E(jsonwire.ErrInvalidUTF8).withPos(`["a1",[`, "/1/0"), "/1"},
|
||||
},
|
||||
wantOut: `["a1",[`,
|
||||
}, {
|
||||
name: jsontest.Name("ErrorPosition/0/1"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(` [ [ "a2" , "b` + "\xff" + `2" ] ] `), E(jsonwire.ErrInvalidUTF8).withPos(` [ [ "a2" , "b`, "/0/1"), ""},
|
||||
{BeginArray, nil, ""},
|
||||
{Value(` [ "a2" , "b` + "\xff" + `2" ] `), E(jsonwire.ErrInvalidUTF8).withPos(`[ [ "a2" , "b`, "/0/1"), ""},
|
||||
{BeginArray, nil, "/0"},
|
||||
{String("a2"), nil, "/0/0"},
|
||||
{Value(` "b` + "\xff" + `2" `), E(jsonwire.ErrInvalidUTF8).withPos(`[["a2", "b`, "/0/1"), "/0/0"},
|
||||
{String(`b` + "\xff" + `2`), E(jsonwire.ErrInvalidUTF8).withPos(`[["a2",`, "/0/1"), "/0/0"},
|
||||
},
|
||||
wantOut: `[["a2"`,
|
||||
}, {
|
||||
name: jsontest.Name("ErrorPosition/1/1"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(` [ "a1" , [ "a2" , "b` + "\xff" + `2" ] ] `), E(jsonwire.ErrInvalidUTF8).withPos(` [ "a1" , [ "a2" , "b`, "/1/1"), ""},
|
||||
{BeginArray, nil, ""},
|
||||
{String("a1"), nil, "/0"},
|
||||
{Value(` [ "a2" , "b` + "\xff" + `2" ] `), E(jsonwire.ErrInvalidUTF8).withPos(`["a1", [ "a2" , "b`, "/1/1"), ""},
|
||||
{BeginArray, nil, "/1"},
|
||||
{String("a2"), nil, "/1/0"},
|
||||
{Value(` "b` + "\xff" + `2" `), E(jsonwire.ErrInvalidUTF8).withPos(`["a1",["a2", "b`, "/1/1"), "/1/0"},
|
||||
{String(`b` + "\xff" + `2`), E(jsonwire.ErrInvalidUTF8).withPos(`["a1",["a2",`, "/1/1"), "/1/0"},
|
||||
},
|
||||
wantOut: `["a1",["a2"`,
|
||||
}, {
|
||||
name: jsontest.Name("ErrorPosition/a1-"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(` { "a` + "\xff" + `1" : "b1" } `), E(jsonwire.ErrInvalidUTF8).withPos(` { "a`, ""), ""},
|
||||
{BeginObject, nil, ""},
|
||||
{Value(` "a` + "\xff" + `1" `), E(jsonwire.ErrInvalidUTF8).withPos(`{ "a`, ""), ""},
|
||||
{String(`a` + "\xff" + `1`), E(jsonwire.ErrInvalidUTF8).withPos(`{`, ""), ""},
|
||||
},
|
||||
wantOut: `{`,
|
||||
}, {
|
||||
name: jsontest.Name("ErrorPosition/a1"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(` { "a1" : "b` + "\xff" + `1" } `), E(jsonwire.ErrInvalidUTF8).withPos(` { "a1" : "b`, "/a1"), ""},
|
||||
{BeginObject, nil, ""},
|
||||
{String("a1"), nil, "/a1"},
|
||||
{Value(` "b` + "\xff" + `1" `), E(jsonwire.ErrInvalidUTF8).withPos(`{"a1": "b`, "/a1"), ""},
|
||||
{String(`b` + "\xff" + `1`), E(jsonwire.ErrInvalidUTF8).withPos(`{"a1":`, "/a1"), ""},
|
||||
},
|
||||
wantOut: `{"a1"`,
|
||||
}, {
|
||||
name: jsontest.Name("ErrorPosition/c1-"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(` { "a1" : "b1" , "c` + "\xff" + `1" : "d1" } `), E(jsonwire.ErrInvalidUTF8).withPos(` { "a1" : "b1" , "c`, ""), ""},
|
||||
{BeginObject, nil, ""},
|
||||
{String("a1"), nil, "/a1"},
|
||||
{String("b1"), nil, "/a1"},
|
||||
{Value(` "c` + "\xff" + `1" `), E(jsonwire.ErrInvalidUTF8).withPos(`{"a1":"b1": "c`, ""), "/a1"},
|
||||
{String(`c` + "\xff" + `1`), E(jsonwire.ErrInvalidUTF8).withPos(`{"a1":"b1":`, ""), "/a1"},
|
||||
},
|
||||
wantOut: `{"a1":"b1"`,
|
||||
}, {
|
||||
name: jsontest.Name("ErrorPosition/c1"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(` { "a1" : "b1" , "c1" : "d` + "\xff" + `1" } `), E(jsonwire.ErrInvalidUTF8).withPos(` { "a1" : "b1" , "c1" : "d`, "/c1"), ""},
|
||||
{BeginObject, nil, ""},
|
||||
{String("a1"), nil, "/a1"},
|
||||
{String("b1"), nil, "/a1"},
|
||||
{String("c1"), nil, "/c1"},
|
||||
{Value(` "d` + "\xff" + `1" `), E(jsonwire.ErrInvalidUTF8).withPos(`{"a1":"b1":"c1": "d`, "/c1"), "/c1"},
|
||||
{String(`d` + "\xff" + `1`), E(jsonwire.ErrInvalidUTF8).withPos(`{"a1":"b1":"c1":`, "/c1"), "/c1"},
|
||||
},
|
||||
wantOut: `{"a1":"b1","c1"`,
|
||||
}, {
|
||||
name: jsontest.Name("ErrorPosition/a1/a2-"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(` { "a1" : { "a` + "\xff" + `2" : "b2" } } `), E(jsonwire.ErrInvalidUTF8).withPos(` { "a1" : { "a`, "/a1"), ""},
|
||||
{BeginObject, nil, ""},
|
||||
{String("a1"), nil, "/a1"},
|
||||
{Value(` { "a` + "\xff" + `2" : "b2" } `), E(jsonwire.ErrInvalidUTF8).withPos(`{"a1": { "a`, "/a1"), ""},
|
||||
{BeginObject, nil, "/a1"},
|
||||
{Value(` "a` + "\xff" + `2" `), E(jsonwire.ErrInvalidUTF8).withPos(`{"a1":{ "a`, "/a1"), "/a1"},
|
||||
{String(`a` + "\xff" + `2`), E(jsonwire.ErrInvalidUTF8).withPos(`{"a1":{`, "/a1"), "/a1"},
|
||||
},
|
||||
wantOut: `{"a1":{`,
|
||||
}, {
|
||||
name: jsontest.Name("ErrorPosition/a1/a2"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(` { "a1" : { "a2" : "b` + "\xff" + `2" } } `), E(jsonwire.ErrInvalidUTF8).withPos(` { "a1" : { "a2" : "b`, "/a1/a2"), ""},
|
||||
{BeginObject, nil, ""},
|
||||
{String("a1"), nil, "/a1"},
|
||||
{Value(` { "a2" : "b` + "\xff" + `2" } `), E(jsonwire.ErrInvalidUTF8).withPos(`{"a1": { "a2" : "b`, "/a1/a2"), ""},
|
||||
{BeginObject, nil, "/a1"},
|
||||
{String("a2"), nil, "/a1/a2"},
|
||||
{Value(` "b` + "\xff" + `2" `), E(jsonwire.ErrInvalidUTF8).withPos(`{"a1":{"a2": "b`, "/a1/a2"), "/a1/a2"},
|
||||
{String(`b` + "\xff" + `2`), E(jsonwire.ErrInvalidUTF8).withPos(`{"a1":{"a2":`, "/a1/a2"), "/a1/a2"},
|
||||
},
|
||||
wantOut: `{"a1":{"a2"`,
|
||||
}, {
|
||||
name: jsontest.Name("ErrorPosition/a1/c2-"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(` { "a1" : { "a2" : "b2" , "c` + "\xff" + `2" : "d2" } } `), E(jsonwire.ErrInvalidUTF8).withPos(` { "a1" : { "a2" : "b2" , "c`, "/a1"), ""},
|
||||
{BeginObject, nil, ""},
|
||||
{String("a1"), nil, "/a1"},
|
||||
{BeginObject, nil, "/a1"},
|
||||
{String("a2"), nil, "/a1/a2"},
|
||||
{String("b2"), nil, "/a1/a2"},
|
||||
{Value(` "c` + "\xff" + `2" `), E(jsonwire.ErrInvalidUTF8).withPos(`{"a1":{"a2":"b2", "c`, "/a1"), "/a1/a2"},
|
||||
{String(`c` + "\xff" + `2`), E(jsonwire.ErrInvalidUTF8).withPos(`{"a1":{"a2":"b2",`, "/a1"), "/a1/a2"},
|
||||
},
|
||||
wantOut: `{"a1":{"a2":"b2"`,
|
||||
}, {
|
||||
name: jsontest.Name("ErrorPosition/a1/c2"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(` { "a1" : { "a2" : "b2" , "c2" : "d` + "\xff" + `2" } } `), E(jsonwire.ErrInvalidUTF8).withPos(` { "a1" : { "a2" : "b2" , "c2" : "d`, "/a1/c2"), ""},
|
||||
{BeginObject, nil, ""},
|
||||
{String("a1"), nil, "/a1"},
|
||||
{Value(` { "a2" : "b2" , "c2" : "d` + "\xff" + `2" } `), E(jsonwire.ErrInvalidUTF8).withPos(`{"a1": { "a2" : "b2" , "c2" : "d`, "/a1/c2"), ""},
|
||||
{BeginObject, nil, ""},
|
||||
{String("a2"), nil, "/a1/a2"},
|
||||
{String("b2"), nil, "/a1/a2"},
|
||||
{String("c2"), nil, "/a1/c2"},
|
||||
{Value(` "d` + "\xff" + `2" `), E(jsonwire.ErrInvalidUTF8).withPos(`{"a1":{"a2":"b2","c2": "d`, "/a1/c2"), "/a1/c2"},
|
||||
{String(`d` + "\xff" + `2`), E(jsonwire.ErrInvalidUTF8).withPos(`{"a1":{"a2":"b2","c2":`, "/a1/c2"), "/a1/c2"},
|
||||
},
|
||||
wantOut: `{"a1":{"a2":"b2","c2"`,
|
||||
}, {
|
||||
name: jsontest.Name("ErrorPosition/1/a2"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(` [ "a1" , { "a2" : "b` + "\xff" + `2" } ] `), E(jsonwire.ErrInvalidUTF8).withPos(` [ "a1" , { "a2" : "b`, "/1/a2"), ""},
|
||||
{BeginArray, nil, ""},
|
||||
{String("a1"), nil, "/0"},
|
||||
{Value(` { "a2" : "b` + "\xff" + `2" } `), E(jsonwire.ErrInvalidUTF8).withPos(`["a1", { "a2" : "b`, "/1/a2"), ""},
|
||||
{BeginObject, nil, "/1"},
|
||||
{String("a2"), nil, "/1/a2"},
|
||||
{Value(` "b` + "\xff" + `2" `), E(jsonwire.ErrInvalidUTF8).withPos(`["a1",{"a2": "b`, "/1/a2"), "/1/a2"},
|
||||
{String(`b` + "\xff" + `2`), E(jsonwire.ErrInvalidUTF8).withPos(`["a1",{"a2":`, "/1/a2"), "/1/a2"},
|
||||
},
|
||||
wantOut: `["a1",{"a2"`,
|
||||
}, {
|
||||
name: jsontest.Name("ErrorPosition/c1/1"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(` { "a1" : "b1" , "c1" : [ "a2" , "b` + "\xff" + `2" ] } `), E(jsonwire.ErrInvalidUTF8).withPos(` { "a1" : "b1" , "c1" : [ "a2" , "b`, "/c1/1"), ""},
|
||||
{BeginObject, nil, ""},
|
||||
{String("a1"), nil, "/a1"},
|
||||
{String("b1"), nil, "/a1"},
|
||||
{String("c1"), nil, "/c1"},
|
||||
{Value(` [ "a2" , "b` + "\xff" + `2" ] `), E(jsonwire.ErrInvalidUTF8).withPos(`{"a1":"b1","c1": [ "a2" , "b`, "/c1/1"), ""},
|
||||
{BeginArray, nil, "/c1"},
|
||||
{String("a2"), nil, "/c1/0"},
|
||||
{Value(` "b` + "\xff" + `2" `), E(jsonwire.ErrInvalidUTF8).withPos(`{"a1":"b1","c1":["a2", "b`, "/c1/1"), "/c1/0"},
|
||||
{String(`b` + "\xff" + `2`), E(jsonwire.ErrInvalidUTF8).withPos(`{"a1":"b1","c1":["a2",`, "/c1/1"), "/c1/0"},
|
||||
},
|
||||
wantOut: `{"a1":"b1","c1":["a2"`,
|
||||
}, {
|
||||
name: jsontest.Name("ErrorPosition/0/a1/1/c3/1"),
|
||||
calls: []encoderMethodCall{
|
||||
{Value(` [ { "a1" : [ "a2" , { "a3" : "b3" , "c3" : [ "a4" , "b` + "\xff" + `4" ] } ] } ] `), E(jsonwire.ErrInvalidUTF8).withPos(` [ { "a1" : [ "a2" , { "a3" : "b3" , "c3" : [ "a4" , "b`, "/0/a1/1/c3/1"), ""},
|
||||
{BeginArray, nil, ""},
|
||||
{Value(` { "a1" : [ "a2" , { "a3" : "b3" , "c3" : [ "a4" , "b` + "\xff" + `4" ] } ] } `), E(jsonwire.ErrInvalidUTF8).withPos(`[ { "a1" : [ "a2" , { "a3" : "b3" , "c3" : [ "a4" , "b`, "/0/a1/1/c3/1"), ""},
|
||||
{BeginObject, nil, "/0"},
|
||||
{String("a1"), nil, "/0/a1"},
|
||||
{Value(` [ "a2" , { "a3" : "b3" , "c3" : [ "a4" , "b` + "\xff" + `4" ] } ] `), E(jsonwire.ErrInvalidUTF8).withPos(`[{"a1": [ "a2" , { "a3" : "b3" , "c3" : [ "a4" , "b`, "/0/a1/1/c3/1"), ""},
|
||||
{BeginArray, nil, ""},
|
||||
{String("a2"), nil, "/0/a1/0"},
|
||||
{Value(` { "a3" : "b3" , "c3" : [ "a4" , "b` + "\xff" + `4" ] } `), E(jsonwire.ErrInvalidUTF8).withPos(`[{"a1":["a2", { "a3" : "b3" , "c3" : [ "a4" , "b`, "/0/a1/1/c3/1"), ""},
|
||||
{BeginObject, nil, "/0/a1/1"},
|
||||
{String("a3"), nil, "/0/a1/1/a3"},
|
||||
{String("b3"), nil, "/0/a1/1/a3"},
|
||||
{String("c3"), nil, "/0/a1/1/c3"},
|
||||
{Value(` [ "a4" , "b` + "\xff" + `4" ] `), E(jsonwire.ErrInvalidUTF8).withPos(`[{"a1":["a2",{"a3":"b3","c3": [ "a4" , "b`, "/0/a1/1/c3/1"), ""},
|
||||
{BeginArray, nil, "/0/a1/1/c3"},
|
||||
{String("a4"), nil, "/0/a1/1/c3/0"},
|
||||
{Value(` "b` + "\xff" + `4" `), E(jsonwire.ErrInvalidUTF8).withPos(`[{"a1":["a2",{"a3":"b3","c3":["a4", "b`, "/0/a1/1/c3/1"), "/0/a1/1/c3/0"},
|
||||
{String(`b` + "\xff" + `4`), E(jsonwire.ErrInvalidUTF8).withPos(`[{"a1":["a2",{"a3":"b3","c3":["a4",`, "/0/a1/1/c3/1"), "/0/a1/1/c3/0"},
|
||||
},
|
||||
wantOut: `[{"a1":["a2",{"a3":"b3","c3":["a4"`,
|
||||
}}
|
||||
|
||||
// TestEncoderErrors test that Encoder errors occur when we expect and
|
||||
// leaves the Encoder in a consistent state.
|
||||
func TestEncoderErrors(t *testing.T) {
|
||||
for _, td := range encoderErrorTestdata {
|
||||
t.Run(path.Join(td.name.Name), func(t *testing.T) {
|
||||
testEncoderErrors(t, td.name.Where, td.opts, td.calls, td.wantOut)
|
||||
})
|
||||
}
|
||||
}
|
||||
func testEncoderErrors(t *testing.T, where jsontest.CasePos, opts []Options, calls []encoderMethodCall, wantOut string) {
|
||||
dst := new(bytes.Buffer)
|
||||
enc := NewEncoder(dst, opts...)
|
||||
for i, call := range calls {
|
||||
var gotErr error
|
||||
switch tokVal := call.in.(type) {
|
||||
case Token:
|
||||
gotErr = enc.WriteToken(tokVal)
|
||||
case Value:
|
||||
gotErr = enc.WriteValue(tokVal)
|
||||
}
|
||||
if !equalError(gotErr, call.wantErr) {
|
||||
t.Fatalf("%s: %d: error mismatch:\ngot %v\nwant %v", where, i, gotErr, call.wantErr)
|
||||
}
|
||||
if call.wantPointer != "" {
|
||||
gotPointer := enc.StackPointer()
|
||||
if gotPointer != call.wantPointer {
|
||||
t.Fatalf("%s: %d: Encoder.StackPointer = %s, want %s", where, i, gotPointer, call.wantPointer)
|
||||
}
|
||||
}
|
||||
}
|
||||
gotOut := dst.String() + string(enc.s.unflushedBuffer())
|
||||
if gotOut != wantOut {
|
||||
t.Fatalf("%s: output mismatch:\ngot %q\nwant %q", where, gotOut, wantOut)
|
||||
}
|
||||
gotOffset := int(enc.OutputOffset())
|
||||
wantOffset := len(wantOut)
|
||||
if gotOffset != wantOffset {
|
||||
t.Fatalf("%s: Encoder.OutputOffset = %v, want %v", where, gotOffset, wantOffset)
|
||||
}
|
||||
}
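
// The wantOut strings above capture what a "consistent state" means in
// practice: a call that fails writes nothing, so only the successfully written
// tokens appear in the output. The helper below is a hedged sketch restating
// that property for one of the cases above; it is not part of the original tests.
func exampleEncoderFailedWriteKeepsOutput() string {
	dst := new(bytes.Buffer)
	enc := NewEncoder(dst)
	enc.WriteToken(BeginObject)                 // output so far: {
	enc.WriteValue(Value(`"a` + "\xff" + `1"`)) // rejected for invalid UTF-8; writes nothing
	return dst.String() + string(enc.s.unflushedBuffer()) // expected to still be just "{"
}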
@@ -1,130 +0,0 @@
// Copyright 2023 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

//go:build goexperiment.jsonv2

package jsontext_test

import (
"bytes"
"fmt"
"io"
"log"
"strings"

"encoding/json/jsontext"
"encoding/json/v2"
)

// This example demonstrates the use of the [Encoder] and [Decoder] to
// parse and modify JSON without unmarshaling it into a concrete Go type.
func Example_stringReplace() {
// Example input with non-idiomatic use of "Golang" instead of "Go".
const input = `{
"title": "Golang version 1 is released",
"author": "Andrew Gerrand",
"date": "2012-03-28",
"text": "Today marks a major milestone in the development of the Golang programming language.",
"otherArticles": [
"Twelve Years of Golang",
"The Laws of Reflection",
"Learn Golang from your browser"
]
}`

// Using a Decoder and Encoder, we can parse through every token,
// check and modify the token if necessary, and
// write the token to the output.
var replacements []jsontext.Pointer
in := strings.NewReader(input)
dec := jsontext.NewDecoder(in)
out := new(bytes.Buffer)
enc := jsontext.NewEncoder(out, jsontext.Multiline(true)) // expand for readability
for {
// Read a token from the input.
tok, err := dec.ReadToken()
if err != nil {
if err == io.EOF {
break
}
log.Fatal(err)
}

// Check whether the token contains the string "Golang" and
// replace each occurrence with "Go" instead.
if tok.Kind() == '"' && strings.Contains(tok.String(), "Golang") {
replacements = append(replacements, dec.StackPointer())
tok = jsontext.String(strings.ReplaceAll(tok.String(), "Golang", "Go"))
}

// Write the (possibly modified) token to the output.
if err := enc.WriteToken(tok); err != nil {
log.Fatal(err)
}
}

// Print the list of replacements and the adjusted JSON output.
if len(replacements) > 0 {
fmt.Println(`Replaced "Golang" with "Go" in:`)
for _, where := range replacements {
fmt.Println("\t" + where)
}
fmt.Println()
}
fmt.Println("Result:", out.String())

// Output:
// Replaced "Golang" with "Go" in:
// /title
// /text
// /otherArticles/0
// /otherArticles/2
//
// Result: {
// "title": "Go version 1 is released",
// "author": "Andrew Gerrand",
// "date": "2012-03-28",
// "text": "Today marks a major milestone in the development of the Go programming language.",
// "otherArticles": [
// "Twelve Years of Go",
// "The Laws of Reflection",
// "Learn Go from your browser"
// ]
// }
}
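
// The pointers reported above, such as "/otherArticles/0", follow the JSON
// Pointer syntax of RFC 6901. The sketch below assumes the Pointer.Tokens
// iterator and is illustrative only; it is not part of the original example.
func examplePointerTokens() {
	for tok := range jsontext.Pointer("/otherArticles/0").Tokens() {
		fmt.Println(tok) // prints "otherArticles", then "0"
	}
}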

// Directly embedding JSON within HTML requires special handling for safety.
// Escape certain runes to prevent JSON that is directly treated as HTML
// from being able to perform <script> injection.
//
// This example shows how to obtain behavior equivalent to that of the
// v1 [encoding/json] package, which is no longer directly supported by this package.
// Newly written code that intermixes JSON and HTML should instead use the
// [github.com/google/safehtml] module for safety purposes.
func ExampleEscapeForHTML() {
page := struct {
Title string
Body string
}{
Title: "Example Embedded Javascript",
Body: `<script> console.log("Hello, world!"); </script>`,
}

b, err := json.Marshal(&page,
// Escape certain runes within a JSON string so that
// JSON will be safe to directly embed inside HTML.
jsontext.EscapeForHTML(true),
jsontext.EscapeForJS(true),
jsontext.Multiline(true)) // expand for readability
if err != nil {
log.Fatal(err)
}
fmt.Println(string(b))

// Output:
// {
// "Title": "Example Embedded Javascript",
// "Body": "\u003cscript\u003e console.log(\"Hello, world!\"); \u003c/script\u003e"
// }
}
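
// EscapeForJS, used together with EscapeForHTML above, addresses a different
// hazard: the line and paragraph separators U+2028 and U+2029 are legal inside
// JSON strings but have historically broken JavaScript string literals. The
// sketch below is an assumption about that option rather than part of the
// original example.
func exampleEscapeForJS() {
	b, err := json.Marshal("\u2028\u2029", jsontext.EscapeForJS(true))
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(b)) // expected: "\u2028\u2029"
}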
@@ -1,236 +0,0 @@
// Copyright 2023 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

//go:build goexperiment.jsonv2

package jsontext

import (
"bytes"
"errors"
"io"
"math/rand"
"slices"
"testing"

"encoding/json/internal/jsontest"
)

func FuzzCoder(f *testing.F) {
// Add a number of inputs to the corpus including valid and invalid data.
for _, td := range coderTestdata {
f.Add(int64(0), []byte(td.in))
}
for _, td := range decoderErrorTestdata {
f.Add(int64(0), []byte(td.in))
}
for _, td := range encoderErrorTestdata {
f.Add(int64(0), []byte(td.wantOut))
}
for _, td := range jsontest.Data {
f.Add(int64(0), td.Data())
}

f.Fuzz(func(t *testing.T, seed int64, b []byte) {
var tokVals []tokOrVal
rn := rand.NewSource(seed)

// Read a sequence of tokens or values. Skip the test for any errors
// since we expect them with randomly generated fuzz inputs.
src := bytes.NewReader(b)
dec := NewDecoder(src)
for {
if rn.Int63()%8 > 0 {
tok, err := dec.ReadToken()
if err != nil {
if err == io.EOF {
break
}
t.Skipf("Decoder.ReadToken error: %v", err)
}
tokVals = append(tokVals, tok.Clone())
} else {
val, err := dec.ReadValue()
if err != nil {
expectError := dec.PeekKind() == '}' || dec.PeekKind() == ']'
if expectError && errors.As(err, new(*SyntacticError)) {
continue
}
if err == io.EOF {
break
}
t.Skipf("Decoder.ReadValue error: %v", err)
}
tokVals = append(tokVals, append(zeroValue, val...))
}
}

// Write a sequence of tokens or values. Fail the test for any errors
// since the previous stage guarantees that the input is valid.
dst := new(bytes.Buffer)
enc := NewEncoder(dst)
for _, tokVal := range tokVals {
switch tokVal := tokVal.(type) {
case Token:
if err := enc.WriteToken(tokVal); err != nil {
t.Fatalf("Encoder.WriteToken error: %v", err)
}
case Value:
if err := enc.WriteValue(tokVal); err != nil {
t.Fatalf("Encoder.WriteValue error: %v", err)
}
}
}

// Encoded output and original input must decode to the same thing.
var got, want []Token
for dec := NewDecoder(bytes.NewReader(b)); dec.PeekKind() > 0; {
tok, err := dec.ReadToken()
if err != nil {
t.Fatalf("Decoder.ReadToken error: %v", err)
}
got = append(got, tok.Clone())
}
for dec := NewDecoder(dst); dec.PeekKind() > 0; {
tok, err := dec.ReadToken()
if err != nil {
t.Fatalf("Decoder.ReadToken error: %v", err)
}
want = append(want, tok.Clone())
}
if !equalTokens(got, want) {
t.Fatalf("mismatching output:\ngot %v\nwant %v", got, want)
}
})
}
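
// These fuzz targets run under the standard Go fuzzing engine; a local run
// might look like the following (hypothetical invocation, adjust the package
// path and GOEXPERIMENT setting for your checkout):
//
//	go test -run=NONE -fuzz=FuzzCoder -fuzztime=30s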

func FuzzResumableDecoder(f *testing.F) {
for _, td := range resumableDecoderTestdata {
f.Add(int64(0), []byte(td))
}

f.Fuzz(func(t *testing.T, seed int64, b []byte) {
rn := rand.NewSource(seed)

// Regardless of how many bytes the underlying io.Reader produces,
// the provided tokens, values, and errors should always be identical.
t.Run("ReadToken", func(t *testing.T) {
decGot := NewDecoder(&FaultyBuffer{B: b, MaxBytes: 8, Rand: rn})
decWant := NewDecoder(bytes.NewReader(b))
gotTok, gotErr := decGot.ReadToken()
wantTok, wantErr := decWant.ReadToken()
if gotTok.String() != wantTok.String() || !equalError(gotErr, wantErr) {
t.Errorf("Decoder.ReadToken = (%v, %v), want (%v, %v)", gotTok, gotErr, wantTok, wantErr)
}
})
t.Run("ReadValue", func(t *testing.T) {
decGot := NewDecoder(&FaultyBuffer{B: b, MaxBytes: 8, Rand: rn})
decWant := NewDecoder(bytes.NewReader(b))
gotVal, gotErr := decGot.ReadValue()
wantVal, wantErr := decWant.ReadValue()
if !slices.Equal(gotVal, wantVal) || !equalError(gotErr, wantErr) {
t.Errorf("Decoder.ReadValue = (%s, %v), want (%s, %v)", gotVal, gotErr, wantVal, wantErr)
}
})
})
}

func FuzzValueFormat(f *testing.F) {
for _, td := range valueTestdata {
f.Add(int64(0), []byte(td.in))
}

// isValid reports whether b is valid according to the specified options.
isValid := func(b []byte, opts ...Options) bool {
d := NewDecoder(bytes.NewReader(b), opts...)
_, errVal := d.ReadValue()
_, errEOF := d.ReadToken()
return errVal == nil && errEOF == io.EOF
}

// stripWhitespace removes all JSON whitespace characters from the input.
stripWhitespace := func(in []byte) (out []byte) {
out = make([]byte, 0, len(in))
for _, c := range in {
switch c {
case ' ', '\n', '\r', '\t':
default:
out = append(out, c)
}
}
return out
}

allOptions := []Options{
AllowDuplicateNames(true),
AllowInvalidUTF8(true),
EscapeForHTML(true),
EscapeForJS(true),
PreserveRawStrings(true),
CanonicalizeRawInts(true),
CanonicalizeRawFloats(true),
ReorderRawObjects(true),
SpaceAfterColon(true),
SpaceAfterComma(true),
Multiline(true),
WithIndent("\t"),
WithIndentPrefix(" "),
}

f.Fuzz(func(t *testing.T, seed int64, b []byte) {
validRFC7159 := isValid(b, AllowInvalidUTF8(true), AllowDuplicateNames(true))
validRFC8259 := isValid(b, AllowInvalidUTF8(false), AllowDuplicateNames(true))
validRFC7493 := isValid(b, AllowInvalidUTF8(false), AllowDuplicateNames(false))
switch {
case !validRFC7159 && validRFC8259:
t.Errorf("invalid input per RFC 7159 implies invalid per RFC 8259")
case !validRFC8259 && validRFC7493:
t.Errorf("invalid input per RFC 8259 implies invalid per RFC 7493")
}

gotValid := Value(b).IsValid()
wantValid := validRFC7493
if gotValid != wantValid {
t.Errorf("Value.IsValid = %v, want %v", gotValid, wantValid)
}

gotCompacted := Value(string(b))
gotCompactOk := gotCompacted.Compact() == nil
wantCompactOk := validRFC7159
if !bytes.Equal(stripWhitespace(gotCompacted), stripWhitespace(b)) {
t.Errorf("stripWhitespace(Value.Compact) = %s, want %s", stripWhitespace(gotCompacted), stripWhitespace(b))
}
if gotCompactOk != wantCompactOk {
t.Errorf("Value.Compact success mismatch: got %v, want %v", gotCompactOk, wantCompactOk)
}

gotIndented := Value(string(b))
gotIndentOk := gotIndented.Indent() == nil
wantIndentOk := validRFC7159
if !bytes.Equal(stripWhitespace(gotIndented), stripWhitespace(b)) {
t.Errorf("stripWhitespace(Value.Indent) = %s, want %s", stripWhitespace(gotIndented), stripWhitespace(b))
}
if gotIndentOk != wantIndentOk {
t.Errorf("Value.Indent success mismatch: got %v, want %v", gotIndentOk, wantIndentOk)
}

gotCanonicalized := Value(string(b))
gotCanonicalizeOk := gotCanonicalized.Canonicalize() == nil
wantCanonicalizeOk := validRFC7493
if gotCanonicalizeOk != wantCanonicalizeOk {
t.Errorf("Value.Canonicalize success mismatch: got %v, want %v", gotCanonicalizeOk, wantCanonicalizeOk)
}

// Random options should not result in a panic.
var opts []Options
rn := rand.New(rand.NewSource(seed))
for _, opt := range allOptions {
if rn.Intn(len(allOptions)/4) == 0 {
opts = append(opts, opt)
}
}
v := Value(b)
v.Format(opts...) // should not panic
})
}
@@ -1,396 +0,0 @@
// Copyright 2020 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

//go:build goexperiment.jsonv2

package jsontext

import (
"fmt"
"slices"
"strings"
"testing"
"unicode/utf8"
)

func TestPointer(t *testing.T) {
tests := []struct {
in Pointer
wantParent Pointer
wantLast string
wantTokens []string
wantValid bool
}{
{"", "", "", nil, true},
{"a", "", "a", []string{"a"}, false},
{"~", "", "~", []string{"~"}, false},
{"/a", "", "a", []string{"a"}, true},
{"/foo/bar", "/foo", "bar", []string{"foo", "bar"}, true},
{"///", "//", "", []string{"", "", ""}, true},
{"/~0~1", "", "~/", []string{"~/"}, true},
{"/\xde\xad\xbe\xef", "", "\xde\xad\xbe\xef", []string{"\xde\xad\xbe\xef"}, false},
}
for _, tt := range tests {
if got := tt.in.Parent(); got != tt.wantParent {
t.Errorf("Pointer(%q).Parent = %q, want %q", tt.in, got, tt.wantParent)
}
if got := tt.in.LastToken(); got != tt.wantLast {
t.Errorf("Pointer(%q).Last = %q, want %q", tt.in, got, tt.wantLast)
}
if strings.HasPrefix(string(tt.in), "/") {
wantRoundtrip := tt.in
if !utf8.ValidString(string(wantRoundtrip)) {
// Replace bytes of invalid UTF-8 with Unicode replacement character.
wantRoundtrip = Pointer([]rune(wantRoundtrip))
}
if got := tt.in.Parent().AppendToken(tt.in.LastToken()); got != wantRoundtrip {
t.Errorf("Pointer(%q).Parent().AppendToken(LastToken()) = %q, want %q", tt.in, got, tt.in)
}
in := tt.in
for {
if (in + "x").Contains(tt.in) {
t.Errorf("Pointer(%q).Contains(%q) = true, want false", in+"x", tt.in)
}
if !in.Contains(tt.in) {
t.Errorf("Pointer(%q).Contains(%q) = false, want true", in, tt.in)
}
if in == in.Parent() {
break
}
in = in.Parent()
}
}
if got := slices.Collect(tt.in.Tokens()); !slices.Equal(got, tt.wantTokens) {
t.Errorf("Pointer(%q).Tokens = %q, want %q", tt.in, got, tt.wantTokens)
}
if got := tt.in.IsValid(); got != tt.wantValid {
t.Errorf("Pointer(%q).IsValid = %v, want %v", tt.in, got, tt.wantValid)
}
}
}
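
// Reference tokens in a JSON Pointer escape '~' as "~0" and '/' as "~1", which
// is why "/~0~1" above names the single member key "~/". The helper below is a
// sketch of the round trip exercised by the test, not an original test case.
func examplePointerEscaping() string {
	p := Pointer("").AppendToken("~/")
	return string(p) // expected: "/~0~1"
}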

func TestStateMachine(t *testing.T) {
// To test a state machine, we pass an ordered sequence of operations and
// check whether the current state is as expected.
// The operation type is a union type of various possible operations,
// which either call mutating methods on the state machine or
// call accessor methods on the state machine and verify the results.
type operation any
type (
// stackLengths checks the results of stateEntry.length accessors.
stackLengths []int64

// appendTokens is a sequence of token kinds to append where
// none of them are expected to fail.
//
// For example: `[n"0]` is equivalent to the following sequence:
//
// pushArray()
// appendLiteral()
// appendString()
// appendNumber()
// popArray()
//
appendTokens string

// appendToken is a single token kind to append with the expected error.
appendToken struct {
kind Kind
want error
}

// needDelim checks the result of the needDelim accessor.
needDelim struct {
next Kind
want byte
}
)

// Each entry is a sequence of tokens to pass to the state machine.
tests := []struct {
label string
ops []operation
}{{
"TopLevelValues",
[]operation{
stackLengths{0},
needDelim{'n', 0},
appendTokens(`nft`),
stackLengths{3},
needDelim{'"', 0},
appendTokens(`"0[]{}`),
stackLengths{7},
},
}, {
"ArrayValues",
[]operation{
stackLengths{0},
needDelim{'[', 0},
appendTokens(`[`),
stackLengths{1, 0},
needDelim{'n', 0},
appendTokens(`nft`),
stackLengths{1, 3},
needDelim{'"', ','},
appendTokens(`"0[]{}`),
stackLengths{1, 7},
needDelim{']', 0},
appendTokens(`]`),
stackLengths{1},
},
}, {
"ObjectValues",
[]operation{
stackLengths{0},
needDelim{'{', 0},
appendTokens(`{`),
stackLengths{1, 0},
needDelim{'"', 0},
appendTokens(`"`),
stackLengths{1, 1},
needDelim{'n', ':'},
appendTokens(`n`),
stackLengths{1, 2},
needDelim{'"', ','},
appendTokens(`"f"t`),
stackLengths{1, 6},
appendTokens(`"""0"[]"{}`),
stackLengths{1, 14},
needDelim{'}', 0},
appendTokens(`}`),
stackLengths{1},
},
}, {
"ObjectCardinality",
[]operation{
appendTokens(`{`),

// Appending any kind other than string for object name is an error.
appendToken{'n', ErrNonStringName},
appendToken{'f', ErrNonStringName},
appendToken{'t', ErrNonStringName},
appendToken{'0', ErrNonStringName},
appendToken{'{', ErrNonStringName},
appendToken{'[', ErrNonStringName},
appendTokens(`"`),

// Appending '}' without first appending any value is an error.
appendToken{'}', errMissingValue},
appendTokens(`"`),

appendTokens(`}`),
},
}, {
"MismatchingDelims",
[]operation{
appendToken{'}', errMismatchDelim}, // appending '}' without preceding '{'
appendTokens(`[[{`),
appendToken{']', errMismatchDelim}, // appending ']' that mismatches preceding '{'
appendTokens(`}]`),
appendToken{'}', errMismatchDelim}, // appending '}' that mismatches preceding '['
appendTokens(`]`),
appendToken{']', errMismatchDelim}, // appending ']' without preceding '['
},
}}

for _, tt := range tests {
t.Run(tt.label, func(t *testing.T) {
// Flatten appendTokens to sequence of appendToken entries.
var ops []operation
for _, op := range tt.ops {
if toks, ok := op.(appendTokens); ok {
for _, k := range []byte(toks) {
ops = append(ops, appendToken{Kind(k), nil})
}
continue
}
ops = append(ops, op)
}

// Append each token to the state machine and check the output.
var state stateMachine
state.reset()
var sequence []Kind
for _, op := range ops {
switch op := op.(type) {
case stackLengths:
var got []int64
for i := range state.Depth() {
e := state.index(i)
got = append(got, e.Length())
}
want := []int64(op)
if !slices.Equal(got, want) {
t.Fatalf("%s: stack lengths mismatch:\ngot %v\nwant %v", sequence, got, want)
}
case appendToken:
got := state.append(op.kind)
if !equalError(got, op.want) {
t.Fatalf("%s: append('%c') = %v, want %v", sequence, op.kind, got, op.want)
}
if got == nil {
sequence = append(sequence, op.kind)
}
case needDelim:
if got := state.needDelim(op.next); got != op.want {
t.Fatalf("%s: needDelim('%c') = '%c', want '%c'", sequence, op.next, got, op.want)
}
default:
panic(fmt.Sprintf("unknown operation: %T", op))
}
}
})
}
}

// append is a thin wrapper over the other append, pop, or push methods
// based on the token kind.
func (s *stateMachine) append(k Kind) error {
switch k {
case 'n', 'f', 't':
return s.appendLiteral()
case '"':
return s.appendString()
case '0':
return s.appendNumber()
case '{':
return s.pushObject()
case '}':
return s.popObject()
case '[':
return s.pushArray()
case ']':
return s.popArray()
default:
panic(fmt.Sprintf("invalid token kind: '%c'", k))
}
}
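
// As a concrete illustration of the mapping above, the token kinds for the
// document {"k":[1,true]} form the sequence `{"[0t]}`; feeding them through
// the wrapper is expected to succeed at every step. This helper is a sketch
// for illustration only and is not part of the original tests.
func exampleStateMachineAppend() error {
	var state stateMachine
	state.reset()
	for _, k := range []byte(`{"[0t]}`) {
		if err := state.append(Kind(k)); err != nil {
			return err
		}
	}
	return nil
}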

func TestObjectNamespace(t *testing.T) {
type operation any
type (
insert struct {
name string
wantInserted bool
}
removeLast struct{}
)

// Sequence of insert operations to perform (order matters).
ops := []operation{
insert{`""`, true},
removeLast{},
insert{`""`, true},
insert{`""`, false},

// Test insertion of the same name with different formatting.
insert{`"alpha"`, true},
insert{`"ALPHA"`, true}, // case-sensitive matching
insert{`"alpha"`, false},
insert{`"\u0061\u006c\u0070\u0068\u0061"`, false}, // unescapes to "alpha"
removeLast{}, // removes "ALPHA"
insert{`"alpha"`, false},
removeLast{}, // removes "alpha"
insert{`"alpha"`, true},
removeLast{},

// Bulk insert simple names.
insert{`"alpha"`, true},
insert{`"bravo"`, true},
insert{`"charlie"`, true},
insert{`"delta"`, true},
insert{`"echo"`, true},
insert{`"foxtrot"`, true},
insert{`"golf"`, true},
insert{`"hotel"`, true},
insert{`"india"`, true},
insert{`"juliet"`, true},
insert{`"kilo"`, true},
insert{`"lima"`, true},
insert{`"mike"`, true},
insert{`"november"`, true},
insert{`"oscar"`, true},
insert{`"papa"`, true},
insert{`"quebec"`, true},
insert{`"romeo"`, true},
insert{`"sierra"`, true},
insert{`"tango"`, true},
insert{`"uniform"`, true},
insert{`"victor"`, true},
insert{`"whiskey"`, true},
insert{`"xray"`, true},
insert{`"yankee"`, true},
insert{`"zulu"`, true},

// Test insertion of invalid UTF-8.
insert{`"` + "\ufffd" + `"`, true},
insert{`"` + "\ufffd" + `"`, false},
insert{`"\ufffd"`, false}, // unescapes to Unicode replacement character
insert{`"\uFFFD"`, false}, // unescapes to Unicode replacement character
insert{`"` + "\xff" + `"`, false}, // mangled into the Unicode replacement character
removeLast{},
insert{`"` + "\ufffd" + `"`, true},

// Test insertion of unicode characters.
insert{`"☺☻☹"`, true},
insert{`"☺☻☹"`, false},
removeLast{},
insert{`"☺☻☹"`, true},
}

// Execute the sequence of operations twice:
// 1) on a fresh namespace and 2) on a namespace that has been reset.
var ns objectNamespace
wantNames := []string{}
for _, reset := range []bool{false, true} {
if reset {
ns.reset()
wantNames = nil
}

// Execute the operations and ensure the state is consistent.
for i, op := range ops {
switch op := op.(type) {
case insert:
gotInserted := ns.insertQuoted([]byte(op.name), false)
if gotInserted != op.wantInserted {
t.Fatalf("%d: objectNamespace{%v}.insert(%v) = %v, want %v", i, strings.Join(wantNames, " "), op.name, gotInserted, op.wantInserted)
}
if gotInserted {
b, _ := AppendUnquote(nil, []byte(op.name))
wantNames = append(wantNames, string(b))
}
case removeLast:
ns.removeLast()
wantNames = wantNames[:len(wantNames)-1]
default:
panic(fmt.Sprintf("unknown operation: %T", op))
}

// Check that the namespace is consistent.
gotNames := []string{}
for i := range ns.length() {
gotNames = append(gotNames, string(ns.getUnquoted(i)))
}
if !slices.Equal(gotNames, wantNames) {
t.Fatalf("%d: objectNamespace = {%v}, want {%v}", i, strings.Join(gotNames, " "), strings.Join(wantNames, " "))
}
}

// Verify that we have not switched to using a Go map.
if ns.mapNames != nil {
t.Errorf("objectNamespace.mapNames = non-nil, want nil")
}

// Insert a large number of names.
for i := range 64 {
ns.insertUnquoted([]byte(fmt.Sprintf(`name%d`, i)))
}

// Verify that we did switch to using a Go map.
if ns.mapNames == nil {
t.Errorf("objectNamespace.mapNames = nil, want non-nil")
}
}
}
@@ -1,168 +0,0 @@
// Copyright 2020 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

//go:build goexperiment.jsonv2

package jsontext

import (
"math"
"reflect"
"testing"
)

func TestTokenStringAllocations(t *testing.T) {
if testing.CoverMode() != "" {
t.Skip("coverage mode breaks the compiler optimization this depends on")
}

tok := rawToken(`"hello"`)
var m map[string]bool
got := int(testing.AllocsPerRun(10, func() {
// This function uses tok.String() in a non-escaping manner
// (i.e., looking it up in a Go map). It should not allocate.
if m[tok.String()] {
panic("never executed")
}
}))
if got > 0 {
t.Errorf("Token.String allocated %d times, want 0", got)
}
}

func TestTokenAccessors(t *testing.T) {
type token struct {
Bool bool
String string
Float float64
Int int64
Uint uint64
Kind Kind
}

tests := []struct {
in Token
want token
}{
{Token{}, token{String: "<invalid jsontext.Token>"}},
{Null, token{String: "null", Kind: 'n'}},
{False, token{Bool: false, String: "false", Kind: 'f'}},
{True, token{Bool: true, String: "true", Kind: 't'}},
{Bool(false), token{Bool: false, String: "false", Kind: 'f'}},
{Bool(true), token{Bool: true, String: "true", Kind: 't'}},
{BeginObject, token{String: "{", Kind: '{'}},
{EndObject, token{String: "}", Kind: '}'}},
{BeginArray, token{String: "[", Kind: '['}},
{EndArray, token{String: "]", Kind: ']'}},
{String(""), token{String: "", Kind: '"'}},
{String("hello, world!"), token{String: "hello, world!", Kind: '"'}},
{rawToken(`"hello, world!"`), token{String: "hello, world!", Kind: '"'}},
{Float(0), token{String: "0", Float: 0, Int: 0, Uint: 0, Kind: '0'}},
{Float(math.Copysign(0, -1)), token{String: "-0", Float: math.Copysign(0, -1), Int: 0, Uint: 0, Kind: '0'}},
{Float(math.NaN()), token{String: "NaN", Float: math.NaN(), Int: 0, Uint: 0, Kind: '"'}},
{Float(math.Inf(+1)), token{String: "Infinity", Float: math.Inf(+1), Kind: '"'}},
{Float(math.Inf(-1)), token{String: "-Infinity", Float: math.Inf(-1), Kind: '"'}},
{Int(minInt64), token{String: "-9223372036854775808", Float: minInt64, Int: minInt64, Uint: minUint64, Kind: '0'}},
{Int(minInt64 + 1), token{String: "-9223372036854775807", Float: minInt64 + 1, Int: minInt64 + 1, Uint: minUint64, Kind: '0'}},
{Int(-1), token{String: "-1", Float: -1, Int: -1, Uint: minUint64, Kind: '0'}},
{Int(0), token{String: "0", Float: 0, Int: 0, Uint: 0, Kind: '0'}},
{Int(+1), token{String: "1", Float: +1, Int: +1, Uint: +1, Kind: '0'}},
{Int(maxInt64 - 1), token{String: "9223372036854775806", Float: maxInt64 - 1, Int: maxInt64 - 1, Uint: maxInt64 - 1, Kind: '0'}},
{Int(maxInt64), token{String: "9223372036854775807", Float: maxInt64, Int: maxInt64, Uint: maxInt64, Kind: '0'}},
{Uint(minUint64), token{String: "0", Kind: '0'}},
{Uint(minUint64 + 1), token{String: "1", Float: minUint64 + 1, Int: minUint64 + 1, Uint: minUint64 + 1, Kind: '0'}},
{Uint(maxUint64 - 1), token{String: "18446744073709551614", Float: maxUint64 - 1, Int: maxInt64, Uint: maxUint64 - 1, Kind: '0'}},
{Uint(maxUint64), token{String: "18446744073709551615", Float: maxUint64, Int: maxInt64, Uint: maxUint64, Kind: '0'}},
{rawToken(`-0`), token{String: "-0", Float: math.Copysign(0, -1), Int: 0, Uint: 0, Kind: '0'}},
{rawToken(`1e1000`), token{String: "1e1000", Float: math.MaxFloat64, Int: maxInt64, Uint: maxUint64, Kind: '0'}},
{rawToken(`-1e1000`), token{String: "-1e1000", Float: -math.MaxFloat64, Int: minInt64, Uint: minUint64, Kind: '0'}},
{rawToken(`0.1`), token{String: "0.1", Float: 0.1, Int: 0, Uint: 0, Kind: '0'}},
{rawToken(`0.5`), token{String: "0.5", Float: 0.5, Int: 0, Uint: 0, Kind: '0'}},
{rawToken(`0.9`), token{String: "0.9", Float: 0.9, Int: 0, Uint: 0, Kind: '0'}},
{rawToken(`1.1`), token{String: "1.1", Float: 1.1, Int: 1, Uint: 1, Kind: '0'}},
{rawToken(`-0.1`), token{String: "-0.1", Float: -0.1, Int: 0, Uint: 0, Kind: '0'}},
{rawToken(`-0.5`), token{String: "-0.5", Float: -0.5, Int: 0, Uint: 0, Kind: '0'}},
{rawToken(`-0.9`), token{String: "-0.9", Float: -0.9, Int: 0, Uint: 0, Kind: '0'}},
{rawToken(`-1.1`), token{String: "-1.1", Float: -1.1, Int: -1, Uint: 0, Kind: '0'}},
{rawToken(`99999999999999999999`), token{String: "99999999999999999999", Float: 1e20 - 1, Int: maxInt64, Uint: maxUint64, Kind: '0'}},
{rawToken(`-99999999999999999999`), token{String: "-99999999999999999999", Float: -1e20 - 1, Int: minInt64, Uint: minUint64, Kind: '0'}},
}

for _, tt := range tests {
t.Run("", func(t *testing.T) {
got := token{
Bool: func() bool {
defer func() { recover() }()
return tt.in.Bool()
}(),
String: tt.in.String(),
Float: func() float64 {
defer func() { recover() }()
return tt.in.Float()
}(),
Int: func() int64 {
defer func() { recover() }()
return tt.in.Int()
}(),
Uint: func() uint64 {
defer func() { recover() }()
return tt.in.Uint()
}(),
Kind: tt.in.Kind(),
}

if got.Bool != tt.want.Bool {
t.Errorf("Token(%s).Bool() = %v, want %v", tt.in, got.Bool, tt.want.Bool)
}
if got.String != tt.want.String {
t.Errorf("Token(%s).String() = %v, want %v", tt.in, got.String, tt.want.String)
}
if math.Float64bits(got.Float) != math.Float64bits(tt.want.Float) {
t.Errorf("Token(%s).Float() = %v, want %v", tt.in, got.Float, tt.want.Float)
}
if got.Int != tt.want.Int {
t.Errorf("Token(%s).Int() = %v, want %v", tt.in, got.Int, tt.want.Int)
}
if got.Uint != tt.want.Uint {
t.Errorf("Token(%s).Uint() = %v, want %v", tt.in, got.Uint, tt.want.Uint)
}
if got.Kind != tt.want.Kind {
t.Errorf("Token(%s).Kind() = %v, want %v", tt.in, got.Kind, tt.want.Kind)
}
})
}
}
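
// As the table above shows, the numeric accessors saturate on overflow rather
// than failing: a number that does not fit reports the nearest representable
// bound. The helper below is an illustrative sketch restating two of the cases
// above; it is not part of the original tests.
func exampleTokenSaturation() (bool, bool) {
	tok := rawToken(`1e1000`)
	return tok.Float() == math.MaxFloat64, tok.Int() == math.MaxInt64
}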

func TestTokenClone(t *testing.T) {
tests := []struct {
in Token
wantExactRaw bool
}{
{Token{}, true},
{Null, true},
{False, true},
{True, true},
{BeginObject, true},
{EndObject, true},
{BeginArray, true},
{EndArray, true},
{String("hello, world!"), true},
{rawToken(`"hello, world!"`), false},
{Float(3.14159), true},
{rawToken(`3.14159`), false},
}

for _, tt := range tests {
t.Run("", func(t *testing.T) {
got := tt.in.Clone()
if !reflect.DeepEqual(got, tt.in) {
t.Errorf("Token(%s) == Token(%s).Clone() = false, want true", tt.in, tt.in)
}
gotExactRaw := got.raw == tt.in.raw
if gotExactRaw != tt.wantExactRaw {
t.Errorf("Token(%s).raw == Token(%s).Clone().raw = %v, want %v", tt.in, tt.in, gotExactRaw, tt.wantExactRaw)
}
})
}
}
@@ -1,200 +0,0 @@
// Copyright 2020 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

//go:build goexperiment.jsonv2

package jsontext

import (
"io"
"strings"
"testing"

"encoding/json/internal/jsontest"
"encoding/json/internal/jsonwire"
)

type valueTestdataEntry struct {
name jsontest.CaseName
in string
wantValid bool
wantCompacted string
wantCompactErr error // implies wantCompacted is in
wantIndented string // wantCompacted if empty; uses "\t" for indent prefix and " " for indent
wantIndentErr error // implies wantCompacted is in
wantCanonicalized string // wantCompacted if empty
wantCanonicalizeErr error // implies wantCompacted is in
}

var valueTestdata = append(func() (out []valueTestdataEntry) {
// Initialize valueTestdata from coderTestdata.
for _, td := range coderTestdata {
// NOTE: The Compact method preserves the raw formatting of strings,
// while the Encoder (by default) does not.
if td.name.Name == "ComplicatedString" {
td.outCompacted = strings.TrimSpace(td.in)
}
out = append(out, valueTestdataEntry{
name: td.name,
in: td.in,
wantValid: true,
wantCompacted: td.outCompacted,
wantIndented: td.outIndented,
wantCanonicalized: td.outCanonicalized,
})
}
return out
}(), []valueTestdataEntry{{
name: jsontest.Name("RFC8785/Primitives"),
in: `{
"numbers": [333333333.33333329, 1E30, 4.50,
2e-3, 0.000000000000000000000000001, -0],
"string": "\u20ac$\u000F\u000aA'\u0042\u0022\u005c\\\"\/",
"literals": [null, true, false]
}`,
wantValid: true,
wantCompacted: `{"numbers":[333333333.33333329,1E30,4.50,2e-3,0.000000000000000000000000001,-0],"string":"\u20ac$\u000F\u000aA'\u0042\u0022\u005c\\\"\/","literals":[null,true,false]}`,
wantIndented: `{
"numbers": [
333333333.33333329,
1E30,
4.50,
2e-3,
0.000000000000000000000000001,
-0
],
"string": "\u20ac$\u000F\u000aA'\u0042\u0022\u005c\\\"\/",
"literals": [
null,
true,
false
]
}`,
wantCanonicalized: `{"literals":[null,true,false],"numbers":[333333333.3333333,1e+30,4.5,0.002,1e-27,0],"string":"€$\u000f\nA'B\"\\\\\"/"}`,
}, {
name: jsontest.Name("RFC8785/ObjectOrdering"),
in: `{
"\u20ac": "Euro Sign",
"\r": "Carriage Return",
"\ufb33": "Hebrew Letter Dalet With Dagesh",
"1": "One",
"\ud83d\ude00": "Emoji: Grinning Face",
"\u0080": "Control",
"\u00f6": "Latin Small Letter O With Diaeresis"
}`,
wantValid: true,
wantCompacted: `{"\u20ac":"Euro Sign","\r":"Carriage Return","\ufb33":"Hebrew Letter Dalet With Dagesh","1":"One","\ud83d\ude00":"Emoji: Grinning Face","\u0080":"Control","\u00f6":"Latin Small Letter O With Diaeresis"}`,
wantIndented: `{
"\u20ac": "Euro Sign",
"\r": "Carriage Return",
"\ufb33": "Hebrew Letter Dalet With Dagesh",
"1": "One",
"\ud83d\ude00": "Emoji: Grinning Face",
"\u0080": "Control",
"\u00f6": "Latin Small Letter O With Diaeresis"
}`,
wantCanonicalized: `{"\r":"Carriage Return","1":"One","":"Control","ö":"Latin Small Letter O With Diaeresis","€":"Euro Sign","😀":"Emoji: Grinning Face","דּ":"Hebrew Letter Dalet With Dagesh"}`,
}, {
name: jsontest.Name("LargeIntegers"),
in: ` [ -9223372036854775808 , 9223372036854775807 ] `,
wantValid: true,
wantCompacted: `[-9223372036854775808,9223372036854775807]`,
wantIndented: `[
-9223372036854775808,
9223372036854775807
]`,
wantCanonicalized: `[-9223372036854776000,9223372036854776000]`, // NOTE: Loss of precision due to numbers being treated as floats.
}, {
name: jsontest.Name("InvalidUTF8"),
in: ` "living` + "\xde\xad\xbe\xef" + `\ufffd<66>" `,
wantValid: false, // uses RFC 7493 as the definition, which requires valid UTF-8
wantCompacted: `"living` + "\xde\xad\xbe\xef" + `\ufffd<66>"`,
wantCanonicalizeErr: E(jsonwire.ErrInvalidUTF8).withPos(` "living`+"\xde\xad", ""),
}, {
name: jsontest.Name("InvalidUTF8/SurrogateHalf"),
in: `"\ud800"`,
wantValid: false, // uses RFC 7493 as the definition, which requires valid UTF-8
wantCompacted: `"\ud800"`,
wantCanonicalizeErr: newInvalidEscapeSequenceError(`\ud800"`).withPos(`"`, ""),
}, {
name: jsontest.Name("UppercaseEscaped"),
in: `"\u000B"`,
wantValid: true,
wantCompacted: `"\u000B"`,
wantCanonicalized: `"\u000b"`,
}, {
name: jsontest.Name("DuplicateNames"),
in: ` { "0" : 0 , "1" : 1 , "0" : 0 }`,
wantValid: false, // uses RFC 7493 as the definition, which does check for object name uniqueness
wantCompacted: `{"0":0,"1":1,"0":0}`,
wantIndented: `{
"0": 0,
"1": 1,
"0": 0
}`,
wantCanonicalizeErr: E(ErrDuplicateName).withPos(` { "0" : 0 , "1" : 1 , `, "/0"),
}, {
name: jsontest.Name("Whitespace"),
in: " \n\r\t",
wantValid: false,
wantCompacted: " \n\r\t",
wantCompactErr: E(io.ErrUnexpectedEOF).withPos(" \n\r\t", ""),
wantIndentErr: E(io.ErrUnexpectedEOF).withPos(" \n\r\t", ""),
wantCanonicalizeErr: E(io.ErrUnexpectedEOF).withPos(" \n\r\t", ""),
}}...)

func TestValueMethods(t *testing.T) {
for _, td := range valueTestdata {
t.Run(td.name.Name, func(t *testing.T) {
if td.wantIndented == "" {
td.wantIndented = td.wantCompacted
}
if td.wantCanonicalized == "" {
td.wantCanonicalized = td.wantCompacted
}
if td.wantCompactErr != nil {
td.wantCompacted = td.in
}
if td.wantIndentErr != nil {
td.wantIndented = td.in
}
if td.wantCanonicalizeErr != nil {
td.wantCanonicalized = td.in
}

v := Value(td.in)
gotValid := v.IsValid()
if gotValid != td.wantValid {
t.Errorf("%s: Value.IsValid = %v, want %v", td.name.Where, gotValid, td.wantValid)
}

gotCompacted := Value(td.in)
gotCompactErr := gotCompacted.Compact()
if string(gotCompacted) != td.wantCompacted {
t.Errorf("%s: Value.Compact = %s, want %s", td.name.Where, gotCompacted, td.wantCompacted)
}
if !equalError(gotCompactErr, td.wantCompactErr) {
t.Errorf("%s: Value.Compact error mismatch:\ngot %v\nwant %v", td.name.Where, gotCompactErr, td.wantCompactErr)
}

gotIndented := Value(td.in)
gotIndentErr := gotIndented.Indent(WithIndentPrefix("\t"), WithIndent(" "))
if string(gotIndented) != td.wantIndented {
t.Errorf("%s: Value.Indent = %s, want %s", td.name.Where, gotIndented, td.wantIndented)
}
if !equalError(gotIndentErr, td.wantIndentErr) {
t.Errorf("%s: Value.Indent error mismatch:\ngot %v\nwant %v", td.name.Where, gotIndentErr, td.wantIndentErr)
}

gotCanonicalized := Value(td.in)
gotCanonicalizeErr := gotCanonicalized.Canonicalize()
if string(gotCanonicalized) != td.wantCanonicalized {
t.Errorf("%s: Value.Canonicalize = %s, want %s", td.name.Where, gotCanonicalized, td.wantCanonicalized)
}
if !equalError(gotCanonicalizeErr, td.wantCanonicalizeErr) {
t.Errorf("%s: Value.Canonicalize error mismatch:\ngot %v\nwant %v", td.name.Where, gotCanonicalizeErr, td.wantCanonicalizeErr)
}
})
}
}